From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Fri, 19 Apr 2024 02:47:55 +0200 Subject: Adding upstream version 124.0.1. Signed-off-by: Daniel Baumann --- dom/media/platforms/wmf/DXVA2Manager.cpp | 1251 ++++++++++++++++++++ dom/media/platforms/wmf/DXVA2Manager.h | 90 ++ dom/media/platforms/wmf/MFCDMExtra.h | 19 + dom/media/platforms/wmf/MFCDMProxy.cpp | 97 ++ dom/media/platforms/wmf/MFCDMProxy.h | 75 ++ dom/media/platforms/wmf/MFCDMSession.cpp | 318 +++++ dom/media/platforms/wmf/MFCDMSession.h | 93 ++ .../platforms/wmf/MFContentProtectionManager.cpp | 171 +++ .../platforms/wmf/MFContentProtectionManager.h | 81 ++ .../platforms/wmf/MFMediaEngineAudioStream.cpp | 137 +++ dom/media/platforms/wmf/MFMediaEngineAudioStream.h | 51 + .../platforms/wmf/MFMediaEngineDecoderModule.cpp | 185 +++ .../platforms/wmf/MFMediaEngineDecoderModule.h | 47 + dom/media/platforms/wmf/MFMediaEngineExtension.cpp | 88 ++ dom/media/platforms/wmf/MFMediaEngineExtension.h | 49 + dom/media/platforms/wmf/MFMediaEngineExtra.h | 12 + dom/media/platforms/wmf/MFMediaEngineNotify.cpp | 25 + dom/media/platforms/wmf/MFMediaEngineNotify.h | 55 + dom/media/platforms/wmf/MFMediaEngineStream.cpp | 596 ++++++++++ dom/media/platforms/wmf/MFMediaEngineStream.h | 228 ++++ .../platforms/wmf/MFMediaEngineVideoStream.cpp | 375 ++++++ dom/media/platforms/wmf/MFMediaEngineVideoStream.h | 107 ++ dom/media/platforms/wmf/MFMediaSource.cpp | 606 ++++++++++ dom/media/platforms/wmf/MFMediaSource.h | 188 +++ dom/media/platforms/wmf/MFPMPHostWrapper.cpp | 92 ++ dom/media/platforms/wmf/MFPMPHostWrapper.h | 44 + dom/media/platforms/wmf/MFTDecoder.cpp | 430 +++++++ dom/media/platforms/wmf/MFTDecoder.h | 132 +++ dom/media/platforms/wmf/MFTEncoder.cpp | 754 ++++++++++++ dom/media/platforms/wmf/MFTEncoder.h | 144 +++ dom/media/platforms/wmf/WMF.h | 198 ++++ dom/media/platforms/wmf/WMFAudioMFTManager.cpp | 315 +++++ dom/media/platforms/wmf/WMFAudioMFTManager.h | 69 ++ dom/media/platforms/wmf/WMFDataEncoderUtils.h | 154 +++ dom/media/platforms/wmf/WMFDecoderModule.cpp | 492 ++++++++ dom/media/platforms/wmf/WMFDecoderModule.h | 70 ++ dom/media/platforms/wmf/WMFEncoderModule.cpp | 32 + dom/media/platforms/wmf/WMFEncoderModule.h | 27 + dom/media/platforms/wmf/WMFMediaDataDecoder.cpp | 272 +++++ dom/media/platforms/wmf/WMFMediaDataDecoder.h | 182 +++ dom/media/platforms/wmf/WMFMediaDataEncoder.h | 347 ++++++ dom/media/platforms/wmf/WMFUtils.cpp | 628 ++++++++++ dom/media/platforms/wmf/WMFUtils.h | 123 ++ dom/media/platforms/wmf/WMFVideoMFTManager.cpp | 1014 ++++++++++++++++ dom/media/platforms/wmf/WMFVideoMFTManager.h | 133 +++ .../wmf/gtest/TestCanCreateMFTDecoder.cpp | 15 + dom/media/platforms/wmf/gtest/moz.build | 15 + dom/media/platforms/wmf/metrics.yaml | 88 ++ dom/media/platforms/wmf/moz.build | 85 ++ 49 files changed, 10799 insertions(+) create mode 100644 dom/media/platforms/wmf/DXVA2Manager.cpp create mode 100644 dom/media/platforms/wmf/DXVA2Manager.h create mode 100644 dom/media/platforms/wmf/MFCDMExtra.h create mode 100644 dom/media/platforms/wmf/MFCDMProxy.cpp create mode 100644 dom/media/platforms/wmf/MFCDMProxy.h create mode 100644 dom/media/platforms/wmf/MFCDMSession.cpp create mode 100644 dom/media/platforms/wmf/MFCDMSession.h create mode 100644 dom/media/platforms/wmf/MFContentProtectionManager.cpp create mode 100644 dom/media/platforms/wmf/MFContentProtectionManager.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp create mode 100644 
dom/media/platforms/wmf/MFMediaEngineAudioStream.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp create mode 100644 dom/media/platforms/wmf/MFMediaEngineDecoderModule.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineExtension.cpp create mode 100644 dom/media/platforms/wmf/MFMediaEngineExtension.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineExtra.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineNotify.cpp create mode 100644 dom/media/platforms/wmf/MFMediaEngineNotify.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineStream.cpp create mode 100644 dom/media/platforms/wmf/MFMediaEngineStream.h create mode 100644 dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp create mode 100644 dom/media/platforms/wmf/MFMediaEngineVideoStream.h create mode 100644 dom/media/platforms/wmf/MFMediaSource.cpp create mode 100644 dom/media/platforms/wmf/MFMediaSource.h create mode 100644 dom/media/platforms/wmf/MFPMPHostWrapper.cpp create mode 100644 dom/media/platforms/wmf/MFPMPHostWrapper.h create mode 100644 dom/media/platforms/wmf/MFTDecoder.cpp create mode 100644 dom/media/platforms/wmf/MFTDecoder.h create mode 100644 dom/media/platforms/wmf/MFTEncoder.cpp create mode 100644 dom/media/platforms/wmf/MFTEncoder.h create mode 100644 dom/media/platforms/wmf/WMF.h create mode 100644 dom/media/platforms/wmf/WMFAudioMFTManager.cpp create mode 100644 dom/media/platforms/wmf/WMFAudioMFTManager.h create mode 100644 dom/media/platforms/wmf/WMFDataEncoderUtils.h create mode 100644 dom/media/platforms/wmf/WMFDecoderModule.cpp create mode 100644 dom/media/platforms/wmf/WMFDecoderModule.h create mode 100644 dom/media/platforms/wmf/WMFEncoderModule.cpp create mode 100644 dom/media/platforms/wmf/WMFEncoderModule.h create mode 100644 dom/media/platforms/wmf/WMFMediaDataDecoder.cpp create mode 100644 dom/media/platforms/wmf/WMFMediaDataDecoder.h create mode 100644 dom/media/platforms/wmf/WMFMediaDataEncoder.h create mode 100644 dom/media/platforms/wmf/WMFUtils.cpp create mode 100644 dom/media/platforms/wmf/WMFUtils.h create mode 100644 dom/media/platforms/wmf/WMFVideoMFTManager.cpp create mode 100644 dom/media/platforms/wmf/WMFVideoMFTManager.h create mode 100644 dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp create mode 100644 dom/media/platforms/wmf/gtest/moz.build create mode 100644 dom/media/platforms/wmf/metrics.yaml create mode 100644 dom/media/platforms/wmf/moz.build (limited to 'dom/media/platforms/wmf') diff --git a/dom/media/platforms/wmf/DXVA2Manager.cpp b/dom/media/platforms/wmf/DXVA2Manager.cpp new file mode 100644 index 0000000000..36b424ab8e --- /dev/null +++ b/dom/media/platforms/wmf/DXVA2Manager.cpp @@ -0,0 +1,1251 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifdef MOZ_AV1 +# include "AOMDecoder.h" +#endif +#include "DXVA2Manager.h" +#include +#include "DriverCrashGuard.h" +#include "GfxDriverInfo.h" +#include "ImageContainer.h" +#include "MFTDecoder.h" +#include "MediaTelemetryConstants.h" +#include "PerformanceRecorder.h" +#include "VideoUtils.h" +#include "VPXDecoder.h" +#include "WMFUtils.h" +#include "gfxCrashReporterUtils.h" +#include "gfxWindowsPlatform.h" +#include "mfapi.h" +#include "mozilla/StaticMutex.h" +#include "mozilla/StaticPrefs_media.h" +#include "mozilla/Telemetry.h" +#include "mozilla/gfx/DeviceManagerDx.h" +#include "mozilla/layers/D3D11ShareHandleImage.h" +#include "mozilla/layers/D3D11TextureIMFSampleImage.h" +#include "mozilla/layers/HelpersD3D11.h" +#include "mozilla/layers/ImageBridgeChild.h" +#include "mozilla/layers/TextureD3D11.h" +#include "mozilla/layers/TextureForwarder.h" +#include "mozilla/mscom/EnsureMTA.h" +#include "nsPrintfCString.h" +#include "nsThreadUtils.h" + +const GUID MF_XVP_PLAYBACK_MODE = { + 0x3c5d293f, + 0xad67, + 0x4e29, + {0xaf, 0x12, 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9}}; + +DEFINE_GUID(MF_LOW_LATENCY, 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27, + 0x27, 0xa0, 0x24, 0xee); + +// R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 or +// earlier, and don't handle 1080p60 well. +static const DWORD sAMDPreUVD4[] = { + // clang-format off + 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, 0x94c1, 0x94c3, 0x94c4, 0x94c5, + 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588, + 0x9589, 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, 0x9505, 0x9506, 0x9507, + 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7, + 0x95c9, 0x95cc, 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, 0x9598, 0x9599, + 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714, + 0x9715, 0x9440, 0x9441, 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, 0x9452, + 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, 0x947a, 0x947b, 0x9480, 0x9487, 0x9488, + 0x9489, 0x948a, 0x948f, 0x9490, 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542, + 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, 0x94a3, 0x94b1, 0x94b3, 0x94b4, + 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa, + 0x68fe, 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, 0x68a0, 0x68a1, 0x68a8, + 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d, + 0x6898, 0x6899, 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, 0x9807, 0x9808, + 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643, + 0x9644, 0x9645, 0x9649, 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, 0x6729, + 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, 0x6745, 0x6746, 0x6747, 0x6748, 0x6749, + 0x674a, 0x6750, 0x6751, 0x6758, 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849, + 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, 0x6766, 0x6767, 0x6768, 0x6770, + 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707, + 0x6708, 0x6709, 0x6718, 0x6719, 0x671c, 
0x671d, 0x671f, 0x9900, 0x9901, 0x9903, 0x9904, 0x9905, 0x9906, + 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918, + 0x9919, 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, 0x9999, 0x999a, 0x999b, + 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4 + // clang-format on +}; + +// List of NVidia Telsa GPU known to have broken NV12 rendering. +static const DWORD sNVIDIABrokenNV12[] = { + // clang-format off + 0x0191, 0x0193, 0x0194, 0x0197, 0x019d, 0x019e, // G80 + 0x0400, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407, 0x0408, 0x0409, // G84 + 0x040a, 0x040b, 0x040c, 0x040d, 0x040e, 0x040f, + 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, // G86 + 0x042a, 0x042b, 0x042c, 0x042d, 0x042e, 0x042f, + 0x0410, 0x0600, 0x0601, 0x0602, 0x0603, 0x0604, 0x0605, 0x0606, 0x0607, 0x0608, // G92 + 0x0609, 0x060a, 0x060b, 0x060c, 0x060f, 0x0610, 0x0611, 0x0612, 0x0613, 0x0614, + 0x0615, 0x0617, 0x0618, 0x0619, 0x061a, 0x061b, 0x061c, 0x061d, 0x061e, 0x061f, // G94 + 0x0621, 0x0622, 0x0623, 0x0625, 0x0626, 0x0627, 0x0628, 0x062a, 0x062b, 0x062c, + 0x062d, 0x062e, 0x0631, 0x0635, 0x0637, 0x0638, 0x063a, + 0x0640, 0x0641, 0x0643, 0x0644, 0x0645, 0x0646, 0x0647, 0x0648, 0x0649, 0x064a, // G96 + 0x064b, 0x064c, 0x0651, 0x0652, 0x0653, 0x0654, 0x0655, 0x0656, 0x0658, 0x0659, + 0x065a, 0x065b, 0x065c, 0x065f, + 0x06e0, 0x06e1, 0x06e2, 0x06e3, 0x06e4, 0x06e6, 0x06e7, 0x06e8, 0x06e9, 0x06ea, // G98 + 0x06eb, 0x06ec, 0x06ef, 0x06f1, 0x06f8, 0x06f9, 0x06fa, 0x06fb, 0x06fd, 0x06ff, + 0x05e0, 0x05e1, 0x05e2, 0x05e3, 0x05e6, 0x05e7, 0x05e9, 0x05ea, 0x05eb, 0x05ed, // G200 + 0x05ee, 0x05ef, + 0x0840, 0x0844, 0x0845, 0x0846, 0x0847, 0x0848, 0x0849, 0x084a, 0x084b, 0x084c, // MCP77 + 0x084d, 0x084f, + 0x0860, 0x0861, 0x0862, 0x0863, 0x0864, 0x0865, 0x0866, 0x0867, 0x0868, 0x0869, // MCP79 + 0x086a, 0x086c, 0x086d, 0x086e, 0x086f, 0x0870, 0x0871, 0x0872, 0x0873, 0x0874, + 0x0876, 0x087a, 0x087d, 0x087e, 0x087f, + 0x0ca0, 0x0ca2, 0x0ca3, 0x0ca2, 0x0ca4, 0x0ca5, 0x0ca7, 0x0ca9, 0x0cac, 0x0caf, // GT215 + 0x0cb0, 0x0cb1, 0x0cbc, + 0x0a20, 0x0a22, 0x0a23, 0x0a26, 0x0a27, 0x0a28, 0x0a29, 0x0a2a, 0x0a2b, 0x0a2c, // GT216 + 0x0a2d, 0x0a32, 0x0a34, 0x0a35, 0x0a38, 0x0a3c, + 0x0a60, 0x0a62, 0x0a63, 0x0a64, 0x0a65, 0x0a66, 0x0a67, 0x0a68, 0x0a69, 0x0a6a, // GT218 + 0x0a6c, 0x0a6e, 0x0a6f, 0x0a70, 0x0a71, 0x0a72, 0x0a73, 0x0a74, 0x0a75, 0x0a76, + 0x0a78, 0x0a7a, 0x0a7c, 0x10c0, 0x10c3, 0x10c5, 0x10d8 + // clang-format on +}; + +extern mozilla::LazyLogModule sPDMLog; +#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__)) + +namespace mozilla { + +using layers::D3D11RecycleAllocator; +using layers::D3D11ShareHandleImage; +using layers::Image; +using layers::ImageContainer; +using namespace layers; +using namespace gfx; + +void GetDXVA2ExtendedFormatFromMFMediaType(IMFMediaType* pType, + DXVA2_ExtendedFormat* pFormat) { + // Get the interlace mode. 
+ MFVideoInterlaceMode interlace = MFVideoInterlaceMode(MFGetAttributeUINT32( + pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown)); + + if (interlace == MFVideoInterlace_MixedInterlaceOrProgressive) { + pFormat->SampleFormat = DXVA2_SampleFieldInterleavedEvenFirst; + } else { + pFormat->SampleFormat = UINT(interlace); + } + + pFormat->VideoChromaSubsampling = MFGetAttributeUINT32( + pType, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown); + pFormat->NominalRange = MFGetAttributeUINT32(pType, MF_MT_VIDEO_NOMINAL_RANGE, + MFNominalRange_Unknown); + pFormat->VideoTransferMatrix = MFGetAttributeUINT32( + pType, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown); + pFormat->VideoLighting = MFGetAttributeUINT32(pType, MF_MT_VIDEO_LIGHTING, + MFVideoLighting_Unknown); + pFormat->VideoPrimaries = MFGetAttributeUINT32(pType, MF_MT_VIDEO_PRIMARIES, + MFVideoPrimaries_Unknown); + pFormat->VideoTransferFunction = MFGetAttributeUINT32( + pType, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown); +} + +HRESULT ConvertMFTypeToDXVAType(IMFMediaType* pType, DXVA2_VideoDesc* pDesc) { + ZeroMemory(pDesc, sizeof(*pDesc)); + + // The D3D format is the first DWORD of the subtype GUID. + GUID subtype = GUID_NULL; + HRESULT hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + pDesc->Format = (D3DFORMAT)subtype.Data1; + + UINT32 width = 0; + UINT32 height = 0; + hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL); + NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL); + pDesc->SampleWidth = width; + pDesc->SampleHeight = height; + + UINT32 fpsNumerator = 0; + UINT32 fpsDenominator = 0; + if (SUCCEEDED(MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &fpsNumerator, + &fpsDenominator))) { + pDesc->InputSampleFreq.Numerator = fpsNumerator; + pDesc->InputSampleFreq.Denominator = fpsDenominator; + + GetDXVA2ExtendedFormatFromMFMediaType(pType, &pDesc->SampleFormat); + pDesc->OutputFrameFreq = pDesc->InputSampleFreq; + if ((pDesc->SampleFormat.SampleFormat == + DXVA2_SampleFieldInterleavedEvenFirst) || + (pDesc->SampleFormat.SampleFormat == + DXVA2_SampleFieldInterleavedOddFirst)) { + pDesc->OutputFrameFreq.Numerator *= 2; + } + } + + return S_OK; +} + +// All GUIDs other than Intel ClearVideo can be found here: +// https://docs.microsoft.com/en-us/windows/win32/medfound/direct3d-12-video-guids +// VLD = Variable-length decoder, FGT = Film grain technology +static const GUID DXVA2_ModeH264_VLD_NoFGT = { + 0x1b81be68, + 0xa0c7, + 0x11d3, + {0xb9, 0x84, 0x00, 0xc0, 0x4f, 0x2e, 0x73, 0xc5}}; + +// Also known as DXVADDI_Intel_ModeH264_E here: +// https://www.intel.com/content/dam/develop/external/us/en/documents/h264-avc-x4500-acceration-esardell-157713.pdf +// Named based on the fact that this is only supported on older ClearVideo +// Intel decoding hardware. 
+static const GUID DXVA2_Intel_ClearVideo_ModeH264_VLD_NoFGT = { + 0x604F8E68, + 0x4951, + 0x4c54, + {0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6}}; + +// VP8 profiles +static const GUID DXVA2_ModeVP8_VLD = { + 0x90b899ea, + 0x3a62, + 0x4705, + {0x88, 0xb3, 0x8d, 0xf0, 0x4b, 0x27, 0x44, 0xe7}}; + +// VP9 profiles +static const GUID DXVA2_ModeVP9_VLD_Profile0 = { + 0x463707f8, + 0xa1d0, + 0x4585, + {0x87, 0x6d, 0x83, 0xaa, 0x6d, 0x60, 0xb8, 0x9e}}; + +static const GUID DXVA2_ModeVP9_VLD_10bit_Profile2 = { + 0xa4c749ef, + 0x6ecf, + 0x48aa, + {0x84, 0x48, 0x50, 0xa7, 0xa1, 0x16, 0x5f, 0xf7}}; + +// AV1 profiles +static const GUID DXVA2_ModeAV1_VLD_Profile0 = { + 0xb8be4ccb, + 0xcf53, + 0x46ba, + {0x8d, 0x59, 0xd6, 0xb8, 0xa6, 0xda, 0x5d, 0x2a}}; + +static const GUID DXVA2_ModeAV1_VLD_Profile1 = { + 0x6936ff0f, + 0x45b1, + 0x4163, + {0x9c, 0xc1, 0x64, 0x6e, 0xf6, 0x94, 0x61, 0x08}}; + +static const GUID DXVA2_ModeAV1_VLD_Profile2 = { + 0x0c5f2aa1, + 0xe541, + 0x4089, + {0xbb, 0x7b, 0x98, 0x11, 0x0a, 0x19, 0xd7, 0xc8}}; + +static const GUID DXVA2_ModeAV1_VLD_12bit_Profile2 = { + 0x17127009, + 0xa00f, + 0x4ce1, + {0x99, 0x4e, 0xbf, 0x40, 0x81, 0xf6, 0xf3, 0xf0}}; + +static const GUID DXVA2_ModeAV1_VLD_12bit_Profile2_420 = { + 0x2d80bed6, + 0x9cac, + 0x4835, + {0x9e, 0x91, 0x32, 0x7b, 0xbc, 0x4f, 0x9e, 0xe8}}; + +// D3D12_VIDEO_DECODE_PROFILE_HEVC_MAIN +static const GUID DXVA2_ModeHEVC_VLD_MAIN = { + 0x5b11d51b, + 0x2f4c, + 0x4452, + {0xbc, 0xc3, 0x09, 0xf2, 0xa1, 0x16, 0x0c, 0xc0}}; + +// D3D12_VIDEO_DECODE_PROFILE_HEVC_MAIN10 +static const GUID DXVA2_ModeHEVC_VLD_MAIN10 = { + 0x107af0e0, + 0xef1a, + 0x4d19, + {0xab, 0xa8, 0x67, 0xa1, 0x63, 0x07, 0x3d, 0x13}}; + +static const char* DecoderGUIDToStr(const GUID& aGuid) { + if (aGuid == DXVA2_ModeH264_VLD_NoFGT) { + return "H264"; + } + if (aGuid == DXVA2_Intel_ClearVideo_ModeH264_VLD_NoFGT) { + return "Intel H264"; + } + if (aGuid == DXVA2_ModeVP8_VLD) { + return "VP8"; + } + if (aGuid == DXVA2_ModeVP9_VLD_Profile0) { + return "VP9 Profile0"; + } + if (aGuid == DXVA2_ModeVP9_VLD_10bit_Profile2) { + return "VP9 10bits Profile2"; + } + if (aGuid == DXVA2_ModeAV1_VLD_Profile0) { + return "AV1 Profile0"; + } + if (aGuid == DXVA2_ModeAV1_VLD_Profile1) { + return "AV1 Profile1"; + } + if (aGuid == DXVA2_ModeAV1_VLD_Profile2) { + return "AV1 Profile2"; + } + if (aGuid == DXVA2_ModeAV1_VLD_12bit_Profile2) { + return "AV1 12bits Profile2"; + } + if (aGuid == DXVA2_ModeAV1_VLD_12bit_Profile2_420) { + return "AV1 12bits Profile2 420"; + } + if (aGuid == DXVA2_ModeHEVC_VLD_MAIN) { + return "HEVC main"; + } + if (aGuid == DXVA2_ModeHEVC_VLD_MAIN10) { + return "HEVC main10"; + } + return "none"; +} + +// Count of the number of DXVAManager's we've created. This is also the +// number of videos we're decoding with DXVA. Use on main thread only. +static Atomic sDXVAVideosCount(0); + +class D3D11DXVA2Manager : public DXVA2Manager { + public: + D3D11DXVA2Manager(); + virtual ~D3D11DXVA2Manager(); + + HRESULT Init(layers::KnowsCompositor* aKnowsCompositor, + nsACString& aFailureReason, ID3D11Device* aDevice); + HRESULT InitInternal(layers::KnowsCompositor* aKnowsCompositor, + nsACString& aFailureReason, ID3D11Device* aDevice); + + IUnknown* GetDXVADeviceManager() override; + + // Copies a region (aRegion) of the video frame stored in aVideoSample + // into an image which is returned by aOutImage. 
+ HRESULT CopyToImage(IMFSample* aVideoSample, const gfx::IntRect& aRegion, + Image** aOutImage) override; + + HRESULT WrapTextureWithImage(IMFSample* aVideoSample, + const gfx::IntRect& aRegion, + layers::Image** aOutImage) override; + + HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture, uint32_t aArrayIndex, + ID3D11Texture2D** aOutTexture) override; + + HRESULT ConfigureForSize(IMFMediaType* aInputType, + gfx::YUVColorSpace aColorSpace, + gfx::ColorRange aColorRange, uint32_t aWidth, + uint32_t aHeight) override; + + bool IsD3D11() override { return true; } + + bool SupportsConfig(const VideoInfo& aInfo, IMFMediaType* aInputType, + IMFMediaType* aOutputType) override; + + void BeforeShutdownVideoMFTDecoder() override; + + bool SupportsZeroCopyNV12Texture() override { + if (mIMFSampleUsageInfo->SupportsZeroCopyNV12Texture() && + (mDevice != DeviceManagerDx::Get()->GetCompositorDevice())) { + mIMFSampleUsageInfo->DisableZeroCopyNV12Texture(); + } + return mIMFSampleUsageInfo->SupportsZeroCopyNV12Texture(); + } + + private: + HRESULT CreateOutputSample(RefPtr& aSample, + ID3D11Texture2D* aTexture); + + bool CanCreateDecoder(const D3D11_VIDEO_DECODER_DESC& aDesc) const; + + already_AddRefed CreateDecoder( + const D3D11_VIDEO_DECODER_DESC& aDesc) const; + void RefreshIMFSampleWrappers(); + void ReleaseAllIMFSamples(); + + RefPtr mDevice; + RefPtr mContext; + RefPtr mDXGIDeviceManager; + RefPtr mTransform; + RefPtr mTextureClientAllocator; + RefPtr mKnowsCompositor; + RefPtr mDecoder; + RefPtr mSyncObject; + uint32_t mWidth = 0; + uint32_t mHeight = 0; + UINT mDeviceManagerToken = 0; + RefPtr mInputType; + GUID mInputSubType; + gfx::YUVColorSpace mYUVColorSpace; + gfx::ColorRange mColorRange = gfx::ColorRange::LIMITED; + std::list> mIMFSampleWrappers; + RefPtr mIMFSampleUsageInfo; + uint32_t mVendorID = 0; +}; + +bool D3D11DXVA2Manager::SupportsConfig(const VideoInfo& aInfo, + IMFMediaType* aInputType, + IMFMediaType* aOutputType) { + D3D11_VIDEO_DECODER_DESC desc = {GUID_NULL, 0, 0, DXGI_FORMAT_UNKNOWN}; + + HRESULT hr = MFGetAttributeSize(aInputType, MF_MT_FRAME_SIZE, + &desc.SampleWidth, &desc.SampleHeight); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + NS_ENSURE_TRUE(desc.SampleWidth <= MAX_VIDEO_WIDTH, false); + NS_ENSURE_TRUE(desc.SampleHeight <= MAX_VIDEO_HEIGHT, false); + + GUID subtype; + hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subtype); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + if (subtype == MFVideoFormat_H264) { + // IsUnsupportedResolution is only used to work around an AMD H264 issue. 
+ const float framerate = [&]() { + UINT32 numerator; + UINT32 denominator; + if (SUCCEEDED(MFGetAttributeRatio(aInputType, MF_MT_FRAME_RATE, + &numerator, &denominator))) { + return static_cast(numerator) / denominator; + } + return 30.0f; + }(); + NS_ENSURE_FALSE( + IsUnsupportedResolution(desc.SampleWidth, desc.SampleHeight, framerate), + false); + NS_ENSURE_TRUE(aInfo.mColorDepth == ColorDepth::COLOR_8, false); + + RefPtr videoDevice; + hr = mDevice->QueryInterface( + static_cast(getter_AddRefs(videoDevice))); + + GUID guids[] = {DXVA2_ModeH264_VLD_NoFGT, + DXVA2_Intel_ClearVideo_ModeH264_VLD_NoFGT}; + for (const GUID& guid : guids) { + BOOL supported = false; + hr = videoDevice->CheckVideoDecoderFormat(&guid, DXGI_FORMAT_NV12, + &supported); + if (SUCCEEDED(hr) && supported) { + desc.Guid = guid; + break; + } + } + } else if (subtype == MFVideoFormat_VP80) { + NS_ENSURE_TRUE(aInfo.mColorDepth == ColorDepth::COLOR_8, false); + desc.Guid = DXVA2_ModeVP8_VLD; + } else if (subtype == MFVideoFormat_VP90) { + NS_ENSURE_TRUE(aInfo.mColorDepth == ColorDepth::COLOR_8 || + aInfo.mColorDepth == ColorDepth::COLOR_10, + false); + uint8_t profile; + + if (aInfo.mExtraData && !aInfo.mExtraData->IsEmpty()) { + VPXDecoder::VPXStreamInfo vp9Info; + VPXDecoder::ReadVPCCBox(vp9Info, aInfo.mExtraData); + profile = vp9Info.mProfile; + } else { + // If no vpcC is present, we can't know the profile, which limits the + // subsampling mode, but 4:2:0 is most supported so default to profiles 0 + // and 2: + // Profile 0 = 8bit, 4:2:0 + // Profile 2 = 10/12bit, 4:2:0 + profile = aInfo.mColorDepth == ColorDepth::COLOR_8 ? 0 : 2; + } + + switch (profile) { + case 0: + desc.Guid = DXVA2_ModeVP9_VLD_Profile0; + break; + case 2: + desc.Guid = DXVA2_ModeVP9_VLD_10bit_Profile2; + break; + default: + break; + } + } else if (subtype == MFVideoFormat_AV1) { + uint8_t profile; + bool yuv420; + + if (aInfo.mExtraData && !aInfo.mExtraData->IsEmpty()) { + AOMDecoder::AV1SequenceInfo av1Info; + bool hadSeqHdr; + AOMDecoder::ReadAV1CBox(aInfo.mExtraData, av1Info, hadSeqHdr); + profile = av1Info.mProfile; + yuv420 = av1Info.mSubsamplingX && av1Info.mSubsamplingY; + } else { + // If no av1C is present, we can't get profile or subsampling mode. 4:2:0 + // subsampling is most likely to be supported in hardware, so set av1Info + // accordingly. + // 8bit/10bit = Main profile, 4:2:0 + // 12bit = Professional, 4:2:0 + profile = aInfo.mColorDepth == ColorDepth::COLOR_12 ? 
2 : 0; + yuv420 = true; + } + + switch (profile) { + case 0: + desc.Guid = DXVA2_ModeAV1_VLD_Profile0; + break; + case 1: + desc.Guid = DXVA2_ModeAV1_VLD_Profile1; + break; + case 2: + MOZ_ASSERT(aInfo.mColorDepth < ColorDepth::COLOR_16); + if (aInfo.mColorDepth == ColorDepth::COLOR_12) { + if (yuv420) { + desc.Guid = DXVA2_ModeAV1_VLD_12bit_Profile2_420; + } else { + desc.Guid = DXVA2_ModeAV1_VLD_12bit_Profile2; + } + } else { + desc.Guid = DXVA2_ModeAV1_VLD_Profile2; + } + break; + default: + break; + } + } else if (subtype == MFVideoFormat_HEVC) { + RefPtr videoDevice; + hr = mDevice->QueryInterface( + static_cast(getter_AddRefs(videoDevice))); + GUID guids[] = {DXVA2_ModeHEVC_VLD_MAIN, DXVA2_ModeHEVC_VLD_MAIN10}; + for (const GUID& guid : guids) { + BOOL supported = false; + hr = videoDevice->CheckVideoDecoderFormat(&guid, DXGI_FORMAT_NV12, + &supported); + if (SUCCEEDED(hr) && supported) { + desc.Guid = guid; + break; + } + } + } + LOG("Select %s GUID", DecoderGUIDToStr(desc.Guid)); + + hr = aOutputType->GetGUID(MF_MT_SUBTYPE, &subtype); + if (SUCCEEDED(hr)) { + if (subtype == MFVideoFormat_NV12) { + desc.OutputFormat = DXGI_FORMAT_NV12; + } else if (subtype == MFVideoFormat_P010) { + desc.OutputFormat = DXGI_FORMAT_P010; + } else if (subtype == MFVideoFormat_P016) { + desc.OutputFormat = DXGI_FORMAT_P016; + } + } + + if (desc.Guid == GUID_NULL || desc.OutputFormat == DXGI_FORMAT_UNKNOWN) { + return false; + } + + return CanCreateDecoder(desc); +} + +D3D11DXVA2Manager::D3D11DXVA2Manager() + : mIMFSampleUsageInfo(new layers::IMFSampleUsageInfo) {} + +D3D11DXVA2Manager::~D3D11DXVA2Manager() {} + +IUnknown* D3D11DXVA2Manager::GetDXVADeviceManager() { + MutexAutoLock lock(mLock); + return mDXGIDeviceManager; +} +HRESULT +D3D11DXVA2Manager::Init(layers::KnowsCompositor* aKnowsCompositor, + nsACString& aFailureReason, ID3D11Device* aDevice) { + if (aDevice) { + return InitInternal(aKnowsCompositor, aFailureReason, aDevice); + } + + HRESULT hr; + ScopedGfxFeatureReporter reporter("DXVA2D3D11"); + + hr = InitInternal(aKnowsCompositor, aFailureReason, aDevice); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (layers::ImageBridgeChild::GetSingleton() || !aKnowsCompositor) { + // There's no proper KnowsCompositor for ImageBridge currently (and it + // implements the interface), so just use that if it's available. + mTextureClientAllocator = new D3D11RecycleAllocator( + layers::ImageBridgeChild::GetSingleton().get(), mDevice, + gfx::SurfaceFormat::NV12); + + if (ImageBridgeChild::GetSingleton() && + StaticPrefs::media_wmf_use_sync_texture_AtStartup() && + mDevice != DeviceManagerDx::Get()->GetCompositorDevice()) { + // We use a syncobject to avoid the cost of the mutex lock when + // compositing, and because it allows color conversion ocurring directly + // from this texture DXVA does not seem to accept IDXGIKeyedMutex textures + // as input. + mSyncObject = layers::SyncObjectClient::CreateSyncObjectClient( + layers::ImageBridgeChild::GetSingleton() + ->GetTextureFactoryIdentifier() + .mSyncHandle, + mDevice); + } + } else { + mTextureClientAllocator = new D3D11RecycleAllocator( + aKnowsCompositor, mDevice, gfx::SurfaceFormat::NV12); + mKnowsCompositor = aKnowsCompositor; + if (StaticPrefs::media_wmf_use_sync_texture_AtStartup()) { + // We use a syncobject to avoid the cost of the mutex lock when + // compositing, and because it allows color conversion ocurring directly + // from this texture DXVA does not seem to accept IDXGIKeyedMutex textures + // as input. 
+ mSyncObject = layers::SyncObjectClient::CreateSyncObjectClient( + aKnowsCompositor->GetTextureFactoryIdentifier().mSyncHandle, mDevice); + } + } + mTextureClientAllocator->SetMaxPoolSize(5); + + Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED, + uint32_t(media::MediaDecoderBackend::WMFDXVA2D3D11)); + + reporter.SetSuccessful(); + + return S_OK; +} + +HRESULT +D3D11DXVA2Manager::InitInternal(layers::KnowsCompositor* aKnowsCompositor, + nsACString& aFailureReason, + ID3D11Device* aDevice) { + HRESULT hr; + + mDevice = aDevice; + + if (!mDevice) { + bool useHardwareWebRender = + aKnowsCompositor && aKnowsCompositor->UsingHardwareWebRender(); + mDevice = + gfx::DeviceManagerDx::Get()->CreateDecoderDevice(useHardwareWebRender); + if (!mDevice) { + aFailureReason.AssignLiteral("Failed to create D3D11 device for decoder"); + return E_FAIL; + } + } + + RefPtr mt; + hr = mDevice->QueryInterface((ID3D10Multithread**)getter_AddRefs(mt)); + NS_ENSURE_TRUE(SUCCEEDED(hr) && mt, hr); + mt->SetMultithreadProtected(TRUE); + + mDevice->GetImmediateContext(getter_AddRefs(mContext)); + + hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken, + getter_AddRefs(mDXGIDeviceManager)); + if (!SUCCEEDED(hr)) { + aFailureReason = + nsPrintfCString("MFCreateDXGIDeviceManager failed with code %lX", hr); + return hr; + } + + hr = mDXGIDeviceManager->ResetDevice(mDevice, mDeviceManagerToken); + if (!SUCCEEDED(hr)) { + aFailureReason = nsPrintfCString( + "IMFDXGIDeviceManager::ResetDevice failed with code %lX", hr); + return hr; + } + + // The IMFTransform interface used by MFTDecoder is documented to require to + // run on an MTA thread. + // https://msdn.microsoft.com/en-us/library/windows/desktop/ee892371(v=vs.85).aspx#components + // The main thread (where this function is called) is STA, not MTA. 
+ RefPtr mft; + mozilla::mscom::EnsureMTA([&]() -> void { + mft = new MFTDecoder(); + hr = mft->Create(MFT_CATEGORY_VIDEO_PROCESSOR, MFVideoFormat_NV12, + MFVideoFormat_ARGB32); + + if (!SUCCEEDED(hr)) { + aFailureReason = nsPrintfCString( + "MFTDecoder::Create of Video Processor MFT for color conversion " + "failed with code %lX", + hr); + return; + } + + hr = mft->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, + ULONG_PTR(mDXGIDeviceManager.get())); + if (!SUCCEEDED(hr)) { + aFailureReason = nsPrintfCString( + "MFTDecoder::SendMFTMessage(MFT_MESSAGE_" + "SET_D3D_MANAGER) failed with code %lX", + hr); + return; + } + }); + + if (!SUCCEEDED(hr)) { + return hr; + } + mTransform = mft; + + RefPtr dxgiDevice; + hr = mDevice->QueryInterface( + static_cast(getter_AddRefs(dxgiDevice))); + if (!SUCCEEDED(hr)) { + aFailureReason = + nsPrintfCString("QI to IDXGIDevice failed with code %lX", hr); + return hr; + } + + RefPtr adapter; + hr = dxgiDevice->GetAdapter(adapter.StartAssignment()); + if (!SUCCEEDED(hr)) { + aFailureReason = + nsPrintfCString("IDXGIDevice::GetAdapter failed with code %lX", hr); + return hr; + } + + DXGI_ADAPTER_DESC adapterDesc; + hr = adapter->GetDesc(&adapterDesc); + if (!SUCCEEDED(hr)) { + aFailureReason = + nsPrintfCString("IDXGIAdapter::GetDesc failed with code %lX", hr); + return hr; + } + + mVendorID = adapterDesc.VendorId; + + if ((adapterDesc.VendorId == 0x1022 || adapterDesc.VendorId == 0x1002) && + !StaticPrefs::media_wmf_skip_blacklist()) { + for (const auto& model : sAMDPreUVD4) { + if (adapterDesc.DeviceId == model) { + mIsAMDPreUVD4 = true; + break; + } + } + } + + if (!IsD3D11() || !XRE_IsGPUProcess() || + (mDevice != DeviceManagerDx::Get()->GetCompositorDevice())) { + mIMFSampleUsageInfo->DisableZeroCopyNV12Texture(); + } + + return S_OK; +} + +HRESULT +D3D11DXVA2Manager::CreateOutputSample(RefPtr& aSample, + ID3D11Texture2D* aTexture) { + RefPtr sample; + HRESULT hr = wmf::MFCreateSample(getter_AddRefs(sample)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr buffer; + hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0, + FALSE, getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = sample->AddBuffer(buffer); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + aSample = sample; + return S_OK; +} + +HRESULT +D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample, + const gfx::IntRect& aRegion, Image** aOutImage) { + NS_ENSURE_TRUE(aVideoSample, E_POINTER); + NS_ENSURE_TRUE(aOutImage, E_POINTER); + MOZ_ASSERT(mTextureClientAllocator); + + RefPtr image = + new D3D11ShareHandleImage(gfx::IntSize(mWidth, mHeight), aRegion, + ToColorSpace2(mYUVColorSpace), mColorRange); + + // Retrieve the DXGI_FORMAT for the current video sample. 
+ RefPtr buffer; + HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr dxgiBuf; + hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr tex; + hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + D3D11_TEXTURE2D_DESC inDesc; + tex->GetDesc(&inDesc); + + bool ok = image->AllocateTexture(mTextureClientAllocator, mDevice); + NS_ENSURE_TRUE(ok, E_FAIL); + + RefPtr client = + image->GetTextureClient(ImageBridgeChild::GetSingleton().get()); + NS_ENSURE_TRUE(client, E_FAIL); + + RefPtr texture = image->GetTexture(); + D3D11_TEXTURE2D_DESC outDesc; + texture->GetDesc(&outDesc); + + RefPtr mutex; + texture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex)); + + { + AutoTextureLock(mutex, hr, 2000); + if (mutex && (FAILED(hr) || hr == WAIT_TIMEOUT || hr == WAIT_ABANDONED)) { + return hr; + } + + if (!mutex && mDevice != DeviceManagerDx::Get()->GetCompositorDevice()) { + NS_ENSURE_TRUE(mSyncObject, E_FAIL); + } + + UINT height = std::min(inDesc.Height, outDesc.Height); + PerformanceRecorder perfRecorder( + MediaStage::CopyDecodedVideo, height); + // The D3D11TextureClientAllocator may return a different texture format + // than preferred. In which case the destination texture will be BGRA32. + if (outDesc.Format == inDesc.Format) { + // Our video frame is stored in a non-sharable ID3D11Texture2D. We need + // to create a copy of that frame as a sharable resource, save its share + // handle, and put that handle into the rendering pipeline. + UINT width = std::min(inDesc.Width, outDesc.Width); + D3D11_BOX srcBox = {0, 0, 0, width, height, 1}; + + UINT index; + dxgiBuf->GetSubresourceIndex(&index); + mContext->CopySubresourceRegion(texture, 0, 0, 0, 0, tex, index, &srcBox); + } else { + // Use MFT to do color conversion. + hr = E_FAIL; + mozilla::mscom::EnsureMTA( + [&]() -> void { hr = mTransform->Input(aVideoSample); }); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr sample; + hr = CreateOutputSample(sample, texture); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = E_FAIL; + mozilla::mscom::EnsureMTA( + [&]() -> void { hr = mTransform->Output(&sample); }); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + perfRecorder.Record(); + } + + if (!mutex && mDevice != DeviceManagerDx::Get()->GetCompositorDevice() && + mSyncObject) { + static StaticMutex sMutex MOZ_UNANNOTATED; + // Ensure that we only ever attempt to synchronise via the sync object + // serially as when using the same D3D11 device for multiple video decoders + // it can lead to deadlocks. + StaticMutexAutoLock lock(sMutex); + // It appears some race-condition may allow us to arrive here even when + // mSyncObject is null. It's better to avoid that crash. + client->SyncWithObject(mSyncObject); + if (!mSyncObject->Synchronize(true)) { + return DXGI_ERROR_DEVICE_RESET; + } + } else if (mDevice == DeviceManagerDx::Get()->GetCompositorDevice() && + mVendorID != 0x8086) { + MOZ_ASSERT(XRE_IsGPUProcess()); + MOZ_ASSERT(mVendorID); + + // Normally when D3D11Texture2D is copied by + // ID3D11DeviceContext::CopySubresourceRegion() with compositor device, + // WebRender does not need to wait copy complete, since WebRender also uses + // compositor device. But with some non-Intel GPUs, the copy complete need + // to be wait explicitly even with compositor device such as when using + // video overlays. 
+ + RefPtr context; + mDevice->GetImmediateContext(getter_AddRefs(context)); + + RefPtr query; + CD3D11_QUERY_DESC desc(D3D11_QUERY_EVENT); + HRESULT hr = mDevice->CreateQuery(&desc, getter_AddRefs(query)); + if (SUCCEEDED(hr) && query) { + context->End(query); + + auto* data = client->GetInternalData()->AsD3D11TextureData(); + MOZ_ASSERT(data); + if (data) { + // Wait query happens only just before blitting for video overlay. + data->RegisterQuery(query); + } else { + gfxCriticalNoteOnce << "D3D11TextureData does not exist"; + } + } else { + gfxCriticalNoteOnce << "Could not create D3D11_QUERY_EVENT: " + << gfx::hexa(hr); + } + } + + image.forget(aOutImage); + + return S_OK; +} + +HRESULT D3D11DXVA2Manager::WrapTextureWithImage(IMFSample* aVideoSample, + const gfx::IntRect& aRegion, + layers::Image** aOutImage) { + NS_ENSURE_TRUE(aVideoSample, E_POINTER); + NS_ENSURE_TRUE(aOutImage, E_POINTER); + + RefPtr buffer; + HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr dxgiBuf; + hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr texture; + hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(texture)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + D3D11_TEXTURE2D_DESC desc; + texture->GetDesc(&desc); + + UINT arrayIndex; + dxgiBuf->GetSubresourceIndex(&arrayIndex); + + RefreshIMFSampleWrappers(); + + RefPtr image = new D3D11TextureIMFSampleImage( + aVideoSample, texture, arrayIndex, gfx::IntSize(mWidth, mHeight), aRegion, + ToColorSpace2(mYUVColorSpace), mColorRange); + image->AllocateTextureClient(mKnowsCompositor, mIMFSampleUsageInfo); + + RefPtr wrapper = image->GetIMFSampleWrapper(); + ThreadSafeWeakPtr weak(wrapper); + mIMFSampleWrappers.push_back(weak); + + image.forget(aOutImage); + + return S_OK; +} + +void D3D11DXVA2Manager::RefreshIMFSampleWrappers() { + for (auto it = mIMFSampleWrappers.begin(); it != mIMFSampleWrappers.end();) { + auto wrapper = RefPtr(*it); + if (!wrapper) { + // wrapper is already destroyed. 
+ it = mIMFSampleWrappers.erase(it); + continue; + } + it++; + } +} + +void D3D11DXVA2Manager::ReleaseAllIMFSamples() { + for (auto it = mIMFSampleWrappers.begin(); it != mIMFSampleWrappers.end(); + it++) { + RefPtr wrapper = RefPtr(*it); + if (wrapper) { + wrapper->ClearVideoSample(); + } + } +} + +void D3D11DXVA2Manager::BeforeShutdownVideoMFTDecoder() { + ReleaseAllIMFSamples(); +} + +HRESULT +D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture, + uint32_t aArrayIndex, + ID3D11Texture2D** aOutTexture) { + NS_ENSURE_TRUE(aInTexture, E_POINTER); + NS_ENSURE_TRUE(aOutTexture, E_POINTER); + + HRESULT hr; + RefPtr texture, inTexture; + + inTexture = aInTexture; + + CD3D11_TEXTURE2D_DESC desc; + aInTexture->GetDesc(&desc); + + if (!mInputType || desc.Width != mWidth || desc.Height != mHeight) { + RefPtr inputType; + hr = wmf::MFCreateMediaType(getter_AddRefs(inputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + const GUID subType = [&]() { + switch (desc.Format) { + case DXGI_FORMAT_NV12: + return MFVideoFormat_NV12; + case DXGI_FORMAT_P010: + return MFVideoFormat_P010; + case DXGI_FORMAT_P016: + return MFVideoFormat_P016; + default: + MOZ_ASSERT_UNREACHABLE("Unexpected texture type"); + return MFVideoFormat_NV12; + } + }(); + + hr = inputType->SetGUID(MF_MT_SUBTYPE, subType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_Progressive); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = ConfigureForSize(inputType, mYUVColorSpace, mColorRange, desc.Width, + desc.Height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + + RefPtr mutex; + inTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex)); + // The rest of this function will not work if inTexture implements + // IDXGIKeyedMutex! In that case case we would have to copy to a + // non-mutex using texture. + + if (mutex) { + RefPtr newTexture; + + desc.MiscFlags = 0; + hr = mDevice->CreateTexture2D(&desc, nullptr, getter_AddRefs(newTexture)); + NS_ENSURE_TRUE(SUCCEEDED(hr) && newTexture, E_FAIL); + + hr = mutex->AcquireSync(0, 2000); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + mContext->CopyResource(newTexture, inTexture); + + mutex->ReleaseSync(0); + inTexture = newTexture; + } + + desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; + desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE; + + hr = mDevice->CreateTexture2D(&desc, nullptr, getter_AddRefs(texture)); + NS_ENSURE_TRUE(SUCCEEDED(hr) && texture, E_FAIL); + + RefPtr inputSample; + wmf::MFCreateSample(getter_AddRefs(inputSample)); + + // If these aren't set the decoder fails. 
+ inputSample->SetSampleTime(10); + inputSample->SetSampleDuration(10000); + + RefPtr inputBuffer; + hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), inTexture, + aArrayIndex, FALSE, + getter_AddRefs(inputBuffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + inputSample->AddBuffer(inputBuffer); + + hr = E_FAIL; + mozilla::mscom::EnsureMTA( + [&]() -> void { hr = mTransform->Input(inputSample); }); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr outputSample; + hr = CreateOutputSample(outputSample, texture); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = E_FAIL; + mozilla::mscom::EnsureMTA( + [&]() -> void { hr = mTransform->Output(&outputSample); }); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + texture.forget(aOutTexture); + + return S_OK; +} + +HRESULT +D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType, + gfx::YUVColorSpace aColorSpace, + gfx::ColorRange aColorRange, + uint32_t aWidth, uint32_t aHeight) { + GUID subType = {0}; + HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (subType == mInputSubType && aWidth == mWidth && aHeight == mHeight && + mYUVColorSpace == aColorSpace && mColorRange == aColorRange) { + // If the media type hasn't changed, don't reconfigure. + return S_OK; + } + + // Create a copy of our input type. + RefPtr inputType; + hr = wmf::MFCreateMediaType(getter_AddRefs(inputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + hr = aInputType->CopyAllItems(inputType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr attr; + mozilla::mscom::EnsureMTA( + [&]() -> void { attr = mTransform->GetAttributes(); }); + NS_ENSURE_TRUE(attr != nullptr, E_FAIL); + + hr = attr->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr outputType; + hr = wmf::MFCreateMediaType(getter_AddRefs(outputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = E_FAIL; + mozilla::mscom::EnsureMTA([&]() -> void { + hr = mTransform->SetMediaTypes( + inputType, outputType, [aWidth, aHeight](IMFMediaType* aOutput) { + HRESULT hr = aOutput->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_Progressive); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, aWidth, aHeight); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + return S_OK; + }); + }); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + mWidth = aWidth; + mHeight = aHeight; + mInputType = inputType; + mInputSubType = subType; + mYUVColorSpace = aColorSpace; + mColorRange = aColorRange; + if (mTextureClientAllocator) { + gfx::SurfaceFormat format = [&]() { + if (subType == MFVideoFormat_NV12) { + return gfx::SurfaceFormat::NV12; + } else if (subType == MFVideoFormat_P010) { + return gfx::SurfaceFormat::P010; + } else if (subType == MFVideoFormat_P016) { + return gfx::SurfaceFormat::P016; + } else { + MOZ_ASSERT_UNREACHABLE("Unexpected texture type"); + return gfx::SurfaceFormat::NV12; + } + }(); + mTextureClientAllocator->SetPreferredSurfaceFormat(format); + } + return S_OK; +} + +bool D3D11DXVA2Manager::CanCreateDecoder( + const 
D3D11_VIDEO_DECODER_DESC& aDesc) const { + RefPtr decoder = CreateDecoder(aDesc); + return decoder.get() != nullptr; +} + +already_AddRefed D3D11DXVA2Manager::CreateDecoder( + const D3D11_VIDEO_DECODER_DESC& aDesc) const { + RefPtr videoDevice; + HRESULT hr = mDevice->QueryInterface( + static_cast(getter_AddRefs(videoDevice))); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + UINT configCount = 0; + hr = videoDevice->GetVideoDecoderConfigCount(&aDesc, &configCount); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + for (UINT i = 0; i < configCount; i++) { + D3D11_VIDEO_DECODER_CONFIG config; + hr = videoDevice->GetVideoDecoderConfig(&aDesc, i, &config); + if (SUCCEEDED(hr)) { + RefPtr decoder; + hr = videoDevice->CreateVideoDecoder(&aDesc, &config, + decoder.StartAssignment()); + return decoder.forget(); + } + } + return nullptr; +} + +/* static */ +DXVA2Manager* DXVA2Manager::CreateD3D11DXVA( + layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason, + ID3D11Device* aDevice) { + // DXVA processing takes up a lot of GPU resources, so limit the number of + // videos we use DXVA with at any one time. + uint32_t dxvaLimit = StaticPrefs::media_wmf_dxva_max_videos(); + + if (sDXVAVideosCount == dxvaLimit) { + aFailureReason.AssignLiteral("Too many DXVA videos playing"); + return nullptr; + } + + UniquePtr manager(new D3D11DXVA2Manager()); + HRESULT hr = manager->Init(aKnowsCompositor, aFailureReason, aDevice); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + return manager.release(); +} + +DXVA2Manager::DXVA2Manager() : mLock("DXVA2Manager") { ++sDXVAVideosCount; } + +DXVA2Manager::~DXVA2Manager() { --sDXVAVideosCount; } + +bool DXVA2Manager::IsUnsupportedResolution(const uint32_t& aWidth, + const uint32_t& aHeight, + const float& aFramerate) const { + // AMD cards with UVD3 or earlier perform poorly trying to decode 1080p60 in + // hardware, so use software instead. Pick 45 as an arbitrary upper bound for + // the framerate we can handle. + return !StaticPrefs::media_wmf_amd_highres_enabled() && mIsAMDPreUVD4 && + (aWidth >= 1920 || aHeight >= 1088) && aFramerate > 45; +} + +/* static */ +bool DXVA2Manager::IsNV12Supported(uint32_t aVendorID, uint32_t aDeviceID, + const nsAString& aDriverVersionString) { + if (aVendorID == 0x1022 || aVendorID == 0x1002) { + // AMD + // Block old cards regardless of driver version. + for (const auto& model : sAMDPreUVD4) { + if (aDeviceID == model) { + return false; + } + } + // AMD driver earlier than 21.19.411.0 have bugs in their handling of NV12 + // surfaces. + uint64_t driverVersion; + if (!widget::ParseDriverVersion(aDriverVersionString, &driverVersion) || + driverVersion < widget::V(21, 19, 411, 0)) { + return false; + } + } else if (aVendorID == 0x10DE) { + // NVidia + for (const auto& model : sNVIDIABrokenNV12) { + if (aDeviceID == model) { + return false; + } + } + } + return true; +} + +} // namespace mozilla + +#undef LOG diff --git a/dom/media/platforms/wmf/DXVA2Manager.h b/dom/media/platforms/wmf/DXVA2Manager.h new file mode 100644 index 0000000000..8743a9f87f --- /dev/null +++ b/dom/media/platforms/wmf/DXVA2Manager.h @@ -0,0 +1,90 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ +#if !defined(DXVA2Manager_h_) +# define DXVA2Manager_h_ + +# include "MediaInfo.h" +# include "WMF.h" +# include "mozilla/Mutex.h" +# include "mozilla/gfx/Rect.h" +# include "d3d11.h" + +namespace mozilla { + +namespace layers { +class Image; +class ImageContainer; +class KnowsCompositor; +} // namespace layers + +class DXVA2Manager { + public: + // Creates and initializes a DXVA2Manager. We can use DXVA2 via D3D11. + static DXVA2Manager* CreateD3D11DXVA( + layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason, + ID3D11Device* aDevice = nullptr); + + // Returns a pointer to the D3D device manager responsible for managing the + // device we're using for hardware accelerated video decoding. For D3D11 this + // is an IMFDXGIDeviceManager. It is safe to call this on any thread. + virtual IUnknown* GetDXVADeviceManager() = 0; + + // Creates an Image for the video frame stored in aVideoSample. + virtual HRESULT CopyToImage(IMFSample* aVideoSample, + const gfx::IntRect& aRegion, + layers::Image** aOutImage) = 0; + + virtual HRESULT WrapTextureWithImage(IMFSample* aVideoSample, + const gfx::IntRect& aRegion, + layers::Image** aOutImage) { + // Not implemented! + MOZ_CRASH("WrapTextureWithImage not implemented on this manager."); + return E_FAIL; + } + + virtual HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture, + uint32_t aArrayIndex, + ID3D11Texture2D** aOutTexture) { + // Not implemented! + MOZ_CRASH("CopyToBGRATexture not implemented on this manager."); + return E_FAIL; + } + + virtual HRESULT ConfigureForSize(IMFMediaType* aInputType, + gfx::YUVColorSpace aColorSpace, + gfx::ColorRange aColorRange, uint32_t aWidth, + uint32_t aHeight) { + return S_OK; + } + + virtual bool IsD3D11() { return false; } + + virtual ~DXVA2Manager(); + + virtual bool SupportsConfig(const VideoInfo& aInfo, IMFMediaType* aInputType, + IMFMediaType* aOutputType) = 0; + + // Called before shutdown video MFTDecoder. + virtual void BeforeShutdownVideoMFTDecoder() {} + + virtual bool SupportsZeroCopyNV12Texture() { return false; } + + static bool IsNV12Supported(uint32_t aVendorID, uint32_t aDeviceID, + const nsAString& aDriverVersionString); + + protected: + Mutex mLock MOZ_UNANNOTATED; + DXVA2Manager(); + + bool IsUnsupportedResolution(const uint32_t& aWidth, const uint32_t& aHeight, + const float& aFramerate) const; + + bool mIsAMDPreUVD4 = false; +}; + +} // namespace mozilla + +#endif // DXVA2Manager_h_ diff --git a/dom/media/platforms/wmf/MFCDMExtra.h b/dom/media/platforms/wmf/MFCDMExtra.h new file mode 100644 index 0000000000..d0ae1c3b97 --- /dev/null +++ b/dom/media/platforms/wmf/MFCDMExtra.h @@ -0,0 +1,19 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFCDMEXTRA_H +#define DOM_MEDIA_PLATFORM_WMF_MFCDMEXTRA_H + +#include + +// See +// https://github.com/microsoft/media-foundation/issues/37#issuecomment-1198317488 +EXTERN_GUID(GUID_ObjectStream, 0x3e73735c, 0xe6c0, 0x481d, 0x82, 0x60, 0xee, + 0x5d, 0xb1, 0x34, 0x3b, 0x5f); +EXTERN_GUID(GUID_ClassName, 0x77631a31, 0xe5e7, 0x4785, 0xbf, 0x17, 0x20, 0xf5, + 0x7b, 0x22, 0x48, 0x02); +EXTERN_GUID(CLSID_EMEStoreActivate, 0x2df7b51e, 0x797b, 0x4d06, 0xbe, 0x71, + 0xd1, 0x4a, 0x52, 0xcf, 0x84, 0x21); + +#endif // DOM_MEDIA_PLATFORM_WMF_MFCDMEXTRA_H diff --git a/dom/media/platforms/wmf/MFCDMProxy.cpp b/dom/media/platforms/wmf/MFCDMProxy.cpp new file mode 100644 index 0000000000..ea63acd643 --- /dev/null +++ b/dom/media/platforms/wmf/MFCDMProxy.cpp @@ -0,0 +1,97 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFCDMProxy.h" + +#include "MFCDMParent.h" +#include "MFMediaEngineUtils.h" + +namespace mozilla { + +using Microsoft::WRL::ComPtr; + +#define LOG(msg, ...) \ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \ + ("MFCDMProxy=%p, " msg, this, ##__VA_ARGS__)) + +MFCDMProxy::MFCDMProxy(IMFContentDecryptionModule* aCDM, uint64_t aCDMParentId) + : mCDM(aCDM), mCDMParentId(aCDMParentId) { + LOG("MFCDMProxy created, created by %" PRId64 " MFCDMParent", mCDMParentId); +} + +MFCDMProxy::~MFCDMProxy() { LOG("MFCDMProxy destroyed"); } + +void MFCDMProxy::Shutdown() { + if (mTrustedInput) { + mTrustedInput = nullptr; + } + for (auto& inputAuthorities : mInputTrustAuthorities) { + SHUTDOWN_IF_POSSIBLE(inputAuthorities.second); + } + mInputTrustAuthorities.clear(); + if (auto* parent = MFCDMParent::GetCDMById(mCDMParentId)) { + parent->ShutdownCDM(); + } + mCDM = nullptr; + LOG("MFCDMProxy Shutdowned"); +} + +HRESULT MFCDMProxy::GetPMPServer(REFIID aRiid, LPVOID* aPMPServerOut) { + ComPtr cdmServices; + RETURN_IF_FAILED(mCDM.As(&cdmServices)); + RETURN_IF_FAILED(cdmServices->GetService(MF_CONTENTDECRYPTIONMODULE_SERVICE, + aRiid, aPMPServerOut)); + return S_OK; +} + +HRESULT MFCDMProxy::GetInputTrustAuthority(uint32_t aStreamId, + const uint8_t* aContentInitData, + uint32_t aContentInitDataSize, + REFIID aRiid, + IUnknown** aInputTrustAuthorityOut) { + if (mInputTrustAuthorities.count(aStreamId)) { + RETURN_IF_FAILED( + mInputTrustAuthorities[aStreamId].CopyTo(aInputTrustAuthorityOut)); + return S_OK; + } + + if (!mTrustedInput) { + RETURN_IF_FAILED(mCDM->CreateTrustedInput( + aContentInitData, aContentInitDataSize, &mTrustedInput)); + LOG("Created a trust input for stream %u", aStreamId); + } + + // GetInputTrustAuthority takes IUnknown* as the output. Using other COM + // interface will have a v-table mismatch issue. 
+ ComPtr unknown; + RETURN_IF_FAILED( + mTrustedInput->GetInputTrustAuthority(aStreamId, aRiid, &unknown)); + + ComPtr inputTrustAuthority; + RETURN_IF_FAILED(unknown.As(&inputTrustAuthority)); + RETURN_IF_FAILED(unknown.CopyTo(aInputTrustAuthorityOut)); + + mInputTrustAuthorities[aStreamId] = inputTrustAuthority; + return S_OK; +} + +HRESULT MFCDMProxy::SetContentEnabler(IUnknown* aRequest, + IMFAsyncResult* aResult) { + LOG("SetContentEnabler"); + ComPtr contentEnabler; + RETURN_IF_FAILED(aRequest->QueryInterface(IID_PPV_ARGS(&contentEnabler))); + return mCDM->SetContentEnabler(contentEnabler.Get(), aResult); +} + +void MFCDMProxy::OnHardwareContextReset() { + LOG("OnHardwareContextReset"); + // Hardware context reset happens, all the crypto sessions are in invalid + // states. So drop everything here. + mTrustedInput.Reset(); + mInputTrustAuthorities.clear(); +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFCDMProxy.h b/dom/media/platforms/wmf/MFCDMProxy.h new file mode 100644 index 0000000000..a2bb2be243 --- /dev/null +++ b/dom/media/platforms/wmf/MFCDMProxy.h @@ -0,0 +1,75 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFCDMPROXY_H +#define DOM_MEDIA_PLATFORM_WMF_MFCDMPROXY_H + +#include +#include +#include +#include +#include + +#include "MFCDMExtra.h" +#include "nsISupportsImpl.h" + +namespace mozilla { + +/** + * MFCDMProxy wraps a IMFContentDecryptionModule and provides some high level + * helper methods in order to allow caller to interact with the wrapped CDM. + */ +class MFCDMProxy { + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFCDMProxy); + + MFCDMProxy(IMFContentDecryptionModule* aCDM, uint64_t aCDMParentId); + + public: + // Return a IMediaProtectionPMPServer from the existing CDM. + HRESULT GetPMPServer(REFIID aRiid, LPVOID* aPMPServerOut); + + // Return a IMFInputTrustAuthority for given stream id, the same stream ID + // always maps to the same IMFInputTrustAuthority. In addition, + // `aContentInitData` is optional initialization data as in + // https://www.w3.org/TR/encrypted-media/#initialization-data + HRESULT GetInputTrustAuthority(uint32_t aStreamId, + const uint8_t* aContentInitData, + uint32_t aContentInitDataSize, REFIID aRiid, + IUnknown** aInputTrustAuthorityOut); + + // Set IMFContentEnabler to the existing CDM, `aRequest` should be a inherited + // class of `IMFContentEnabler`. + HRESULT SetContentEnabler(IUnknown* aRequest, IMFAsyncResult* aResult); + + // Notify the CDM on DRM_E_TEE_INVALID_HWDRM_STATE (0x8004cd12), which happens + // in cases like OS Sleep. In this case, the CDM should close all sessions + // because they are in bad state. + void OnHardwareContextReset(); + + void Shutdown(); + + // TODO : set last key id in order to let CDM use the key IDs information to + // perform some optimization. + + private: + ~MFCDMProxy(); + + Microsoft::WRL::ComPtr mCDM; + + // The same ITA is always mapping to the same stream Id. + std::map> + mInputTrustAuthorities; + + Microsoft::WRL::ComPtr mTrustedInput; + + const uint64_t mCDMParentId; + + // TODO : need some events? (Eg. 
significant playback, error, hardware context + // reset) +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFCDMPROXY_H diff --git a/dom/media/platforms/wmf/MFCDMSession.cpp b/dom/media/platforms/wmf/MFCDMSession.cpp new file mode 100644 index 0000000000..cec783cbc6 --- /dev/null +++ b/dom/media/platforms/wmf/MFCDMSession.cpp @@ -0,0 +1,318 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFCDMSession.h" + +#include +#include +#include + +#include "MFMediaEngineUtils.h" +#include "GMPUtils.h" // ToHexString +#include "mozilla/EMEUtils.h" +#include "mozilla/dom/MediaKeyMessageEventBinding.h" +#include "mozilla/dom/MediaKeyStatusMapBinding.h" +#include "nsThreadUtils.h" + +namespace mozilla { + +using Microsoft::WRL::ComPtr; +using Microsoft::WRL::MakeAndInitialize; + +#define LOG(msg, ...) EME_LOG("MFCDMSession=%p, " msg, this, ##__VA_ARGS__) + +static inline MF_MEDIAKEYSESSION_TYPE ConvertSessionType( + KeySystemConfig::SessionType aType) { + switch (aType) { + case KeySystemConfig::SessionType::Temporary: + return MF_MEDIAKEYSESSION_TYPE_TEMPORARY; + case KeySystemConfig::SessionType::PersistentLicense: + return MF_MEDIAKEYSESSION_TYPE_PERSISTENT_LICENSE; + } +} + +static inline LPCWSTR InitDataTypeToString(const nsAString& aInitDataType) { + // The strings are defined in https://www.w3.org/TR/eme-initdata-registry/ + if (aInitDataType.EqualsLiteral("webm")) { + return L"webm"; + } else if (aInitDataType.EqualsLiteral("cenc")) { + return L"cenc"; + } else if (aInitDataType.EqualsLiteral("keyids")) { + return L"keyids"; + } else { + return L"unknown"; + } +} + +// The callback interface which IMFContentDecryptionModuleSession uses for +// communicating with the session. 
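+// These callbacks are expected to be invoked from MF worker threads; they do
+// nothing but forward the payload into MediaEventProducers, and MFCDMSession
+// connects its listeners to those producers on the manager thread, so no
+// extra locking is needed here.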
+class MFCDMSession::SessionCallbacks final + : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags, + IMFContentDecryptionModuleSessionCallbacks> { + public: + SessionCallbacks() { MOZ_COUNT_CTOR(SessionCallbacks); }; + SessionCallbacks(const SessionCallbacks&) = delete; + SessionCallbacks& operator=(const SessionCallbacks&) = delete; + ~SessionCallbacks() { MOZ_COUNT_DTOR(SessionCallbacks); } + + HRESULT RuntimeClassInitialize() { return S_OK; } + + // IMFContentDecryptionModuleSessionCallbacks + STDMETHODIMP KeyMessage(MF_MEDIAKEYSESSION_MESSAGETYPE aType, + const BYTE* aMessage, DWORD aMessageSize, + LPCWSTR aUrl) final { + CopyableTArray msg{static_cast(aMessage), + aMessageSize}; + mKeyMessageEvent.Notify(aType, std::move(msg)); + return S_OK; + } + + STDMETHODIMP KeyStatusChanged() final { + mKeyChangeEvent.Notify(); + return S_OK; + } + + MediaEventSource>& + KeyMessageEvent() { + return mKeyMessageEvent; + } + MediaEventSource& KeyChangeEvent() { return mKeyChangeEvent; } + + private: + MediaEventProducer> + mKeyMessageEvent; + MediaEventProducer mKeyChangeEvent; +}; + +/* static*/ +MFCDMSession* MFCDMSession::Create(KeySystemConfig::SessionType aSessionType, + IMFContentDecryptionModule* aCdm, + nsISerialEventTarget* aManagerThread) { + MOZ_ASSERT(aCdm); + MOZ_ASSERT(aManagerThread); + ComPtr callbacks; + RETURN_PARAM_IF_FAILED(MakeAndInitialize(&callbacks), + nullptr); + + ComPtr session; + RETURN_PARAM_IF_FAILED(aCdm->CreateSession(ConvertSessionType(aSessionType), + callbacks.Get(), &session), + nullptr); + return new MFCDMSession(session.Get(), callbacks.Get(), aManagerThread); +} + +MFCDMSession::MFCDMSession(IMFContentDecryptionModuleSession* aSession, + SessionCallbacks* aCallback, + nsISerialEventTarget* aManagerThread) + : mSession(aSession), + mManagerThread(aManagerThread), + mExpiredTimeMilliSecondsSinceEpoch( + std::numeric_limits::quiet_NaN()) { + MOZ_ASSERT(aSession); + MOZ_ASSERT(aCallback); + MOZ_ASSERT(aManagerThread); + MOZ_COUNT_CTOR(MFCDMSession); + LOG("MFCDMSession created"); + mKeyMessageListener = aCallback->KeyMessageEvent().Connect( + mManagerThread, this, &MFCDMSession::OnSessionKeyMessage); + mKeyChangeListener = aCallback->KeyChangeEvent().Connect( + mManagerThread, this, &MFCDMSession::OnSessionKeysChange); +} + +MFCDMSession::~MFCDMSession() { + MOZ_COUNT_DTOR(MFCDMSession); + LOG("MFCDMSession destroyed"); + // TODO : maybe disconnect them in `Close()`? + mKeyChangeListener.DisconnectIfExists(); + mKeyMessageListener.DisconnectIfExists(); +} + +HRESULT MFCDMSession::GenerateRequest(const nsAString& aInitDataType, + const uint8_t* aInitData, + uint32_t aInitDataSize) { + AssertOnManagerThread(); + LOG("GenerateRequest for %s (init sz=%u)", + NS_ConvertUTF16toUTF8(aInitDataType).get(), aInitDataSize); + RETURN_IF_FAILED(mSession->GenerateRequest( + InitDataTypeToString(aInitDataType), aInitData, aInitDataSize)); + Unused << RetrieveSessionId(); + return S_OK; +} + +HRESULT MFCDMSession::Load(const nsAString& aSessionId) { + AssertOnManagerThread(); + // TODO : do we need to implement this? Chromium doesn't implement this one. + // Also, how do we know is this given session ID is equal to the session Id + // asked from CDM session or not? + BOOL rv = FALSE; + mSession->Load(char16ptr_t(aSessionId.BeginReading()), &rv); + LOG("Load, id=%s, rv=%s", NS_ConvertUTF16toUTF8(aSessionId).get(), + rv ? "success" : "fail"); + return rv ? 
S_OK : S_FALSE; +} + +HRESULT MFCDMSession::Update(const nsTArray& aMessage) { + AssertOnManagerThread(); + LOG("Update"); + RETURN_IF_FAILED(mSession->Update( + static_cast(aMessage.Elements()), aMessage.Length())); + RETURN_IF_FAILED(UpdateExpirationIfNeeded()); + return S_OK; +} + +HRESULT MFCDMSession::Close() { + AssertOnManagerThread(); + LOG("Close"); + RETURN_IF_FAILED(mSession->Close()); + return S_OK; +} + +HRESULT MFCDMSession::Remove() { + AssertOnManagerThread(); + LOG("Remove"); + RETURN_IF_FAILED(mSession->Remove()); + RETURN_IF_FAILED(UpdateExpirationIfNeeded()); + return S_OK; +} + +bool MFCDMSession::RetrieveSessionId() { + AssertOnManagerThread(); + if (mSessionId) { + return true; + } + ScopedCoMem sessionId; + if (FAILED(mSession->GetSessionId(&sessionId)) || !sessionId) { + LOG("Can't get session id or empty session ID!"); + return false; + } + LOG("Set session Id %ls", sessionId.Get()); + mSessionId = Some(sessionId.Get()); + return true; +} + +void MFCDMSession::OnSessionKeysChange() { + AssertOnManagerThread(); + LOG("OnSessionKeysChange"); + + if (!mSessionId) { + LOG("Unexpected session keys change ignored"); + return; + } + + ScopedCoMem keyStatuses; + UINT count = 0; + RETURN_VOID_IF_FAILED(mSession->GetKeyStatuses(&keyStatuses, &count)); + + static auto ToMediaKeyStatus = [](MF_MEDIAKEY_STATUS aStatus) { + // https://learn.microsoft.com/en-us/windows/win32/api/mfidl/ne-mfidl-mf_mediakey_status + switch (aStatus) { + case MF_MEDIAKEY_STATUS_USABLE: + return dom::MediaKeyStatus::Usable; + case MF_MEDIAKEY_STATUS_EXPIRED: + return dom::MediaKeyStatus::Expired; + case MF_MEDIAKEY_STATUS_OUTPUT_DOWNSCALED: + return dom::MediaKeyStatus::Output_downscaled; + // This is for legacy use and should not happen in normal cases. Map it to + // internal error in case it happens. + case MF_MEDIAKEY_STATUS_OUTPUT_NOT_ALLOWED: + return dom::MediaKeyStatus::Internal_error; + case MF_MEDIAKEY_STATUS_STATUS_PENDING: + return dom::MediaKeyStatus::Status_pending; + case MF_MEDIAKEY_STATUS_INTERNAL_ERROR: + return dom::MediaKeyStatus::Internal_error; + case MF_MEDIAKEY_STATUS_RELEASED: + return dom::MediaKeyStatus::Released; + case MF_MEDIAKEY_STATUS_OUTPUT_RESTRICTED: + return dom::MediaKeyStatus::Output_restricted; + } + MOZ_ASSERT_UNREACHABLE("Invalid MF_MEDIAKEY_STATUS enum value"); + return dom::MediaKeyStatus::Internal_error; + }; + + CopyableTArray keyInfos; + for (uint32_t idx = 0; idx < count; idx++) { + const MFMediaKeyStatus& keyStatus = keyStatuses[idx]; + if (keyStatus.cbKeyId != sizeof(GUID)) { + LOG("Key ID with unsupported size ignored"); + continue; + } + CopyableTArray keyId; + ByteArrayFromGUID(reinterpret_cast(keyStatus.pbKeyId), keyId); + + nsAutoCString keyIdString(ToHexString(keyId)); + LOG("Append keyid-sz=%u, keyid=%s, status=%s", keyStatus.cbKeyId, + keyIdString.get(), + ToMediaKeyStatusStr(ToMediaKeyStatus(keyStatus.eMediaKeyStatus))); + keyInfos.AppendElement(MFCDMKeyInformation{ + std::move(keyId), ToMediaKeyStatus(keyStatus.eMediaKeyStatus)}); + } + LOG("Notify 'keychange' for %s", NS_ConvertUTF16toUTF8(*mSessionId).get()); + mKeyChangeEvent.Notify( + MFCDMKeyStatusChange{*mSessionId, std::move(keyInfos)}); + + // ScopedCoMem only releases memory for |keyStatuses|. We + // need to manually release memory for |pbKeyId| here. 
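+  // Each MFMediaKeyStatus entry is roughly { BYTE* pbKeyId; UINT cbKeyId;
+  // MF_MEDIAKEY_STATUS eMediaKeyStatus }, with pbKeyId pointing at its own
+  // CoTaskMemAlloc'd buffer, hence the per-entry CoTaskMemFree below.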
+ for (size_t idx = 0; idx < count; idx++) { + if (const auto& keyStatus = keyStatuses[idx]; keyStatus.pbKeyId) { + CoTaskMemFree(keyStatus.pbKeyId); + } + } +} + +HRESULT MFCDMSession::UpdateExpirationIfNeeded() { + AssertOnManagerThread(); + MOZ_ASSERT(mSessionId); + + // The msdn document doesn't mention the unit for the expiration time, + // follow chromium's implementation to treat them as millisecond. + double newExpiredEpochTimeMs = 0.0; + RETURN_IF_FAILED(mSession->GetExpiration(&newExpiredEpochTimeMs)); + + if (newExpiredEpochTimeMs == mExpiredTimeMilliSecondsSinceEpoch || + (std::isnan(newExpiredEpochTimeMs) && + std::isnan(mExpiredTimeMilliSecondsSinceEpoch))) { + return S_OK; + } + + LOG("Session expiration change from %f to %f, notify 'expiration' for %s", + mExpiredTimeMilliSecondsSinceEpoch, newExpiredEpochTimeMs, + NS_ConvertUTF16toUTF8(*mSessionId).get()); + mExpiredTimeMilliSecondsSinceEpoch = newExpiredEpochTimeMs; + mExpirationEvent.Notify( + MFCDMKeyExpiration{*mSessionId, mExpiredTimeMilliSecondsSinceEpoch}); + return S_OK; +} + +void MFCDMSession::OnSessionKeyMessage( + const MF_MEDIAKEYSESSION_MESSAGETYPE& aType, + const nsTArray& aMessage) { + AssertOnManagerThread(); + // Only send key message after the session Id is ready. + if (!RetrieveSessionId()) { + return; + } + static auto ToMediaKeyMessageType = [](MF_MEDIAKEYSESSION_MESSAGETYPE aType) { + switch (aType) { + case MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_REQUEST: + return dom::MediaKeyMessageType::License_request; + case MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_RENEWAL: + return dom::MediaKeyMessageType::License_renewal; + case MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_RELEASE: + return dom::MediaKeyMessageType::License_release; + case MF_MEDIAKEYSESSION_MESSAGETYPE_INDIVIDUALIZATION_REQUEST: + return dom::MediaKeyMessageType::Individualization_request; + default: + MOZ_ASSERT_UNREACHABLE("Unknown session message type"); + return dom::MediaKeyMessageType::EndGuard_; + } + }; + LOG("Notify 'keymessage' for %s", NS_ConvertUTF16toUTF8(*mSessionId).get()); + mKeyMessageEvent.Notify(MFCDMKeyMessage{ + *mSessionId, ToMediaKeyMessageType(aType), std::move(aMessage)}); +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFCDMSession.h b/dom/media/platforms/wmf/MFCDMSession.h new file mode 100644 index 0000000000..44b7c3b239 --- /dev/null +++ b/dom/media/platforms/wmf/MFCDMSession.h @@ -0,0 +1,93 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFCDMSESSION_H +#define DOM_MEDIA_PLATFORM_WMF_MFCDMSESSION_H + +#include +#include +#include + +#include "MFCDMExtra.h" +#include "MediaEventSource.h" +#include "mozilla/PMFCDM.h" +#include "mozilla/KeySystemConfig.h" +#include "nsAString.h" + +namespace mozilla { + +// MFCDMSession represents a key session defined by the EME spec, it operates +// the IMFContentDecryptionModuleSession directly and forward events from +// IMFContentDecryptionModuleSession to its caller. It's not thread-safe and +// can only be used on the manager thread for now. 
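+// Typical lifecycle mirrors the EME MediaKeySession it backs, e.g. (sketch,
+// with `cdm`, `managerThread` and `initData` standing in for real objects):
+//   MFCDMSession* session = MFCDMSession::Create(
+//       KeySystemConfig::SessionType::Temporary, cdm, managerThread);
+//   session->GenerateRequest(u"cenc"_ns, initData, initDataSize);
+//   // handle KeyMessageEvent()/KeyChangeEvent(), then Update(), Close().
+// The session id only becomes available once the CDM reports it, which is
+// why SessionID() returns a Maybe.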
+class MFCDMSession final { + public: + ~MFCDMSession(); + + static MFCDMSession* Create(KeySystemConfig::SessionType aSessionType, + IMFContentDecryptionModule* aCdm, + nsISerialEventTarget* aManagerThread); + + // APIs corresponding to EME APIs (MediaKeySession) + HRESULT GenerateRequest(const nsAString& aInitDataType, + const uint8_t* aInitData, uint32_t aInitDataSize); + HRESULT Load(const nsAString& aSessionId); + HRESULT Update(const nsTArray& aMessage); + HRESULT Close(); + HRESULT Remove(); + + // Session status related events + MediaEventSource& KeyMessageEvent() { + return mKeyMessageEvent; + } + MediaEventSource& KeyChangeEvent() { + return mKeyChangeEvent; + } + MediaEventSource& ExpirationEvent() { + return mExpirationEvent; + } + + const Maybe& SessionID() const { return mSessionId; } + + private: + class SessionCallbacks; + + MFCDMSession(IMFContentDecryptionModuleSession* aSession, + SessionCallbacks* aCallback, + nsISerialEventTarget* aManagerThread); + MFCDMSession(const MFCDMSession&) = delete; + MFCDMSession& operator=(const MFCDMSession&) = delete; + + bool RetrieveSessionId(); + void OnSessionKeysChange(); + void OnSessionKeyMessage(const MF_MEDIAKEYSESSION_MESSAGETYPE& aType, + const nsTArray& aMessage); + + HRESULT UpdateExpirationIfNeeded(); + + void AssertOnManagerThread() const { + MOZ_ASSERT(mManagerThread->IsOnCurrentThread()); + } + + const Microsoft::WRL::ComPtr mSession; + const nsCOMPtr mManagerThread; + + MediaEventProducer mKeyMessageEvent; + MediaEventProducer mKeyChangeEvent; + MediaEventProducer mExpirationEvent; + MediaEventListener mKeyMessageListener; + MediaEventListener mKeyChangeListener; + + // IMFContentDecryptionModuleSession's id might not be ready immediately after + // the session gets created. + Maybe mSessionId; + + // NaN when the CDM doesn't explicitly define the time or the time never + // expires. + double mExpiredTimeMilliSecondsSinceEpoch; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFCDMSESSION_H diff --git a/dom/media/platforms/wmf/MFContentProtectionManager.cpp b/dom/media/platforms/wmf/MFContentProtectionManager.cpp new file mode 100644 index 0000000000..18b7ea4d27 --- /dev/null +++ b/dom/media/platforms/wmf/MFContentProtectionManager.cpp @@ -0,0 +1,171 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFContentProtectionManager.h" + +#include + +#include "MFMediaEngineUtils.h" +#include "WMF.h" +#include "WMFUtils.h" + +namespace mozilla { + +using Microsoft::WRL::ComPtr; + +#define LOG(msg, ...) 
\ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \ + ("MFContentProtectionManager=%p, " msg, this, ##__VA_ARGS__)) + +MFContentProtectionManager::MFContentProtectionManager() { + MOZ_COUNT_CTOR(MFContentProtectionManager); + LOG("MFContentProtectionManager created"); +} + +MFContentProtectionManager::~MFContentProtectionManager() { + MOZ_COUNT_DTOR(MFContentProtectionManager); + LOG("MFContentProtectionManager destroyed"); +} + +HRESULT MFContentProtectionManager::RuntimeClassInitialize() { + ScopedHString propertyId( + RuntimeClass_Windows_Foundation_Collections_PropertySet); + RETURN_IF_FAILED(RoActivateInstance(propertyId.Get(), &mPMPServerSet)); + return S_OK; +} + +HRESULT MFContentProtectionManager::BeginEnableContent( + IMFActivate* aEnablerActivate, IMFTopology* aTopology, + IMFAsyncCallback* aCallback, IUnknown* aState) { + LOG("BeginEnableContent"); + ComPtr unknownObject; + ComPtr asyncResult; + RETURN_IF_FAILED( + wmf::MFCreateAsyncResult(nullptr, aCallback, aState, &asyncResult)); + RETURN_IF_FAILED( + aEnablerActivate->ActivateObject(IID_PPV_ARGS(&unknownObject))); + + GUID enablerType = GUID_NULL; + ComPtr contentEnabler; + if (SUCCEEDED(unknownObject.As(&contentEnabler))) { + RETURN_IF_FAILED(contentEnabler->GetEnableType(&enablerType)); + } else { + ComPtr + serviceRequest; + RETURN_IF_FAILED(unknownObject.As(&serviceRequest)); + RETURN_IF_FAILED(serviceRequest->get_Type(&enablerType)); + } + + if (enablerType == MFENABLETYPE_MF_RebootRequired) { + LOG("Error - MFENABLETYPE_MF_RebootRequired"); + return MF_E_REBOOT_REQUIRED; + } else if (enablerType == MFENABLETYPE_MF_UpdateRevocationInformation) { + LOG("Error - MFENABLETYPE_MF_UpdateRevocationInformation"); + return MF_E_GRL_VERSION_TOO_LOW; + } else if (enablerType == MFENABLETYPE_MF_UpdateUntrustedComponent) { + LOG("Error - MFENABLETYPE_MF_UpdateUntrustedComponent"); + return HRESULT_FROM_WIN32(ERROR_INVALID_IMAGE_HASH); + } + + if (!mCDMProxy) { + return MF_E_SHUTDOWN; + } + RETURN_IF_FAILED( + mCDMProxy->SetContentEnabler(unknownObject.Get(), asyncResult.Get())); + + // TODO : maybe need to notify waiting for key status? + LOG("Finished BeginEnableContent"); + return S_OK; +} + +HRESULT MFContentProtectionManager::EndEnableContent( + IMFAsyncResult* aAsyncResult) { + HRESULT hr = aAsyncResult->GetStatus(); + if (FAILED(hr)) { + // Follow Chromium to not to return failure, which avoid doing additional + // work here. + LOG("Content enabling failed. 
hr=%lx", hr); + } else { + LOG("Content enabling succeeded"); + } + return S_OK; +} + +HRESULT MFContentProtectionManager::add_ServiceRequested( + ABI::Windows::Media::Protection::IServiceRequestedEventHandler* aHandler, + EventRegistrationToken* aCookie) { + return E_NOTIMPL; +} + +HRESULT MFContentProtectionManager::remove_ServiceRequested( + EventRegistrationToken aCookie) { + return E_NOTIMPL; +} + +HRESULT MFContentProtectionManager::add_RebootNeeded( + ABI::Windows::Media::Protection::IRebootNeededEventHandler* aHandler, + EventRegistrationToken* aCookie) { + return E_NOTIMPL; +} + +HRESULT MFContentProtectionManager::remove_RebootNeeded( + EventRegistrationToken aCookie) { + return E_NOTIMPL; +} + +HRESULT MFContentProtectionManager::add_ComponentLoadFailed( + ABI::Windows::Media::Protection::IComponentLoadFailedEventHandler* aHandler, + EventRegistrationToken* aCookie) { + return E_NOTIMPL; +} + +HRESULT MFContentProtectionManager::remove_ComponentLoadFailed( + EventRegistrationToken aCookie) { + return E_NOTIMPL; +} + +HRESULT MFContentProtectionManager::get_Properties( + ABI::Windows::Foundation::Collections::IPropertySet** properties) { + if (!mPMPServerSet) { + return E_POINTER; + } + return mPMPServerSet.CopyTo(properties); +} + +HRESULT MFContentProtectionManager::SetCDMProxy(MFCDMProxy* aCDMProxy) { + MOZ_ASSERT(aCDMProxy); + mCDMProxy = aCDMProxy; + ComPtr pmpServer; + RETURN_IF_FAILED(mCDMProxy->GetPMPServer(IID_PPV_ARGS(&pmpServer))); + RETURN_IF_FAILED(SetPMPServer(pmpServer.Get())); + return S_OK; +} + +HRESULT MFContentProtectionManager::SetPMPServer( + ABI::Windows::Media::Protection::IMediaProtectionPMPServer* aPMPServer) { + MOZ_ASSERT(aPMPServer); + + ComPtr> + serverMap; + RETURN_IF_FAILED(mPMPServerSet.As(&serverMap)); + + // MFMediaEngine uses |serverKey| to get the Protected Media Path (PMP) + // server used for playing protected content. This is not currently documented + // in MSDN. + boolean replaced = false; + ScopedHString serverKey{L"Windows.Media.Protection.MediaProtectionPMPServer"}; + RETURN_IF_FAILED(serverMap->Insert(serverKey.Get(), aPMPServer, &replaced)); + return S_OK; +} + +void MFContentProtectionManager::Shutdown() { + if (mCDMProxy) { + mCDMProxy->Shutdown(); + mCDMProxy = nullptr; + } +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFContentProtectionManager.h b/dom/media/platforms/wmf/MFContentProtectionManager.h new file mode 100644 index 0000000000..1428a21af7 --- /dev/null +++ b/dom/media/platforms/wmf/MFContentProtectionManager.h @@ -0,0 +1,81 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFCONTENTPROTECTIONMANAGER_H +#define DOM_MEDIA_PLATFORM_WMF_MFCONTENTPROTECTIONMANAGER_H + +#include +#include +#include +#include + +#include "MFCDMProxy.h" + +namespace mozilla { + +/** + * MFContentProtectionManager is used to enable the encrypted playback for the + * media engine. 
+ * https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nn-mfidl-imfcontentprotectionmanager + * https://docs.microsoft.com/en-us/uwp/api/windows.media.protection.mediaprotectionmanager + */ +class MFContentProtectionManager + : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags< + Microsoft::WRL::RuntimeClassType::WinRtClassicComMix | + Microsoft::WRL::RuntimeClassType::InhibitRoOriginateError>, + IMFContentProtectionManager, + ABI::Windows::Media::Protection::IMediaProtectionManager> { + public: + MFContentProtectionManager(); + ~MFContentProtectionManager(); + + HRESULT RuntimeClassInitialize(); + + void Shutdown(); + + // IMFContentProtectionManager. + IFACEMETHODIMP BeginEnableContent(IMFActivate* aEnablerActivate, + IMFTopology* aTopology, + IMFAsyncCallback* aCallback, + IUnknown* aState) override; + IFACEMETHODIMP EndEnableContent(IMFAsyncResult* aAsyncResult) override; + + // IMediaProtectionManager. + // MFMediaEngine can query this interface to invoke get_Properties(). + IFACEMETHODIMP add_ServiceRequested( + ABI::Windows::Media::Protection::IServiceRequestedEventHandler* aHandler, + EventRegistrationToken* aCookie) override; + IFACEMETHODIMP remove_ServiceRequested( + EventRegistrationToken aCookie) override; + IFACEMETHODIMP add_RebootNeeded( + ABI::Windows::Media::Protection::IRebootNeededEventHandler* aHandler, + EventRegistrationToken* aCookie) override; + IFACEMETHODIMP remove_RebootNeeded(EventRegistrationToken aCookie) override; + IFACEMETHODIMP add_ComponentLoadFailed( + ABI::Windows::Media::Protection::IComponentLoadFailedEventHandler* + aHandler, + EventRegistrationToken* aCookie) override; + IFACEMETHODIMP remove_ComponentLoadFailed( + EventRegistrationToken aCookie) override; + IFACEMETHODIMP get_Properties( + ABI::Windows::Foundation::Collections::IPropertySet** aValue) override; + + HRESULT SetCDMProxy(MFCDMProxy* aCDMProxy); + + MFCDMProxy* GetCDMProxy() const { return mCDMProxy; } + + private: + HRESULT SetPMPServer( + ABI::Windows::Media::Protection::IMediaProtectionPMPServer* aPMPServer); + + RefPtr mCDMProxy; + + Microsoft::WRL::ComPtr + mPMPServerSet; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFCONTENTPROTECTIONMANAGER_H diff --git a/dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp b/dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp new file mode 100644 index 0000000000..4acf26e041 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp @@ -0,0 +1,137 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFMediaEngineAudioStream.h" + +#include +#include + +#include "MFMediaEngineUtils.h" +#include "WMFUtils.h" +#include "mozilla/StaticPrefs_media.h" + +namespace mozilla { + +#define LOG(msg, ...) 
\ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \ + ("MFMediaStream=%p (%s), " msg, this, \ + this->GetDescriptionName().get(), ##__VA_ARGS__)) + +using Microsoft::WRL::ComPtr; +using Microsoft::WRL::MakeAndInitialize; + +/* static */ +MFMediaEngineAudioStream* MFMediaEngineAudioStream::Create( + uint64_t aStreamId, const TrackInfo& aInfo, MFMediaSource* aParentSource) { + MOZ_ASSERT(aInfo.IsAudio()); + MFMediaEngineAudioStream* stream; + if (FAILED(MakeAndInitialize( + &stream, aStreamId, aInfo, aParentSource))) { + return nullptr; + } + return stream; +} + +HRESULT MFMediaEngineAudioStream::CreateMediaType(const TrackInfo& aInfo, + IMFMediaType** aMediaType) { + const AudioInfo& info = *aInfo.GetAsAudioInfo(); + mAudioInfo = info; + GUID subType = AudioMimeTypeToMediaFoundationSubtype(info.mMimeType); + NS_ENSURE_TRUE(subType != GUID_NULL, MF_E_TOPO_CODEC_NOT_FOUND); + + // https://docs.microsoft.com/en-us/windows/win32/medfound/media-type-attributes + ComPtr mediaType; + RETURN_IF_FAILED(wmf::MFCreateMediaType(&mediaType)); + RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_SUBTYPE, subType)); + RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio)); + RETURN_IF_FAILED( + mediaType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, info.mChannels)); + RETURN_IF_FAILED( + mediaType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, info.mRate)); + uint64_t bitDepth = info.mBitDepth != 0 ? info.mBitDepth : 16; + RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitDepth)); + if (subType == MFAudioFormat_AAC) { + if (mAACUserData.IsEmpty()) { + MOZ_ASSERT(info.mCodecSpecificConfig.is() || + info.mCodecSpecificConfig.is()); + RefPtr blob; + if (info.mCodecSpecificConfig.is()) { + blob = info.mCodecSpecificConfig.as() + .mDecoderConfigDescriptorBinaryBlob; + } else { + blob = info.mCodecSpecificConfig.as() + .mBinaryBlob; + } + AACAudioSpecificConfigToUserData(info.mExtendedProfile, blob->Elements(), + blob->Length(), mAACUserData); + LOG("Generated AAC user data"); + } + RETURN_IF_FAILED( + mediaType->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x0)); // Raw AAC packet + RETURN_IF_FAILED(mediaType->SetBlob( + MF_MT_USER_DATA, mAACUserData.Elements(), mAACUserData.Length())); + } + LOG("Created audio type, subtype=%s, channel=%" PRIu32 ", rate=%" PRIu32 + ", bitDepth=%" PRIu64 ", encrypted=%d", + GUIDToStr(subType), info.mChannels, info.mRate, bitDepth, + mAudioInfo.mCrypto.IsEncrypted()); + + if (IsEncrypted()) { + ComPtr protectedMediaType; + RETURN_IF_FAILED(wmf::MFWrapMediaType(mediaType.Get(), + MFMediaType_Protected, subType, + protectedMediaType.GetAddressOf())); + LOG("Wrap MFMediaType_Audio into MFMediaType_Protected"); + *aMediaType = protectedMediaType.Detach(); + } else { + *aMediaType = mediaType.Detach(); + } + return S_OK; +} + +bool MFMediaEngineAudioStream::HasEnoughRawData() const { + // If more than this much raw audio is queued, we'll hold off request more + // audio. + return mRawDataQueueForFeedingEngine.Duration() >= + StaticPrefs::media_wmf_media_engine_raw_data_threshold_audio(); +} + +already_AddRefed MFMediaEngineAudioStream::OutputDataInternal() { + AssertOnTaskQueue(); + if (mRawDataQueueForGeneratingOutput.GetSize() == 0) { + return nullptr; + } + // The media engine doesn't provide a way to allow us to access decoded audio + // frames, and the audio playback will be handled internally inside the media + // engine. So we simply return fake audio data. 
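+  // The AudioData created below therefore carries only the timing fields of
+  // the popped input sample plus an empty AlignedAudioBuffer; callers should
+  // treat it as a bookkeeping token rather than decoded PCM.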
+ RefPtr input = mRawDataQueueForGeneratingOutput.PopFront(); + RefPtr output = + new AudioData(input->mOffset, input->mTime, AlignedAudioBuffer{}, + mAudioInfo.mChannels, mAudioInfo.mRate); + return output.forget(); +} + +nsCString MFMediaEngineAudioStream::GetCodecName() const { + WMFStreamType type = GetStreamTypeFromMimeType(mAudioInfo.mMimeType); + switch (type) { + case WMFStreamType::MP3: + return "mp3"_ns; + case WMFStreamType::AAC: + return "aac"_ns; + case WMFStreamType::OPUS: + return "opus"_ns; + case WMFStreamType::VORBIS: + return "vorbis"_ns; + default: + return "unknown"_ns; + } +} + +bool MFMediaEngineAudioStream::IsEncrypted() const { + return mAudioInfo.mCrypto.IsEncrypted(); +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaEngineAudioStream.h b/dom/media/platforms/wmf/MFMediaEngineAudioStream.h new file mode 100644 index 0000000000..14a72b9f63 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineAudioStream.h @@ -0,0 +1,51 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEAUDIOSTREAM_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEAUDIOSTREAM_H + +#include "MFMediaEngineStream.h" + +namespace mozilla { + +class MFMediaSource; + +class MFMediaEngineAudioStream final : public MFMediaEngineStream { + public: + MFMediaEngineAudioStream() = default; + + static MFMediaEngineAudioStream* Create(uint64_t aStreamId, + const TrackInfo& aInfo, + MFMediaSource* aParentSource); + + nsCString GetDescriptionName() const override { + return "media engine audio stream"_ns; + } + + nsCString GetCodecName() const override; + + TrackInfo::TrackType TrackType() override { + return TrackInfo::TrackType::kAudioTrack; + } + + bool IsEncrypted() const override; + + private: + HRESULT CreateMediaType(const TrackInfo& aInfo, + IMFMediaType** aMediaType) override; + + bool HasEnoughRawData() const override; + + already_AddRefed OutputDataInternal() override; + + // For MF_MT_USER_DATA. Currently only used for AAC. + nsTArray mAACUserData; + + // Set when `CreateMediaType()` is called. + AudioInfo mAudioInfo; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEAUDIOSTREAM_H diff --git a/dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp new file mode 100644 index 0000000000..5b99fb0f2c --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp @@ -0,0 +1,185 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFMediaEngineDecoderModule.h" + +#include "MFTDecoder.h" +#include "VideoUtils.h" +#include "mozilla/MFMediaEngineParent.h" +#include "mozilla/MFMediaEngineUtils.h" +#include "mozilla/RemoteDecoderManagerChild.h" +#include "mozilla/RemoteDecoderModule.h" +#include "mozilla/StaticPrefs_media.h" +#include "mozilla/WindowsVersion.h" +#include "mozilla/mscom/EnsureMTA.h" + +namespace mozilla { + +#define LOG(msg, ...) \ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, (msg, ##__VA_ARGS__)) + +/* static */ +void MFMediaEngineDecoderModule::Init() { + // TODO : Init any thing that media engine would need. Implement this when we + // start implementing media engine in following patches. 
+} + +/* static */ +already_AddRefed MFMediaEngineDecoderModule::Create() { + RefPtr module = new MFMediaEngineDecoderModule(); + return module.forget(); +} + +/* static */ +bool MFMediaEngineDecoderModule::SupportsConfig(const TrackInfo& aConfig) { + RefPtr module = RemoteDecoderModule::Create( + RemoteDecodeIn::UtilityProcess_MFMediaEngineCDM); + return !module->Supports(SupportDecoderParams(aConfig), nullptr).isEmpty(); +} + +already_AddRefed +MFMediaEngineDecoderModule::CreateVideoDecoder( + const CreateDecoderParams& aParams) { + if (!aParams.mMediaEngineId || + !StaticPrefs::media_wmf_media_engine_enabled()) { + return nullptr; + } + RefPtr mediaEngine = + MFMediaEngineParent::GetMediaEngineById(*aParams.mMediaEngineId); + if (!mediaEngine) { + LOG("Can't find media engine %" PRIu64 " for video decoder", + *aParams.mMediaEngineId); + return nullptr; + } + LOG("MFMediaEngineDecoderModule, CreateVideoDecoder"); + RefPtr decoder = mediaEngine->GetMediaEngineStream( + TrackInfo::TrackType::kVideoTrack, aParams); + return decoder.forget(); +} + +already_AddRefed +MFMediaEngineDecoderModule::CreateAudioDecoder( + const CreateDecoderParams& aParams) { + if (!aParams.mMediaEngineId || + !StaticPrefs::media_wmf_media_engine_enabled()) { + return nullptr; + } + RefPtr mediaEngine = + MFMediaEngineParent::GetMediaEngineById(*aParams.mMediaEngineId); + if (!mediaEngine) { + LOG("Can't find media engine %" PRIu64 " for audio decoder", + *aParams.mMediaEngineId); + return nullptr; + } + LOG("MFMediaEngineDecoderModule, CreateAudioDecoder"); + RefPtr decoder = mediaEngine->GetMediaEngineStream( + TrackInfo::TrackType::kAudioTrack, aParams); + return decoder.forget(); +} + +media::DecodeSupportSet MFMediaEngineDecoderModule::SupportsMimeType( + const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const { + UniquePtr trackInfo = CreateTrackInfoWithMIMEType(aMimeType); + if (!trackInfo) { + return media::DecodeSupportSet{}; + } + return SupportInternal(SupportDecoderParams(*trackInfo), aDiagnostics); +} + +media::DecodeSupportSet MFMediaEngineDecoderModule::Supports( + const SupportDecoderParams& aParams, + DecoderDoctorDiagnostics* aDiagnostics) const { + return SupportInternal(aParams, aDiagnostics); +} + +media::DecodeSupportSet MFMediaEngineDecoderModule::SupportInternal( + const SupportDecoderParams& aParams, + DecoderDoctorDiagnostics* aDiagnostics) const { + if (!StaticPrefs::media_wmf_media_engine_enabled()) { + return media::DecodeSupportSet{}; + } + bool supports = false; + WMFStreamType type = GetStreamTypeFromMimeType(aParams.MimeType()); + if (type != WMFStreamType::Unknown) { + supports = CanCreateMFTDecoder(type); + } + MOZ_LOG(sPDMLog, LogLevel::Debug, + ("MFMediaEngine decoder %s requested type '%s'", + supports ? "supports" : "rejects", aParams.MimeType().get())); + // We only support HEVC hardware decoding. + if (supports && type == WMFStreamType::HEVC) { + return media::DecodeSupport::HardwareDecode; + } + // TODO : find a way to report accurate result. + return supports ? 
media::DecodeSupport::SoftwareDecode + : media::DecodeSupportSet{}; +} + +static bool CreateMFTDecoderOnMTA(const WMFStreamType& aType) { + RefPtr decoder = new MFTDecoder(); + static std::unordered_map sResults; + if (auto rv = sResults.find(aType); rv != sResults.end()) { + return rv->second; + } + + bool result = false; + switch (aType) { + case WMFStreamType::MP3: + result = SUCCEEDED(decoder->Create(CLSID_CMP3DecMediaObject)); + break; + case WMFStreamType::AAC: + result = SUCCEEDED(decoder->Create(CLSID_CMSAACDecMFT)); + break; + // Opus and vorbis are supported via extension. + // https://www.microsoft.com/en-us/p/web-media-extensions/9n5tdp8vcmhs + case WMFStreamType::OPUS: + result = SUCCEEDED(decoder->Create(CLSID_MSOpusDecoder)); + break; + case WMFStreamType::VORBIS: + result = SUCCEEDED(decoder->Create( + MFT_CATEGORY_AUDIO_DECODER, MFAudioFormat_Vorbis, MFAudioFormat_PCM)); + break; + case WMFStreamType::H264: + result = SUCCEEDED(decoder->Create(CLSID_CMSH264DecoderMFT)); + break; + case WMFStreamType::VP8: + case WMFStreamType::VP9: { + static const uint32_t VPX_USABLE_BUILD = 16287; + if (IsWindows10BuildOrLater(VPX_USABLE_BUILD)) { + result = SUCCEEDED(decoder->Create(CLSID_CMSVPXDecMFT)); + } + break; + } +#ifdef MOZ_AV1 + case WMFStreamType::AV1: + result = SUCCEEDED(decoder->Create(MFT_CATEGORY_VIDEO_DECODER, + MFVideoFormat_AV1, GUID_NULL)); + break; +#endif + case WMFStreamType::HEVC: + if (StaticPrefs::media_wmf_hevc_enabled()) { + result = + SUCCEEDED(decoder->Create(MFT_CATEGORY_VIDEO_DECODER, + MFVideoFormat_HEVC, MFVideoFormat_NV12)); + } + break; + default: + MOZ_ASSERT_UNREACHABLE("Unexpected type"); + } + sResults.insert({aType, result}); + return result; +} + +bool MFMediaEngineDecoderModule::CanCreateMFTDecoder( + const WMFStreamType& aType) const { + // TODO : caching the result to prevent performing on MTA thread everytime. + bool canCreateDecoder = false; + mozilla::mscom::EnsureMTA( + [&]() { canCreateDecoder = CreateMFTDecoderOnMTA(aType); }); + return canCreateDecoder; +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaEngineDecoderModule.h b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.h new file mode 100644 index 0000000000..c23b9010cc --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.h @@ -0,0 +1,47 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEDECODERMODULE_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEDECODERMODULE_H + +#include "PlatformDecoderModule.h" +#include "WMFUtils.h" + +namespace mozilla { + +class MFMediaEngineDecoderModule final : public PlatformDecoderModule { + public: + static void Init(); + + static already_AddRefed Create(); + + // Used in the content process to query if the config is supported or not. + // If in the MFCDM process, should use SupportsMimeType or Supports instead. 
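+  // E.g. (names illustrative only):
+  //   if (MFMediaEngineDecoderModule::SupportsConfig(aTrackInfo)) {
+  //     // the media engine playback path can be used for this config
+  //   }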
+ static bool SupportsConfig(const TrackInfo& aConfig); + + already_AddRefed CreateVideoDecoder( + const CreateDecoderParams& aParams) override; + + already_AddRefed CreateAudioDecoder( + const CreateDecoderParams& aParams) override; + + media::DecodeSupportSet SupportsMimeType( + const nsACString& aMimeType, + DecoderDoctorDiagnostics* aDiagnostics) const override; + media::DecodeSupportSet Supports( + const SupportDecoderParams& aParams, + DecoderDoctorDiagnostics* aDiagnostics) const override; + + private: + media::DecodeSupportSet SupportInternal( + const SupportDecoderParams& aParams, + DecoderDoctorDiagnostics* aDiagnostics) const; + bool CanCreateMFTDecoder(const WMFStreamType& aType) const; + MFMediaEngineDecoderModule() = default; + ~MFMediaEngineDecoderModule() = default; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEDECODERMODULE_H diff --git a/dom/media/platforms/wmf/MFMediaEngineExtension.cpp b/dom/media/platforms/wmf/MFMediaEngineExtension.cpp new file mode 100644 index 0000000000..eb761da364 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineExtension.cpp @@ -0,0 +1,88 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFMediaEngineExtension.h" + +#include +#include + +#include "MFMediaSource.h" +#include "MFMediaEngineUtils.h" +#include "WMF.h" + +namespace mozilla { + +#define LOG(msg, ...) \ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \ + ("MFMediaEngineExtension=%p, " msg, this, ##__VA_ARGS__)) + +using Microsoft::WRL::ComPtr; + +void MFMediaEngineExtension::SetMediaSource(IMFMediaSource* aMediaSource) { + LOG("SetMediaSource=%p", aMediaSource); + mMediaSource = aMediaSource; +} + +// https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nf-mfmediaengine-imfmediaengineextension-begincreateobject +IFACEMETHODIMP MFMediaEngineExtension::BeginCreateObject( + BSTR aUrl, IMFByteStream* aByteStream, MF_OBJECT_TYPE aType, + IUnknown** aCancelCookie, IMFAsyncCallback* aCallback, IUnknown* aState) { + if (aCancelCookie) { + // We don't support a cancel cookie. + *aCancelCookie = nullptr; + } + + if (aType != MF_OBJECT_MEDIASOURCE) { + LOG("Only support media source type"); + return MF_E_UNEXPECTED; + } + + MOZ_ASSERT(mMediaSource); + ComPtr result; + ComPtr sourceUnknown = mMediaSource; + RETURN_IF_FAILED(wmf::MFCreateAsyncResult(sourceUnknown.Get(), aCallback, + aState, &result)); + RETURN_IF_FAILED(result->SetStatus(S_OK)); + + LOG("Creating object"); + mIsObjectCreating = true; + + RETURN_IF_FAILED(aCallback->Invoke(result.Get())); + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineExtension::CancelObjectCreation( + IUnknown* aCancelCookie) { + return MF_E_UNEXPECTED; +} + +IFACEMETHODIMP MFMediaEngineExtension::EndCreateObject(IMFAsyncResult* aResult, + IUnknown** aRetObj) { + *aRetObj = nullptr; + if (!mIsObjectCreating) { + LOG("No object is creating, not an expected call"); + return MF_E_UNEXPECTED; + } + + RETURN_IF_FAILED(aResult->GetStatus()); + RETURN_IF_FAILED(aResult->GetObject(aRetObj)); + + LOG("End of creating object"); + mIsObjectCreating = false; + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineExtension::CanPlayType( + BOOL aIsAudioOnly, BSTR aMimeType, MF_MEDIA_ENGINE_CANPLAY* aResult) { + // We use MF_MEDIA_ENGINE_EXTENSION to resolve as custom media source for + // MFMediaEngine, MIME types are not used. 
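+  // Reporting "not supported" is intentional: playback is set up through
+  // SetMediaSource() and BeginCreateObject() with our custom IMFMediaSource,
+  // not through MIME-type based source resolution.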
+ *aResult = MF_MEDIA_ENGINE_CANPLAY_NOT_SUPPORTED; + return S_OK; +} + +// TODO : break cycle of mMediaSource + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaEngineExtension.h b/dom/media/platforms/wmf/MFMediaEngineExtension.h new file mode 100644 index 0000000000..e6b9dde96d --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineExtension.h @@ -0,0 +1,49 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTENSION_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTENSION_H + +#include + +#include "MFMediaEngineExtra.h" + +namespace mozilla { + +/** + * MFMediaEngineNotify is used to load media resources in the media engine. + * https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nn-mfmediaengine-imfmediaengineextension + */ +class MFMediaEngineExtension final + : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags< + Microsoft::WRL::RuntimeClassType::ClassicCom>, + IMFMediaEngineExtension> { + public: + MFMediaEngineExtension() = default; + + HRESULT RuntimeClassInitialize() { return S_OK; } + + void SetMediaSource(IMFMediaSource* aMediaSource); + + // Method for MFMediaEngineExtension + IFACEMETHODIMP BeginCreateObject(BSTR aUrl, IMFByteStream* aByteStream, + MF_OBJECT_TYPE aType, + IUnknown** aCancelCookie, + IMFAsyncCallback* aCallback, + IUnknown* aState) override; + IFACEMETHODIMP CancelObjectCreation(IUnknown* aCancelCookie) override; + IFACEMETHODIMP EndCreateObject(IMFAsyncResult* aResult, + IUnknown** aRetObj) override; + IFACEMETHODIMP CanPlayType(BOOL aIsAudioOnly, BSTR aMimeType, + MF_MEDIA_ENGINE_CANPLAY* aResult) override; + + private: + bool mIsObjectCreating = false; + Microsoft::WRL::ComPtr mMediaSource; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTENSION_H diff --git a/dom/media/platforms/wmf/MFMediaEngineExtra.h b/dom/media/platforms/wmf/MFMediaEngineExtra.h new file mode 100644 index 0000000000..145d5dff68 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineExtra.h @@ -0,0 +1,12 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTRA_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTRA_H + +#include +#include +#include + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H diff --git a/dom/media/platforms/wmf/MFMediaEngineNotify.cpp b/dom/media/platforms/wmf/MFMediaEngineNotify.cpp new file mode 100644 index 0000000000..4c47b7baec --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineNotify.cpp @@ -0,0 +1,25 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "MFMediaEngineNotify.h" + +#include "MFMediaEngineUtils.h" + +namespace mozilla { + +IFACEMETHODIMP MFMediaEngineNotify::EventNotify(DWORD aEvent, DWORD_PTR aParam1, + DWORD aParam2) { + auto event = static_cast(aEvent); + MFMediaEngineEventWrapper engineEvent{event}; + if (event == MF_MEDIA_ENGINE_EVENT_ERROR || + event == MF_MEDIA_ENGINE_EVENT_FORMATCHANGE || + event == MF_MEDIA_ENGINE_EVENT_NOTIFYSTABLESTATE) { + engineEvent.mParam1 = Some(aParam1); + engineEvent.mParam2 = Some(aParam2); + } + mEngineEvents.Notify(engineEvent); + return S_OK; +} + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaEngineNotify.h b/dom/media/platforms/wmf/MFMediaEngineNotify.h new file mode 100644 index 0000000000..9e42e115c0 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineNotify.h @@ -0,0 +1,55 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H + +#include + +#include "MediaEventSource.h" +#include "MFMediaEngineExtra.h" +#include "mozilla/Maybe.h" + +namespace mozilla { + +const char* MediaEngineEventToStr(MF_MEDIA_ENGINE_EVENT aEvent); + +// https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/ne-mfmediaengine-mf_media_engine_event +struct MFMediaEngineEventWrapper final { + explicit MFMediaEngineEventWrapper(MF_MEDIA_ENGINE_EVENT aEvent) + : mEvent(aEvent) {} + MF_MEDIA_ENGINE_EVENT mEvent; + Maybe mParam1; + Maybe mParam2; +}; + +/** + * MFMediaEngineNotify is used to handle the event sent from the media engine. + * https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nn-mfmediaengine-imfmediaenginenotify + */ +class MFMediaEngineNotify final + : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags< + Microsoft::WRL::RuntimeClassType::ClassicCom>, + IMFMediaEngineNotify> { + public: + MFMediaEngineNotify() = default; + + HRESULT RuntimeClassInitialize() { return S_OK; } + + // Method for IMFMediaEngineNotify + IFACEMETHODIMP EventNotify(DWORD aEvent, DWORD_PTR aParam1, + DWORD aParam2) override; + + MediaEventSource& MediaEngineEvent() { + return mEngineEvents; + } + + private: + MediaEventProducer mEngineEvents; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H diff --git a/dom/media/platforms/wmf/MFMediaEngineStream.cpp b/dom/media/platforms/wmf/MFMediaEngineStream.cpp new file mode 100644 index 0000000000..6dce37ee35 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineStream.cpp @@ -0,0 +1,596 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFMediaEngineStream.h" +#include + +#include "AudioConverter.h" +#include "MFMediaSource.h" +#include "MFMediaEngineUtils.h" +#include "TimeUnits.h" +#include "mozilla/ProfilerLabels.h" +#include "mozilla/ProfilerMarkerTypes.h" +#include "WMF.h" +#include "WMFUtils.h" + +namespace mozilla { + +// Don't use this log on the task queue, because it would be racy for `mStream`. +#define WLOGV(msg, ...) 
\ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Verbose, \ + ("MFMediaEngineStreamWrapper for stream %p (%s, id=%lu), " msg, \ + mStream.Get(), mStream->GetDescriptionName().get(), \ + mStream->DescriptorId(), ##__VA_ARGS__)) + +#define SLOG(msg, ...) \ + MOZ_LOG( \ + gMFMediaEngineLog, LogLevel::Debug, \ + ("MFMediaStream=%p (%s, id=%lu), " msg, this, \ + this->GetDescriptionName().get(), this->DescriptorId(), ##__VA_ARGS__)) + +#define SLOGV(msg, ...) \ + MOZ_LOG( \ + gMFMediaEngineLog, LogLevel::Verbose, \ + ("MFMediaStream=%p (%s, id=%lu), " msg, this, \ + this->GetDescriptionName().get(), this->DescriptorId(), ##__VA_ARGS__)) + +using Microsoft::WRL::ComPtr; + +RefPtr MFMediaEngineStreamWrapper::Init() { + MOZ_ASSERT(mStream->DescriptorId(), "Stream hasn't been initialized!"); + WLOGV("Init"); + return InitPromise::CreateAndResolve(mStream->TrackType(), __func__); +} + +RefPtr MFMediaEngineStreamWrapper::Decode( + MediaRawData* aSample) { + WLOGV("Decode"); + if (!mStream || mStream->IsShutdown()) { + return DecodePromise::CreateAndReject( + MediaResult(NS_ERROR_FAILURE, "MFMediaEngineStreamWrapper is shutdown"), + __func__); + } + RefPtr sample = aSample; + return InvokeAsync(mTaskQueue, mStream.Get(), __func__, + &MFMediaEngineStream::OutputData, std::move(sample)); +} + +RefPtr MFMediaEngineStreamWrapper::Drain() { + WLOGV("Drain"); + if (!mStream || mStream->IsShutdown()) { + return DecodePromise::CreateAndReject( + MediaResult(NS_ERROR_FAILURE, "MFMediaEngineStreamWrapper is shutdown"), + __func__); + } + return InvokeAsync(mTaskQueue, mStream.Get(), __func__, + &MFMediaEngineStream::Drain); +} + +RefPtr MFMediaEngineStreamWrapper::Flush() { + WLOGV("Flush"); + if (!mStream || mStream->IsShutdown()) { + return FlushPromise::CreateAndReject( + MediaResult(NS_ERROR_FAILURE, "MFMediaEngineStreamWrapper is shutdown"), + __func__); + } + return InvokeAsync(mTaskQueue, mStream.Get(), __func__, + &MFMediaEngineStream::Flush); +} + +RefPtr MFMediaEngineStreamWrapper::Shutdown() { + // Stream shutdown is controlled by the media source, so we don't need to call + // its shutdown. + WLOGV("Disconnect wrapper"); + if (!mStream) { + // This promise must only ever be resolved. See the definition of the + // original abstract function. + return ShutdownPromise::CreateAndResolve(false, __func__); + } + mStream = nullptr; + mTaskQueue = nullptr; + return ShutdownPromise::CreateAndResolve(true, __func__); +} + +nsCString MFMediaEngineStreamWrapper::GetDescriptionName() const { + return mStream ? mStream->GetDescriptionName() : nsLiteralCString("none"); +} + +nsCString MFMediaEngineStreamWrapper::GetCodecName() const { + return mStream ? mStream->GetCodecName() : nsLiteralCString("none"); +} + +MediaDataDecoder::ConversionRequired +MFMediaEngineStreamWrapper::NeedsConversion() const { + return mStream ? 
mStream->NeedsConversion() + : MediaDataDecoder::ConversionRequired::kNeedNone; +} + +MFMediaEngineStream::MFMediaEngineStream() + : mIsShutdown(false), mIsSelected(false), mReceivedEOS(false) { + MOZ_COUNT_CTOR(MFMediaEngineStream); +} + +MFMediaEngineStream::~MFMediaEngineStream() { + MOZ_ASSERT(IsShutdown()); + MOZ_COUNT_DTOR(MFMediaEngineStream); +} + +HRESULT MFMediaEngineStream::RuntimeClassInitialize( + uint64_t aStreamId, const TrackInfo& aInfo, MFMediaSource* aParentSource) { + mParentSource = aParentSource; + mTaskQueue = aParentSource->GetTaskQueue(); + MOZ_ASSERT(mTaskQueue); + mStreamId = aStreamId; + RETURN_IF_FAILED(wmf::MFCreateEventQueue(&mMediaEventQueue)); + + ComPtr mediaType; + // The inherited stream would return different type based on their media info. + RETURN_IF_FAILED(CreateMediaType(aInfo, mediaType.GetAddressOf())); + RETURN_IF_FAILED(GenerateStreamDescriptor(mediaType)); + SLOG("Initialized %s (id=%" PRIu64 ", descriptorId=%lu)", + GetDescriptionName().get(), aStreamId, mStreamDescriptorId); + return S_OK; +} + +HRESULT MFMediaEngineStream::GenerateStreamDescriptor( + ComPtr& aMediaType) { + RETURN_IF_FAILED(wmf::MFCreateStreamDescriptor( + mStreamId, 1 /* stream amount */, aMediaType.GetAddressOf(), + &mStreamDescriptor)); + RETURN_IF_FAILED( + mStreamDescriptor->GetStreamIdentifier(&mStreamDescriptorId)); + if (IsEncrypted()) { + RETURN_IF_FAILED(mStreamDescriptor->SetUINT32(MF_SD_PROTECTED, 1)); + } + return S_OK; +} + +HRESULT MFMediaEngineStream::Start(const PROPVARIANT* aPosition) { + AssertOnMFThreadPool(); + if (!IsSelected()) { + SLOG("No need to start non-selected stream"); + return S_OK; + } + if (IsShutdown()) { + return MF_E_SHUTDOWN; + } + SLOG("Start"); + const bool isFromCurrentPosition = aPosition->vt == VT_EMPTY; + RETURN_IF_FAILED(QueueEvent(MEStreamStarted, GUID_NULL, S_OK, aPosition)); + MOZ_ASSERT(mTaskQueue); + Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction( + "MFMediaEngineStream::Start", + [self = RefPtr{this}, isFromCurrentPosition, this]() { + if (!isFromCurrentPosition && IsEnded()) { + SLOG("Stream restarts again from a new position, reset EOS"); + mReceivedEOS = false; + } + // Process pending requests (if any) which happened when the stream + // wasn't allowed to serve samples. Eg. stream is paused. Or resend the + // ended event if the stream is ended already. + ReplySampleRequestIfPossible(); + })); + return S_OK; +} + +HRESULT MFMediaEngineStream::Seek(const PROPVARIANT* aPosition) { + AssertOnMFThreadPool(); + if (!IsSelected()) { + SLOG("No need to seek non-selected stream"); + return S_OK; + } + SLOG("Seek"); + RETURN_IF_FAILED(QueueEvent(MEStreamSeeked, GUID_NULL, S_OK, aPosition)); + return S_OK; +} + +HRESULT MFMediaEngineStream::Stop() { + AssertOnMFThreadPool(); + if (!IsSelected()) { + SLOG("No need to stop non-selected stream"); + return S_OK; + } + SLOG("Stop"); + RETURN_IF_FAILED(QueueEvent(MEStreamStopped, GUID_NULL, S_OK, nullptr)); + return S_OK; +} + +HRESULT MFMediaEngineStream::Pause() { + AssertOnMFThreadPool(); + if (!IsSelected()) { + SLOG("No need to pause non-selected stream"); + return S_OK; + } + SLOG("Pause"); + RETURN_IF_FAILED(QueueEvent(MEStreamPaused, GUID_NULL, S_OK, nullptr)); + return S_OK; +} + +void MFMediaEngineStream::Shutdown() { + AssertOnMFThreadPool(); + if (IsShutdown()) { + return; + } + SLOG("Shutdown"); + mIsShutdown = true; + // After this method is called, all IMFMediaEventQueue methods return + // MF_E_SHUTDOWN. 
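+  // The remaining members are only touched on the task queue, so their
+  // cleanup is dispatched there below; mTaskQueue itself is released as the
+  // final step of that runnable.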
+ RETURN_VOID_IF_FAILED(mMediaEventQueue->Shutdown()); + ComPtr self = this; + MOZ_ASSERT(mTaskQueue); + Unused << mTaskQueue->Dispatch( + NS_NewRunnableFunction("MFMediaEngineStream::Shutdown", [self]() { + self->mParentSource = nullptr; + self->mRawDataQueueForFeedingEngine.Reset(); + self->mRawDataQueueForGeneratingOutput.Reset(); + self->ShutdownCleanUpOnTaskQueue(); + self->mTaskQueue = nullptr; + })); +} + +IFACEMETHODIMP +MFMediaEngineStream::GetMediaSource(IMFMediaSource** aMediaSource) { + AssertOnMFThreadPool(); + if (IsShutdown()) { + return MF_E_SHUTDOWN; + } + RETURN_IF_FAILED(mParentSource.CopyTo(aMediaSource)); + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineStream::GetStreamDescriptor( + IMFStreamDescriptor** aStreamDescriptor) { + AssertOnMFThreadPool(); + if (IsShutdown()) { + return MF_E_SHUTDOWN; + } + if (!mStreamDescriptor) { + SLOG("Hasn't initialized stream descriptor"); + return MF_E_NOT_INITIALIZED; + } + RETURN_IF_FAILED(mStreamDescriptor.CopyTo(aStreamDescriptor)); + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineStream::RequestSample(IUnknown* aToken) { + AssertOnMFThreadPool(); + if (IsShutdown()) { + return MF_E_SHUTDOWN; + } + + ComPtr token = aToken; + ComPtr self = this; + MOZ_ASSERT(mTaskQueue); + Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction( + "MFMediaEngineStream::RequestSample", [token, self, this]() { + AssertOnTaskQueue(); + mSampleRequestTokens.push(token); + SLOGV("RequestSample, token amount=%zu", mSampleRequestTokens.size()); + ReplySampleRequestIfPossible(); + if (!HasEnoughRawData() && mParentSource && !IsEnded()) { + SendRequestSampleEvent(false /* isEnough */); + } + })); + return S_OK; +} + +void MFMediaEngineStream::ReplySampleRequestIfPossible() { + AssertOnTaskQueue(); + if (IsEnded()) { + // We have no more sample to return, clean all pending requests. + while (!mSampleRequestTokens.empty()) { + mSampleRequestTokens.pop(); + } + + SLOG("Notify end events"); + MOZ_ASSERT(mRawDataQueueForFeedingEngine.GetSize() == 0); + MOZ_ASSERT(mSampleRequestTokens.empty()); + RETURN_VOID_IF_FAILED(mMediaEventQueue->QueueEventParamUnk( + MEEndOfStream, GUID_NULL, S_OK, nullptr)); + mEndedEvent.Notify(TrackType()); + PROFILER_MARKER_TEXT( + "MFMediaEngineStream:NotifyEnd", MEDIA_PLAYBACK, {}, + nsPrintfCString("stream=%s, id=%" PRIu64, GetDescriptionName().get(), + mStreamId)); + return; + } + + if (mSampleRequestTokens.empty() || + mRawDataQueueForFeedingEngine.GetSize() == 0) { + return; + } + + if (!ShouldServeSamples()) { + SLOGV("Not deliver samples if the stream is not started"); + return; + } + + // Push data into the mf media event queue if the media engine is already + // waiting for data. 
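+  // One pending token is consumed per sample: it is attached to the sample
+  // via MFSampleExtension_Token so the engine can match the MEMediaSample
+  // event against its earlier RequestSample() call.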
+ ComPtr inputSample; + RETURN_VOID_IF_FAILED(CreateInputSample(inputSample.GetAddressOf())); + ComPtr token = mSampleRequestTokens.front(); + RETURN_VOID_IF_FAILED( + inputSample->SetUnknown(MFSampleExtension_Token, token.Get())); + mSampleRequestTokens.pop(); + RETURN_VOID_IF_FAILED(mMediaEventQueue->QueueEventParamUnk( + MEMediaSample, GUID_NULL, S_OK, inputSample.Get())); +} + +bool MFMediaEngineStream::ShouldServeSamples() const { + AssertOnTaskQueue(); + return mParentSource && + mParentSource->GetState() == MFMediaSource::State::Started && + mIsSelected; +} + +HRESULT MFMediaEngineStream::CreateInputSample(IMFSample** aSample) { + AssertOnTaskQueue(); + + ComPtr sample; + RETURN_IF_FAILED(wmf::MFCreateSample(&sample)); + + MOZ_ASSERT(mRawDataQueueForFeedingEngine.GetSize() != 0); + RefPtr data = mRawDataQueueForFeedingEngine.PopFront(); + SLOGV("CreateInputSample, pop data [%" PRId64 ", %" PRId64 + "] (duration=%" PRId64 ", kf=%d), queue size=%zu", + data->mTime.ToMicroseconds(), data->GetEndTime().ToMicroseconds(), + data->mDuration.ToMicroseconds(), data->mKeyframe, + mRawDataQueueForFeedingEngine.GetSize()); + PROFILER_MARKER( + nsPrintfCString( + "pop %s (stream=%" PRIu64 ")", + TrackType() == TrackInfo::TrackType::kVideoTrack ? "video" : "audio", + mStreamId), + MEDIA_PLAYBACK, {}, MediaSampleMarker, data->mTime.ToMicroseconds(), + data->GetEndTime().ToMicroseconds(), + mRawDataQueueForFeedingEngine.GetSize()); + + // Copy data into IMFMediaBuffer + ComPtr buffer; + BYTE* dst = nullptr; + DWORD maxLength = 0; + RETURN_IF_FAILED( + wmf::MFCreateMemoryBuffer(data->Size(), buffer.GetAddressOf())); + RETURN_IF_FAILED(buffer->Lock(&dst, &maxLength, 0)); + memcpy(dst, data->Data(), data->Size()); + RETURN_IF_FAILED(buffer->Unlock()); + RETURN_IF_FAILED(buffer->SetCurrentLength(data->Size())); + + // Setup sample attributes + RETURN_IF_FAILED(sample->AddBuffer(buffer.Get())); + RETURN_IF_FAILED( + sample->SetSampleTime(UsecsToHNs(data->mTime.ToMicroseconds()))); + RETURN_IF_FAILED( + sample->SetSampleDuration(UsecsToHNs(data->mDuration.ToMicroseconds()))); + if (data->mKeyframe) { + RETURN_IF_FAILED(sample->SetUINT32(MFSampleExtension_CleanPoint, 1)); + } + + // Setup encrypt attributes + if (data->mCrypto.IsEncrypted()) { + RETURN_IF_FAILED(AddEncryptAttributes(sample.Get(), data->mCrypto)); + } + + *aSample = sample.Detach(); + return S_OK; +} + +HRESULT MFMediaEngineStream::AddEncryptAttributes( + IMFSample* aSample, const CryptoSample& aCryptoConfig) { + // Scheme + MFSampleEncryptionProtectionScheme protectionScheme; + if (aCryptoConfig.mCryptoScheme == CryptoScheme::Cenc) { + protectionScheme = MFSampleEncryptionProtectionScheme:: + MF_SAMPLE_ENCRYPTION_PROTECTION_SCHEME_AES_CTR; + } else if (aCryptoConfig.mCryptoScheme == CryptoScheme::Cbcs) { + protectionScheme = MFSampleEncryptionProtectionScheme:: + MF_SAMPLE_ENCRYPTION_PROTECTION_SCHEME_AES_CBC; + } else { + SLOG("Unexpected encryption scheme"); + return MF_E_UNEXPECTED; + } + RETURN_IF_FAILED(aSample->SetUINT32( + MFSampleExtension_Encryption_ProtectionScheme, protectionScheme)); + + // KID + if (aCryptoConfig.mKeyId.Length() != sizeof(GUID)) { + SLOG("Unsupported key ID size (%zu)", aCryptoConfig.mKeyId.Length()); + return MF_E_UNEXPECTED; + } + GUID keyId; + GUIDFromByteArray(aCryptoConfig.mKeyId, keyId); + RETURN_IF_FAILED(aSample->SetGUID(MFSampleExtension_Content_KeyID, keyId)); + // TODO : if we want to suspend/resume the media engine, then we can consider + // to store last key id and set it in CDM to refresh the 
decryptor. + + // IV + RETURN_IF_FAILED(aSample->SetBlob( + MFSampleExtension_Encryption_SampleID, + reinterpret_cast(aCryptoConfig.mIV.Elements()), + aCryptoConfig.mIVSize)); + + // Subsample entries. + MOZ_ASSERT(aCryptoConfig.mEncryptedSizes.Length() == + aCryptoConfig.mPlainSizes.Length()); + size_t numSubsamples = aCryptoConfig.mEncryptedSizes.Length(); + if (numSubsamples != 0) { + std::vector subsampleEntries; + for (size_t idx = 0; idx < numSubsamples; idx++) { + subsampleEntries.push_back(MediaFoundationSubsampleEntry{ + aCryptoConfig.mPlainSizes[idx], aCryptoConfig.mEncryptedSizes[idx]}); + } + const uint32_t entriesSize = + sizeof(MediaFoundationSubsampleEntry) * numSubsamples; + RETURN_IF_FAILED(aSample->SetBlob( + MFSampleExtension_Encryption_SubSample_Mapping, + reinterpret_cast(subsampleEntries.data()), + entriesSize)); + } + + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineStream::GetEvent(DWORD aFlags, + IMFMediaEvent** aEvent) { + AssertOnMFThreadPool(); + MOZ_ASSERT(mMediaEventQueue); + RETURN_IF_FAILED(mMediaEventQueue->GetEvent(aFlags, aEvent)); + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineStream::BeginGetEvent(IMFAsyncCallback* aCallback, + IUnknown* aState) { + AssertOnMFThreadPool(); + MOZ_ASSERT(mMediaEventQueue); + RETURN_IF_FAILED(mMediaEventQueue->BeginGetEvent(aCallback, aState)); + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineStream::EndGetEvent(IMFAsyncResult* aResult, + IMFMediaEvent** aEvent) { + AssertOnMFThreadPool(); + MOZ_ASSERT(mMediaEventQueue); + RETURN_IF_FAILED(mMediaEventQueue->EndGetEvent(aResult, aEvent)); + return S_OK; +} + +IFACEMETHODIMP MFMediaEngineStream::QueueEvent(MediaEventType aType, + REFGUID aExtendedType, + HRESULT aStatus, + const PROPVARIANT* aValue) { + AssertOnMFThreadPool(); + MOZ_ASSERT(mMediaEventQueue); + RETURN_IF_FAILED(mMediaEventQueue->QueueEventParamVar(aType, aExtendedType, + aStatus, aValue)); + SLOG("Queued event %s", MediaEventTypeToStr(aType)); + return S_OK; +} + +void MFMediaEngineStream::SetSelected(bool aSelected) { + AssertOnMFThreadPool(); + SLOG("Select=%d", aSelected); + mIsSelected = aSelected; +} + +void MFMediaEngineStream::NotifyNewData(MediaRawData* aSample) { + AssertOnTaskQueue(); + if (IsShutdown()) { + return; + } + const bool wasEnough = HasEnoughRawData(); + mRawDataQueueForFeedingEngine.Push(aSample); + mRawDataQueueForGeneratingOutput.Push(aSample); + SLOGV("NotifyNewData, push data [%" PRId64 ", %" PRId64 + "], queue size=%zu, queue duration=%" PRId64, + aSample->mTime.ToMicroseconds(), aSample->GetEndTime().ToMicroseconds(), + mRawDataQueueForFeedingEngine.GetSize(), + mRawDataQueueForFeedingEngine.Duration()); + if (mReceivedEOS) { + SLOG("Receive a new data, cancel old EOS flag"); + mReceivedEOS = false; + } + ReplySampleRequestIfPossible(); + if (!wasEnough && HasEnoughRawData()) { + SendRequestSampleEvent(true /* isEnough */); + } +} + +void MFMediaEngineStream::SendRequestSampleEvent(bool aIsEnough) { + AssertOnTaskQueue(); + SLOGV("data is %s, queue duration=%" PRId64, + aIsEnough ? 
"enough" : "not enough", + mRawDataQueueForFeedingEngine.Duration()); + mParentSource->mRequestSampleEvent.Notify( + SampleRequest{TrackType(), aIsEnough}); +} + +void MFMediaEngineStream::NotifyEndOfStreamInternal() { + AssertOnTaskQueue(); + if (mReceivedEOS) { + return; + } + SLOG("EOS"); + mReceivedEOS = true; + ReplySampleRequestIfPossible(); +} + +bool MFMediaEngineStream::IsEnded() const { + AssertOnTaskQueue(); + return mReceivedEOS && mRawDataQueueForFeedingEngine.GetSize() == 0; +} + +RefPtr MFMediaEngineStream::Flush() { + if (IsShutdown()) { + return MediaDataDecoder::FlushPromise::CreateAndReject( + MediaResult(NS_ERROR_FAILURE, + RESULT_DETAIL("MFMediaEngineStream is shutdown")), + __func__); + } + AssertOnTaskQueue(); + SLOG("Flush"); + mRawDataQueueForFeedingEngine.Reset(); + mRawDataQueueForGeneratingOutput.Reset(); + mReceivedEOS = false; + return MediaDataDecoder::FlushPromise::CreateAndResolve(true, __func__); +} + +RefPtr MFMediaEngineStream::OutputData( + RefPtr aSample) { + if (IsShutdown()) { + return MediaDataDecoder::DecodePromise::CreateAndReject( + MediaResult(NS_ERROR_FAILURE, + RESULT_DETAIL("MFMediaEngineStream is shutdown")), + __func__); + } + AssertOnTaskQueue(); + NotifyNewData(aSample); + MediaDataDecoder::DecodedData outputs; + if (RefPtr outputData = OutputDataInternal()) { + outputs.AppendElement(outputData); + SLOGV("Output data [%" PRId64 ",%" PRId64 "]", + outputData->mTime.ToMicroseconds(), + outputData->GetEndTime().ToMicroseconds()); + } + return MediaDataDecoder::DecodePromise::CreateAndResolve(std::move(outputs), + __func__); +}; + +RefPtr MFMediaEngineStream::Drain() { + if (IsShutdown()) { + return MediaDataDecoder::DecodePromise::CreateAndReject( + MediaResult(NS_ERROR_FAILURE, + RESULT_DETAIL("MFMediaEngineStream is shutdown")), + __func__); + } + AssertOnTaskQueue(); + MediaDataDecoder::DecodedData outputs; + while (RefPtr outputData = OutputDataInternal()) { + outputs.AppendElement(outputData); + SLOGV("Output data [%" PRId64 ",%" PRId64 "]", + outputData->mTime.ToMicroseconds(), + outputData->GetEndTime().ToMicroseconds()); + } + return MediaDataDecoder::DecodePromise::CreateAndResolve(std::move(outputs), + __func__); +} + +void MFMediaEngineStream::AssertOnTaskQueue() const { + MOZ_ASSERT(mTaskQueue && mTaskQueue->IsCurrentThreadIn()); +} + +void MFMediaEngineStream::AssertOnMFThreadPool() const { + // We can't really assert the thread id from thread pool, because it would + // change any time. So we just assert this is not the task queue, and use the + // explicit function name to indicate what thread we should run on. + // TODO : this assertion is not precise, because the running thread could be + // the stream wrapper thread as well, + MOZ_ASSERT(!mTaskQueue || !mTaskQueue->IsCurrentThreadIn()); +} + +#undef WLOGV +#undef SLOG +#undef SLOGV + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaEngineStream.h b/dom/media/platforms/wmf/MFMediaEngineStream.h new file mode 100644 index 0000000000..aa3bf7e65d --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineStream.h @@ -0,0 +1,228 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINESTREAM_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINESTREAM_H + +#include +#include + +#include + +#include "BlankDecoderModule.h" +#include "MediaQueue.h" +#include "PlatformDecoderModule.h" +#include "mozilla/Atomics.h" +#include "mozilla/Mutex.h" +#include "mozilla/SPSCQueue.h" + +namespace mozilla { + +class MFMediaEngineVideoStream; +class MFMediaSource; + +/** + * MFMediaEngineStream represents a track which would be responsible to provide + * encoded data into the media engine. The media engine can access this stream + * by the presentation descriptor which was acquired from the custom media + * source. + */ +class MFMediaEngineStream + : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags< + Microsoft::WRL::RuntimeClassType::ClassicCom>, + IMFMediaStream> { + public: + MFMediaEngineStream(); + ~MFMediaEngineStream(); + + virtual nsCString GetDescriptionName() const = 0; + + virtual nsCString GetCodecName() const = 0; + + HRESULT RuntimeClassInitialize(uint64_t aStreamId, const TrackInfo& aInfo, + MFMediaSource* aParentSource); + + // Called by MFMediaSource. + HRESULT Start(const PROPVARIANT* aPosition); + HRESULT Seek(const PROPVARIANT* aPosition); + HRESULT Stop(); + HRESULT Pause(); + void Shutdown(); + + void SetSelected(bool aSelected); + bool IsSelected() const { return mIsSelected; } + DWORD DescriptorId() const { return mStreamDescriptorId; } + + // Methods for IMFMediaStream + IFACEMETHODIMP GetMediaSource(IMFMediaSource** aMediaSource) override; + IFACEMETHODIMP GetStreamDescriptor( + IMFStreamDescriptor** aStreamDescriptor) override; + IFACEMETHODIMP RequestSample(IUnknown* aToken) override; + + // Methods for IMFMediaEventGenerator, IMFMediaStream derives from + // IMFMediaEventGenerator. + IFACEMETHODIMP GetEvent(DWORD aFlags, IMFMediaEvent** aEvent) override; + IFACEMETHODIMP BeginGetEvent(IMFAsyncCallback* aCallback, + IUnknown* aState) override; + IFACEMETHODIMP EndGetEvent(IMFAsyncResult* aResult, + IMFMediaEvent** aEvent) override; + IFACEMETHODIMP QueueEvent(MediaEventType aType, REFGUID aExtendedType, + HRESULT aStatus, + const PROPVARIANT* aValue) override; + + TaskQueue* GetTaskQueue() { return mTaskQueue; } + + void NotifyEndOfStream() { + Microsoft::WRL::ComPtr self = this; + Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction( + "MFMediaEngineStream::NotifyEndOfStream", + [self]() { self->NotifyEndOfStreamInternal(); })); + } + + // Return the type of the track, the result should be either audio or video. + virtual TrackInfo::TrackType TrackType() = 0; + + RefPtr Flush(); + + MediaEventProducer& EndedEvent() { return mEndedEvent; } + + // True if the stream has been shutdown, it's a thread safe method. + bool IsShutdown() const { return mIsShutdown; } + + virtual MFMediaEngineVideoStream* AsVideoStream() { return nullptr; } + + RefPtr OutputData( + RefPtr aSample); + + virtual RefPtr Drain(); + + virtual MediaDataDecoder::ConversionRequired NeedsConversion() const { + return MediaDataDecoder::ConversionRequired::kNeedNone; + } + + virtual bool IsEncrypted() const = 0; + + protected: + HRESULT GenerateStreamDescriptor( + Microsoft::WRL::ComPtr& aMediaType); + + // Create a IMFMediaType which includes the details about the stream. + // https://docs.microsoft.com/en-us/windows/win32/medfound/media-type-attributes + virtual HRESULT CreateMediaType(const TrackInfo& aInfo, + IMFMediaType** aMediaType) = 0; + + // True if the stream already has enough raw data. 
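+  // Subclasses define the threshold; the video stream, for example, compares
+  // the queued duration against a media pref.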
+  virtual bool HasEnoughRawData() const = 0;
+
+  HRESULT CreateInputSample(IMFSample** aSample);
+  void ReplySampleRequestIfPossible();
+  bool ShouldServeSamples() const;
+
+  void NotifyNewData(MediaRawData* aSample);
+  void NotifyEndOfStreamInternal();
+
+  virtual bool IsEnded() const;
+
+  // Override this method if the inherited class needs to perform cleanup on
+  // the task queue when the stream gets shut down.
+  virtual void ShutdownCleanUpOnTaskQueue(){};
+
+  // The inherited class must implement this method to return decoded data. It
+  // should use `mRawDataQueueForGeneratingOutput` to generate the output.
+  virtual already_AddRefed<MediaData> OutputDataInternal() = 0;
+
+  void SendRequestSampleEvent(bool aIsEnough);
+
+  HRESULT AddEncryptAttributes(IMFSample* aSample,
+                               const CryptoSample& aCryptoConfig);
+
+  void AssertOnTaskQueue() const;
+  void AssertOnMFThreadPool() const;
+
+  // IMFMediaEventQueue is thread-safe.
+  Microsoft::WRL::ComPtr<IMFMediaEventQueue> mMediaEventQueue;
+  Microsoft::WRL::ComPtr<IMFStreamDescriptor> mStreamDescriptor;
+  Microsoft::WRL::ComPtr<MFMediaSource> mParentSource;
+
+  // This is a unique ID retrieved from the IMFStreamDescriptor.
+  DWORD mStreamDescriptorId = 0;
+
+  // A unique ID assigned by MFMediaSource, which won't be changed after the
+  // first assignment.
+  uint64_t mStreamId = 0;
+
+  RefPtr<TaskQueue> mTaskQueue;
+
+  // This class runs on three threads: the MF thread pool, the source's task
+  // queue and the MediaPDecoder (wrapper) thread. The following members are
+  // used across these threads, so they need to be thread-safe.
+
+  // Modified on the MF thread pool, accessed from any thread.
+  Atomic<bool> mIsShutdown;
+
+  // True if the stream is selected by the media source.
+  // Modified on the MF thread pool, accessed from any thread.
+  Atomic<bool> mIsSelected;
+
+  // A thread-safe queue storing input samples, which provides samples to the
+  // media engine.
+  MediaQueue<MediaRawData> mRawDataQueueForFeedingEngine;
+
+  // A thread-safe queue storing input samples, which is used to generate
+  // decoded data.
+  MediaQueue<MediaRawData> mRawDataQueueForGeneratingOutput;
+
+  // Thread-safe members END
+
+  // Stores sample request tokens; one token should be related to one output
+  // sample. Used on the task queue only.
+  std::queue<Microsoft::WRL::ComPtr<IUnknown>> mSampleRequestTokens;
+
+  // Notify when playback reaches the end for this track.
+  MediaEventProducer<TrackInfo::TrackType> mEndedEvent;
+
+  // True if the stream has received the last data, but it can be reset if the
+  // stream starts delivering more data. Used on the task queue only.
+  bool mReceivedEOS;
+};
+
+/**
+ * This wrapper helps to dispatch tasks onto the stream's task queue. Its
+ * methods are not thread-safe and are only called on the IPC decoder manager
+ * thread.
+ */
+class MFMediaEngineStreamWrapper final : public MediaDataDecoder {
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFMediaEngineStreamWrapper, final);
+
+  MFMediaEngineStreamWrapper(MFMediaEngineStream* aStream,
+                             TaskQueue* aTaskQueue,
+                             const CreateDecoderParams& aParams)
+      : mStream(aStream), mTaskQueue(aTaskQueue) {
+    MOZ_ASSERT(mStream);
+    MOZ_ASSERT(mTaskQueue);
+  }
+
+  // Methods for MediaDataDecoder; they are all called on the remote
+  // decoder manager thread.
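+  // Each call is forwarded to the wrapped MFMediaEngineStream on its task
+  // queue.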
+ RefPtr Init() override; + RefPtr Decode(MediaRawData* aSample) override; + RefPtr Drain() override; + RefPtr Flush() override; + RefPtr Shutdown() override; + nsCString GetDescriptionName() const override; + nsCString GetCodecName() const override; + ConversionRequired NeedsConversion() const override; + + private: + ~MFMediaEngineStreamWrapper() = default; + + Microsoft::WRL::ComPtr mStream; + RefPtr mTaskQueue; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINESTREAM_H diff --git a/dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp b/dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp new file mode 100644 index 0000000000..ca043478f0 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp @@ -0,0 +1,375 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFMediaEngineVideoStream.h" + +#include "mozilla/layers/DcompSurfaceImage.h" +#include "MFMediaEngineUtils.h" +#include "mozilla/StaticPrefs_media.h" + +namespace mozilla { + +#define LOG(msg, ...) \ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \ + ("MFMediaStream=%p (%s), " msg, this, \ + this->GetDescriptionName().get(), ##__VA_ARGS__)) + +#define LOGV(msg, ...) \ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Verbose, \ + ("MFMediaStream=%p (%s), " msg, this, \ + this->GetDescriptionName().get(), ##__VA_ARGS__)) + +using Microsoft::WRL::ComPtr; +using Microsoft::WRL::MakeAndInitialize; + +/* static */ +MFMediaEngineVideoStream* MFMediaEngineVideoStream::Create( + uint64_t aStreamId, const TrackInfo& aInfo, MFMediaSource* aParentSource) { + MFMediaEngineVideoStream* stream; + MOZ_ASSERT(aInfo.IsVideo()); + if (FAILED(MakeAndInitialize( + &stream, aStreamId, aInfo, aParentSource))) { + return nullptr; + } + stream->mStreamType = + GetStreamTypeFromMimeType(aInfo.GetAsVideoInfo()->mMimeType); + MOZ_ASSERT(StreamTypeIsVideo(stream->mStreamType)); + stream->mHasReceivedInitialCreateDecoderConfig = false; + stream->SetDCompSurfaceHandle(INVALID_HANDLE_VALUE, gfx::IntSize{}); + return stream; +} + +void MFMediaEngineVideoStream::SetKnowsCompositor( + layers::KnowsCompositor* aKnowsCompositor) { + ComPtr self = this; + Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction( + "MFMediaEngineStream::SetKnowsCompositor", + [self, knowCompositor = RefPtr{aKnowsCompositor}, + this]() { + mKnowsCompositor = knowCompositor; + LOG("Set SetKnowsCompositor=%p", mKnowsCompositor.get()); + ResolvePendingDrainPromiseIfNeeded(); + })); +} + +void MFMediaEngineVideoStream::SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle, + gfx::IntSize aDisplay) { + ComPtr self = this; + Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction( + "MFMediaEngineStream::SetDCompSurfaceHandle", + [self, aDCompSurfaceHandle, aDisplay, this]() { + if (mDCompSurfaceHandle == aDCompSurfaceHandle) { + return; + } + mDCompSurfaceHandle = aDCompSurfaceHandle; + mNeedRecreateImage = true; + { + MutexAutoLock lock(mMutex); + if (aDCompSurfaceHandle != INVALID_HANDLE_VALUE && + aDisplay != mDisplay) { + LOG("Update display [%dx%d] -> [%dx%d]", mDisplay.Width(), + mDisplay.Height(), aDisplay.Width(), aDisplay.Height()); + mDisplay = aDisplay; + } + } + LOG("Set DCompSurfaceHandle, handle=%p", mDCompSurfaceHandle); + ResolvePendingDrainPromiseIfNeeded(); + })); +} + +HRESULT MFMediaEngineVideoStream::CreateMediaType(const TrackInfo& aInfo, + IMFMediaType** aMediaType) { + auto& 
videoInfo = *aInfo.GetAsVideoInfo(); + mIsEncrypted = videoInfo.mCrypto.IsEncrypted(); + + GUID subType = VideoMimeTypeToMediaFoundationSubtype(videoInfo.mMimeType); + NS_ENSURE_TRUE(subType != GUID_NULL, MF_E_TOPO_CODEC_NOT_FOUND); + + // https://docs.microsoft.com/en-us/windows/win32/medfound/media-type-attributes + ComPtr mediaType; + RETURN_IF_FAILED(wmf::MFCreateMediaType(&mediaType)); + RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)); + RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_SUBTYPE, subType)); + + const auto& image = videoInfo.mImage; + UINT32 imageWidth = image.Width(); + UINT32 imageHeight = image.Height(); + RETURN_IF_FAILED(MFSetAttributeSize(mediaType.Get(), MF_MT_FRAME_SIZE, + imageWidth, imageHeight)); + + UINT32 displayWidth = videoInfo.mDisplay.Width(); + UINT32 displayHeight = videoInfo.mDisplay.Height(); + { + MutexAutoLock lock(mMutex); + mDisplay = videoInfo.mDisplay; + } + // PAR = DAR / SAR = (DW / DH) / (SW / SH) = (DW * SH) / (DH * SW) + RETURN_IF_FAILED(MFSetAttributeRatio( + mediaType.Get(), MF_MT_PIXEL_ASPECT_RATIO, displayWidth * imageHeight, + displayHeight * imageWidth)); + + // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/ns-mfobjects-mfoffset + // The value of the MFOffset number is value + (fract / 65536.0f). + static const auto ToMFOffset = [](float aValue) { + MFOffset offset; + offset.value = static_cast(aValue); + offset.fract = static_cast(65536 * (aValue - offset.value)); + return offset; + }; + MFVideoArea area; + area.OffsetX = ToMFOffset(videoInfo.ImageRect().x); + area.OffsetY = ToMFOffset(videoInfo.ImageRect().y); + area.Area = {(LONG)imageWidth, (LONG)imageHeight}; + RETURN_IF_FAILED(mediaType->SetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&area, + sizeof(area))); + + // https://docs.microsoft.com/en-us/windows/win32/api/mfapi/ne-mfapi-mfvideorotationformat + static const auto ToMFVideoRotationFormat = [](VideoRotation aRotation) { + using Rotation = VideoRotation; + switch (aRotation) { + case Rotation::kDegree_0: + return MFVideoRotationFormat_0; + case Rotation::kDegree_90: + return MFVideoRotationFormat_90; + case Rotation::kDegree_180: + return MFVideoRotationFormat_180; + default: + MOZ_ASSERT(aRotation == Rotation::kDegree_270); + return MFVideoRotationFormat_270; + } + }; + const auto rotation = ToMFVideoRotationFormat(videoInfo.mRotation); + RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_VIDEO_ROTATION, rotation)); + + static const auto ToMFVideoTransFunc = + [](const Maybe& aColorSpace) { + using YUVColorSpace = gfx::YUVColorSpace; + if (!aColorSpace) { + return MFVideoTransFunc_Unknown; + } + // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/ne-mfobjects-mfvideotransferfunction + switch (*aColorSpace) { + case YUVColorSpace::BT601: + case YUVColorSpace::BT709: + return MFVideoTransFunc_709; + case YUVColorSpace::BT2020: + return MFVideoTransFunc_2020; + case YUVColorSpace::Identity: + return MFVideoTransFunc_sRGB; + default: + return MFVideoTransFunc_Unknown; + } + }; + const auto transFunc = ToMFVideoTransFunc(videoInfo.mColorSpace); + RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_TRANSFER_FUNCTION, transFunc)); + + static const auto ToMFVideoPrimaries = + [](const Maybe& aColorSpace) { + using YUVColorSpace = gfx::YUVColorSpace; + if (!aColorSpace) { + return MFVideoPrimaries_Unknown; + } + // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/ne-mfobjects-mfvideoprimaries + switch (*aColorSpace) { + case YUVColorSpace::BT601: + return MFVideoPrimaries_Unknown; + case 
YUVColorSpace::BT709: + return MFVideoPrimaries_BT709; + case YUVColorSpace::BT2020: + return MFVideoPrimaries_BT2020; + case YUVColorSpace::Identity: + return MFVideoPrimaries_BT709; + default: + return MFVideoPrimaries_Unknown; + } + }; + const auto videoPrimaries = ToMFVideoPrimaries(videoInfo.mColorSpace); + RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_VIDEO_PRIMARIES, videoPrimaries)); + + LOG("Created video type, subtype=%s, image=[%ux%u], display=[%ux%u], " + "rotation=%s, tranFuns=%s, primaries=%s, encrypted=%d", + GUIDToStr(subType), imageWidth, imageHeight, displayWidth, displayHeight, + MFVideoRotationFormatToStr(rotation), + MFVideoTransferFunctionToStr(transFunc), + MFVideoPrimariesToStr(videoPrimaries), mIsEncrypted); + if (IsEncrypted()) { + ComPtr protectedMediaType; + RETURN_IF_FAILED(wmf::MFWrapMediaType(mediaType.Get(), + MFMediaType_Protected, subType, + protectedMediaType.GetAddressOf())); + LOG("Wrap MFMediaType_Video into MFMediaType_Protected"); + *aMediaType = protectedMediaType.Detach(); + } else { + *aMediaType = mediaType.Detach(); + } + return S_OK; +} + +bool MFMediaEngineVideoStream::HasEnoughRawData() const { + // If more than this much raw video is queued, we'll hold off request more + // video. + return mRawDataQueueForFeedingEngine.Duration() >= + StaticPrefs::media_wmf_media_engine_raw_data_threshold_video(); +} + +bool MFMediaEngineVideoStream::IsDCompImageReady() { + AssertOnTaskQueue(); + if (!mDCompSurfaceHandle || mDCompSurfaceHandle == INVALID_HANDLE_VALUE) { + LOGV("Can't create image without a valid dcomp surface handle"); + return false; + } + + if (!mKnowsCompositor) { + LOGV("Can't create image without the knows compositor"); + return false; + } + + if (!mDcompSurfaceImage || mNeedRecreateImage) { + MutexAutoLock lock(mMutex); + // DirectComposition only supports RGBA. We use DXGI_FORMAT_B8G8R8A8_UNORM + // as a default because we can't know what format the dcomp surface is. 
+ // https://docs.microsoft.com/en-us/windows/win32/api/dcomp/nf-dcomp-idcompositionsurfacefactory-createsurface + mDcompSurfaceImage = new layers::DcompSurfaceImage( + mDCompSurfaceHandle, mDisplay, gfx::SurfaceFormat::B8G8R8A8, + mKnowsCompositor); + mNeedRecreateImage = false; + LOG("Created dcomp surface image, handle=%p, size=[%u,%u]", + mDCompSurfaceHandle, mDisplay.Width(), mDisplay.Height()); + } + return true; +} + +already_AddRefed MFMediaEngineVideoStream::OutputDataInternal() { + AssertOnTaskQueue(); + if (mRawDataQueueForGeneratingOutput.GetSize() == 0 || !IsDCompImageReady()) { + return nullptr; + } + RefPtr sample = mRawDataQueueForGeneratingOutput.PopFront(); + RefPtr output; + { + MutexAutoLock lock(mMutex); + output = VideoData::CreateFromImage( + mDisplay, sample->mOffset, sample->mTime, sample->mDuration, + mDcompSurfaceImage, sample->mKeyframe, sample->mTimecode); + } + return output.forget(); +} + +RefPtr MFMediaEngineVideoStream::Drain() { + AssertOnTaskQueue(); + MediaDataDecoder::DecodedData outputs; + if (!IsDCompImageReady()) { + LOGV("Waiting for dcomp image for draining"); + return mPendingDrainPromise.Ensure(__func__); + } + return MFMediaEngineStream::Drain(); +} + +void MFMediaEngineVideoStream::ResolvePendingDrainPromiseIfNeeded() { + AssertOnTaskQueue(); + if (mPendingDrainPromise.IsEmpty()) { + return; + } + if (!IsDCompImageReady()) { + return; + } + MediaDataDecoder::DecodedData outputs; + while (RefPtr outputData = OutputDataInternal()) { + outputs.AppendElement(outputData); + LOGV("Output data [%" PRId64 ",%" PRId64 "]", + outputData->mTime.ToMicroseconds(), + outputData->GetEndTime().ToMicroseconds()); + } + mPendingDrainPromise.Resolve(std::move(outputs), __func__); + LOG("Resolved pending drain promise"); +} + +MediaDataDecoder::ConversionRequired MFMediaEngineVideoStream::NeedsConversion() + const { + return mStreamType == WMFStreamType::H264 || + mStreamType == WMFStreamType::HEVC + ? MediaDataDecoder::ConversionRequired::kNeedAnnexB + : MediaDataDecoder::ConversionRequired::kNeedNone; +} + +void MFMediaEngineVideoStream::SetConfig(const TrackInfo& aConfig) { + MOZ_ASSERT(aConfig.IsVideo()); + ComPtr self = this; + Unused << mTaskQueue->Dispatch( + NS_NewRunnableFunction("MFMediaEngineStream::SetConfig", + [self, info = *aConfig.GetAsVideoInfo(), this]() { + if (mHasReceivedInitialCreateDecoderConfig) { + // Here indicating a new config for video, + // which is triggered by the media change + // monitor, so we need to update the config. + UpdateConfig(info); + } + mHasReceivedInitialCreateDecoderConfig = true; + })); +} + +void MFMediaEngineVideoStream::UpdateConfig(const VideoInfo& aInfo) { + AssertOnTaskQueue(); + // Disable explicit format change event for H264/HEVC to allow switching to + // the new stream without a full re-create, which will be much faster. This is + // also due to the fact that the MFT decoder can handle some format changes + // without a format change event. For format changes that the MFT decoder + // cannot support (e.g. codec change), the playback will fail later with + // MF_E_INVALIDMEDIATYPE (0xC00D36B4). 
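+  // Therefore, for H264/HEVC we return early below instead of queueing an
+  // MEStreamFormatChanged event with a new stream descriptor.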
+ if (mStreamType == WMFStreamType::H264 || + mStreamType == WMFStreamType::HEVC) { + return; + } + + LOG("Video config changed, will update stream descriptor"); + PROFILER_MARKER_TEXT("VideoConfigChange", MEDIA_PLAYBACK, {}, + nsPrintfCString("stream=%s, id=%" PRIu64, + GetDescriptionName().get(), mStreamId)); + ComPtr mediaType; + RETURN_VOID_IF_FAILED(CreateMediaType(aInfo, mediaType.GetAddressOf())); + RETURN_VOID_IF_FAILED(GenerateStreamDescriptor(mediaType)); + RETURN_VOID_IF_FAILED(mMediaEventQueue->QueueEventParamUnk( + MEStreamFormatChanged, GUID_NULL, S_OK, mediaType.Get())); +} + +void MFMediaEngineVideoStream::ShutdownCleanUpOnTaskQueue() { + AssertOnTaskQueue(); + mPendingDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__); +} + +bool MFMediaEngineVideoStream::IsEnded() const { + AssertOnTaskQueue(); + // If a video only contains one frame, the media engine won't return a decoded + // frame before we tell it the track is already ended. However, due to the + // constraint of our media pipeline, the format reader won't notify EOS until + // the draining finishes, which causes a deadlock. Therefore, we would + // consider having pending drain promise as a sign of EOS as well, in order to + // get the decoded frame and revolve the drain promise. + return (mReceivedEOS || !mPendingDrainPromise.IsEmpty()) && + mRawDataQueueForFeedingEngine.GetSize() == 0; +} + +bool MFMediaEngineVideoStream::IsEncrypted() const { return mIsEncrypted; } + +nsCString MFMediaEngineVideoStream::GetCodecName() const { + switch (mStreamType) { + case WMFStreamType::H264: + return "h264"_ns; + case WMFStreamType::VP8: + return "vp8"_ns; + case WMFStreamType::VP9: + return "vp9"_ns; + case WMFStreamType::AV1: + return "av1"_ns; + case WMFStreamType::HEVC: + return "hevc"_ns; + default: + return "unknown"_ns; + }; +} + +#undef LOG +#undef LOGV + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaEngineVideoStream.h b/dom/media/platforms/wmf/MFMediaEngineVideoStream.h new file mode 100644 index 0000000000..df17c264e4 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaEngineVideoStream.h @@ -0,0 +1,107 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEVIDEOSTREAM_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEVIDEOSTREAM_H + +#include "MFMediaEngineStream.h" +#include "WMFUtils.h" +#include "mozilla/Atomics.h" +#include "mozilla/Mutex.h" + +namespace mozilla { +namespace layers { + +class Image; +class DcompSurfaceImage; + +} // namespace layers + +class MFMediaSource; + +class MFMediaEngineVideoStream final : public MFMediaEngineStream { + public: + MFMediaEngineVideoStream() = default; + + static MFMediaEngineVideoStream* Create(uint64_t aStreamId, + const TrackInfo& aInfo, + MFMediaSource* aParentSource); + nsCString GetDescriptionName() const override { + return "media engine video stream"_ns; + } + + nsCString GetCodecName() const override; + + TrackInfo::TrackType TrackType() override { + return TrackInfo::TrackType::kVideoTrack; + } + + void SetKnowsCompositor(layers::KnowsCompositor* aKnowsCompositor); + + void SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle, gfx::IntSize aDisplay); + + MFMediaEngineVideoStream* AsVideoStream() override { return this; } + + MediaDataDecoder::ConversionRequired NeedsConversion() const override; + + // Called by MFMediaEngineParent when we are creating a video decoder for + // the remote decoder. This is used to detect if the inband video config + // change happens during playback. + void SetConfig(const TrackInfo& aConfig); + + RefPtr Drain() override; + + bool IsEncrypted() const override; + + private: + HRESULT + CreateMediaType(const TrackInfo& aInfo, IMFMediaType** aMediaType) override; + + bool HasEnoughRawData() const override; + + void UpdateConfig(const VideoInfo& aInfo); + + already_AddRefed OutputDataInternal() override; + + bool IsDCompImageReady(); + + void ResolvePendingDrainPromiseIfNeeded(); + + void ShutdownCleanUpOnTaskQueue() override; + + bool IsEnded() const override; + + // Task queue only members. + HANDLE mDCompSurfaceHandle; + bool mNeedRecreateImage; + RefPtr mKnowsCompositor; + + Mutex mMutex{"MFMediaEngineVideoStream"}; + gfx::IntSize mDisplay MOZ_GUARDED_BY(mMutex); + + // Set on the initialization, won't be changed after that. + WMFStreamType mStreamType; + + // Created and accessed in the decoder thread. + RefPtr mDcompSurfaceImage; + + // This flag is used to check if the video config changes detected by the + // media config monitor. When the video decoder get created first, we will set + // this flag to true, then we know any config being set afterward indicating + // a new config change. + bool mHasReceivedInitialCreateDecoderConfig; + + // When draining, the track should return all decoded data. However, if the + // dcomp image hasn't been ready yet, then we won't have any decoded data to + // return. This promise is used for that case, and will be resolved once we + // have dcomp image. + MozPromiseHolder mPendingDrainPromise; + + // Set when `CreateMediaType()` is called. + bool mIsEncrypted = false; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEVIDEOSTREAM_H diff --git a/dom/media/platforms/wmf/MFMediaSource.cpp b/dom/media/platforms/wmf/MFMediaSource.cpp new file mode 100644 index 0000000000..d8e33328f2 --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaSource.cpp @@ -0,0 +1,606 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "MFMediaSource.h" + +#include +#include +#include + +#include "MFCDMProxy.h" +#include "MFMediaEngineAudioStream.h" +#include "MFMediaEngineUtils.h" +#include "MFMediaEngineVideoStream.h" +#include "VideoUtils.h" +#include "WMF.h" +#include "mozilla/Atomics.h" +#include "mozilla/StaticPrefs_media.h" +#include "mozilla/TaskQueue.h" + +namespace mozilla { + +#define LOG(msg, ...) \ + MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \ + ("MFMediaSource=%p, " msg, this, ##__VA_ARGS__)) + +using Microsoft::WRL::ComPtr; + +MFMediaSource::MFMediaSource() + : mPresentationEnded(false), mIsAudioEnded(false), mIsVideoEnded(false) { + MOZ_COUNT_CTOR(MFMediaSource); + LOG("media source created"); +} + +MFMediaSource::~MFMediaSource() { + // TODO : notify cdm about the last key id? + MOZ_COUNT_DTOR(MFMediaSource); + LOG("media source destroyed"); +} + +HRESULT MFMediaSource::RuntimeClassInitialize( + const Maybe& aAudio, const Maybe& aVideo, + nsISerialEventTarget* aManagerThread) { + // On manager thread. + MutexAutoLock lock(mMutex); + + static uint64_t streamId = 1; + + mTaskQueue = TaskQueue::Create( + GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), "MFMediaSource"); + mManagerThread = aManagerThread; + MOZ_ASSERT(mManagerThread, "manager thread shouldn't be nullptr!"); + + if (aAudio) { + mAudioStream.Attach( + MFMediaEngineAudioStream::Create(streamId++, *aAudio, this)); + if (!mAudioStream) { + NS_WARNING("Failed to create audio stream"); + return E_FAIL; + } + mAudioStreamEndedListener = mAudioStream->EndedEvent().Connect( + mManagerThread, this, &MFMediaSource::HandleStreamEnded); + } else { + mIsAudioEnded = true; + } + + if (aVideo) { + mVideoStream.Attach( + MFMediaEngineVideoStream::Create(streamId++, *aVideo, this)); + if (!mVideoStream) { + NS_WARNING("Failed to create video stream"); + return E_FAIL; + } + mVideoStreamEndedListener = mVideoStream->EndedEvent().Connect( + mManagerThread, this, &MFMediaSource::HandleStreamEnded); + } else { + mIsVideoEnded = true; + } + + RETURN_IF_FAILED(wmf::MFCreateEventQueue(&mMediaEventQueue)); + + LOG("Initialized a media source"); + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::GetCharacteristics(DWORD* aCharacteristics) { + // This could be run on both mf thread pool and manager thread. 
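+  // No thread assertion here; only take the lock for the shutdown check.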
+ { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + } + // https://docs.microsoft.com/en-us/windows/win32/api/mfidl/ne-mfidl-mfmediasource_characteristics + *aCharacteristics = MFMEDIASOURCE_CAN_SEEK | MFMEDIASOURCE_CAN_PAUSE; + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::CreatePresentationDescriptor( + IMFPresentationDescriptor** aPresentationDescriptor) { + AssertOnMFThreadPool(); + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + + LOG("CreatePresentationDescriptor"); + // See steps of creating the presentation descriptor + // https://docs.microsoft.com/en-us/windows/win32/medfound/writing-a-custom-media-source#creating-the-presentation-descriptor + ComPtr presentationDescriptor; + nsTArray> streamDescriptors; + + DWORD audioDescriptorId = 0, videoDescriptorId = 0; + if (mAudioStream) { + ComPtr* descriptor = streamDescriptors.AppendElement(); + RETURN_IF_FAILED( + mAudioStream->GetStreamDescriptor(descriptor->GetAddressOf())); + audioDescriptorId = mAudioStream->DescriptorId(); + } + + if (mVideoStream) { + ComPtr* descriptor = streamDescriptors.AppendElement(); + RETURN_IF_FAILED( + mVideoStream->GetStreamDescriptor(descriptor->GetAddressOf())); + videoDescriptorId = mVideoStream->DescriptorId(); + } + + const DWORD descCount = static_cast(streamDescriptors.Length()); + MOZ_ASSERT(descCount <= 2); + RETURN_IF_FAILED(wmf::MFCreatePresentationDescriptor( + descCount, + reinterpret_cast(streamDescriptors.Elements()), + &presentationDescriptor)); + + // Select default streams for the presentation descriptor. + for (DWORD idx = 0; idx < descCount; idx++) { + ComPtr streamDescriptor; + BOOL selected; + RETURN_IF_FAILED(presentationDescriptor->GetStreamDescriptorByIndex( + idx, &selected, &streamDescriptor)); + if (selected) { + continue; + } + RETURN_IF_FAILED(presentationDescriptor->SelectStream(idx)); + DWORD streamId; + streamDescriptor->GetStreamIdentifier(&streamId); + LOG(" Select stream (id=%lu)", streamId); + } + + LOG("Created a presentation descriptor (a=%lu,v=%lu)", audioDescriptorId, + videoDescriptorId); + *aPresentationDescriptor = presentationDescriptor.Detach(); + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::Start( + IMFPresentationDescriptor* aPresentationDescriptor, + const GUID* aGuidTimeFormat, const PROPVARIANT* aStartPosition) { + AssertOnMFThreadPool(); + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + + // See detailed steps in following documents. + // https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nf-mfidl-imfmediasource-start + // https://docs.microsoft.com/en-us/windows/win32/medfound/writing-a-custom-media-source#starting-the-media-source + + // A call to Start results in a seek if the previous state was started or + // paused, and the new starting position is not VT_EMPTY. + const bool isSeeking = + IsSeekable() && ((mState == State::Started || mState == State::Paused) && + aStartPosition->vt != VT_EMPTY); + nsAutoCString startPosition; + if (aStartPosition->vt == VT_I8) { + startPosition.AppendInt(aStartPosition->hVal.QuadPart); + } else if (aStartPosition->vt == VT_EMPTY) { + startPosition.AppendLiteral("empty"); + } + LOG("Start, start position=%s, isSeeking=%d", startPosition.get(), isSeeking); + + // Ask IMFMediaStream to send stream events. 
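+  // For each selected stream we queue MENewStream or MEUpdatedStream on the
+  // source's event queue, then start or seek that stream.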
+ DWORD streamDescCount = 0; + RETURN_IF_FAILED( + aPresentationDescriptor->GetStreamDescriptorCount(&streamDescCount)); + + // TODO : should event orders be exactly same as msdn's order? + for (DWORD idx = 0; idx < streamDescCount; idx++) { + ComPtr streamDescriptor; + BOOL selected; + RETURN_IF_FAILED(aPresentationDescriptor->GetStreamDescriptorByIndex( + idx, &selected, &streamDescriptor)); + + DWORD streamId; + RETURN_IF_FAILED(streamDescriptor->GetStreamIdentifier(&streamId)); + + ComPtr stream; + if (mAudioStream && mAudioStream->DescriptorId() == streamId) { + stream = mAudioStream; + } else if (mVideoStream && mVideoStream->DescriptorId() == streamId) { + stream = mVideoStream; + } + NS_ENSURE_TRUE(stream, MF_E_INVALIDREQUEST); + + if (selected) { + RETURN_IF_FAILED(mMediaEventQueue->QueueEventParamUnk( + stream->IsSelected() ? MEUpdatedStream : MENewStream, GUID_NULL, S_OK, + stream.Get())); + // Need to select stream first before doing other operations. + stream->SetSelected(true); + if (isSeeking) { + RETURN_IF_FAILED(stream->Seek(aStartPosition)); + } else { + RETURN_IF_FAILED(stream->Start(aStartPosition)); + } + } else { + stream->SetSelected(false); + } + } + + // Send source event. + RETURN_IF_FAILED(QueueEvent(isSeeking ? MESourceSeeked : MESourceStarted, + GUID_NULL, S_OK, aStartPosition)); + mState = State::Started; + mPresentationEnded = false; + if (mAudioStream && mAudioStream->IsSelected()) { + mIsAudioEnded = false; + } + if (mVideoStream && mVideoStream->IsSelected()) { + mIsVideoEnded = false; + } + LOG("Started media source"); + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::Stop() { + AssertOnMFThreadPool(); + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + + LOG("Stop"); + RETURN_IF_FAILED(QueueEvent(MESourceStopped, GUID_NULL, S_OK, nullptr)); + if (mAudioStream) { + RETURN_IF_FAILED(mAudioStream->Stop()); + } + if (mVideoStream) { + RETURN_IF_FAILED(mVideoStream->Stop()); + } + + mState = State::Stopped; + LOG("Stopped media source"); + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::Pause() { + AssertOnMFThreadPool(); + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + if (mState != State::Started) { + return MF_E_INVALID_STATE_TRANSITION; + } + + LOG("Pause"); + RETURN_IF_FAILED(QueueEvent(MESourcePaused, GUID_NULL, S_OK, nullptr)); + if (mAudioStream) { + RETURN_IF_FAILED(mAudioStream->Pause()); + } + if (mVideoStream) { + RETURN_IF_FAILED(mVideoStream->Pause()); + } + + mState = State::Paused; + LOG("Paused media source"); + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::Shutdown() { + // Could be called on either manager thread or MF thread pool. + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + + LOG("Shutdown"); + // After this method is called, all IMFMediaEventQueue methods return + // MF_E_SHUTDOWN. 
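+  // The streams themselves are shut down separately via ShutdownTaskQueue().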
+ RETURN_IF_FAILED(mMediaEventQueue->Shutdown()); + mState = State::Shutdowned; + LOG("Shutdowned media source"); + return S_OK; +} + +void MFMediaSource::ShutdownTaskQueue() { + AssertOnManagerThread(); + LOG("ShutdownTaskQueue"); + MutexAutoLock lock(mMutex); + if (mAudioStream) { + mAudioStream->Shutdown(); + mAudioStream = nullptr; + mAudioStreamEndedListener.DisconnectIfExists(); + } + if (mVideoStream) { + mVideoStream->Shutdown(); + mVideoStream = nullptr; + mVideoStreamEndedListener.DisconnectIfExists(); + } + Unused << mTaskQueue->BeginShutdown(); + mTaskQueue = nullptr; +} + +IFACEMETHODIMP MFMediaSource::GetEvent(DWORD aFlags, IMFMediaEvent** aEvent) { + MOZ_ASSERT(mMediaEventQueue); + return mMediaEventQueue->GetEvent(aFlags, aEvent); +} + +IFACEMETHODIMP MFMediaSource::BeginGetEvent(IMFAsyncCallback* aCallback, + IUnknown* aState) { + MOZ_ASSERT(mMediaEventQueue); + return mMediaEventQueue->BeginGetEvent(aCallback, aState); +} + +IFACEMETHODIMP MFMediaSource::EndGetEvent(IMFAsyncResult* aResult, + IMFMediaEvent** aEvent) { + MOZ_ASSERT(mMediaEventQueue); + return mMediaEventQueue->EndGetEvent(aResult, aEvent); +} + +IFACEMETHODIMP MFMediaSource::QueueEvent(MediaEventType aType, + REFGUID aExtendedType, HRESULT aStatus, + const PROPVARIANT* aValue) { + MOZ_ASSERT(mMediaEventQueue); + LOG("Queued event %s", MediaEventTypeToStr(aType)); + PROFILER_MARKER_TEXT("MFMediaSource::QueueEvent", MEDIA_PLAYBACK, {}, + nsPrintfCString("%s", MediaEventTypeToStr(aType))); + RETURN_IF_FAILED(mMediaEventQueue->QueueEventParamVar(aType, aExtendedType, + aStatus, aValue)); + return S_OK; +} + +bool MFMediaSource::IsSeekable() const { + // TODO : check seekable from info. + return true; +} + +void MFMediaSource::NotifyEndOfStream(TrackInfo::TrackType aType) { + AssertOnManagerThread(); + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return; + } + if (aType == TrackInfo::TrackType::kAudioTrack) { + MOZ_ASSERT(mAudioStream); + mAudioStream->NotifyEndOfStream(); + } else if (aType == TrackInfo::TrackType::kVideoTrack) { + MOZ_ASSERT(mVideoStream); + mVideoStream->NotifyEndOfStream(); + } +} + +void MFMediaSource::HandleStreamEnded(TrackInfo::TrackType aType) { + AssertOnManagerThread(); + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return; + } + if (mPresentationEnded) { + LOG("Presentation is ended already"); + RETURN_VOID_IF_FAILED( + QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, nullptr)); + return; + } + + LOG("Handle %s stream ended", TrackTypeToStr(aType)); + if (aType == TrackInfo::TrackType::kAudioTrack) { + mIsAudioEnded = true; + } else if (aType == TrackInfo::TrackType::kVideoTrack) { + mIsVideoEnded = true; + } else { + MOZ_ASSERT_UNREACHABLE("Incorrect track type!"); + } + mPresentationEnded = mIsAudioEnded && mIsVideoEnded; + LOG("PresentationEnded=%d, audioEnded=%d, videoEnded=%d", + !!mPresentationEnded, mIsAudioEnded, mIsVideoEnded); + PROFILER_MARKER_TEXT( + " MFMediaSource::HandleStreamEnded", MEDIA_PLAYBACK, {}, + nsPrintfCString("PresentationEnded=%d, audioEnded=%d, videoEnded=%d", + !!mPresentationEnded, mIsAudioEnded, mIsVideoEnded)); + if (mPresentationEnded) { + RETURN_VOID_IF_FAILED( + QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, nullptr)); + } +} + +void MFMediaSource::SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle, + gfx::IntSize aDisplay) { + AssertOnManagerThread(); + MutexAutoLock lock(mMutex); + if (mVideoStream) { + mVideoStream->AsVideoStream()->SetDCompSurfaceHandle(aDCompSurfaceHandle, + aDisplay); + } +} + 
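+// IMFGetService: only the rate control service is supported, for which we
+// hand out our own IMFRateControl / IMFRateSupport implementation; any other
+// service GUID is rejected.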
+IFACEMETHODIMP MFMediaSource::GetService(REFGUID aGuidService, REFIID aRiid, + LPVOID* aResult) { + if (!IsEqualGUID(aGuidService, MF_RATE_CONTROL_SERVICE)) { + return MF_E_UNSUPPORTED_SERVICE; + } + return QueryInterface(aRiid, aResult); +} + +IFACEMETHODIMP MFMediaSource::GetSlowestRate(MFRATE_DIRECTION aDirection, + BOOL aSupportsThinning, + float* aRate) { + AssertOnMFThreadPool(); + MOZ_ASSERT(aRate); + *aRate = 0.0f; + { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + } + if (aDirection == MFRATE_REVERSE) { + return MF_E_REVERSE_UNSUPPORTED; + } + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::GetFastestRate(MFRATE_DIRECTION aDirection, + BOOL aSupportsThinning, + float* aRate) { + AssertOnMFThreadPool(); + MOZ_ASSERT(aRate); + { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + *aRate = 0.0f; + return MF_E_SHUTDOWN; + } + } + if (aDirection == MFRATE_REVERSE) { + return MF_E_REVERSE_UNSUPPORTED; + } + *aRate = 16.0f; + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::IsRateSupported(BOOL aSupportsThinning, + float aNewRate, + float* aSupportedRate) { + AssertOnMFThreadPool(); + { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + } + + if (aSupportedRate) { + *aSupportedRate = 0.0f; + } + + MFRATE_DIRECTION direction = aNewRate >= 0 ? MFRATE_FORWARD : MFRATE_REVERSE; + float fastestRate = 0.0f, slowestRate = 0.0f; + GetFastestRate(direction, aSupportsThinning, &fastestRate); + GetSlowestRate(direction, aSupportsThinning, &slowestRate); + + if (aSupportsThinning) { + return MF_E_THINNING_UNSUPPORTED; + } else if (aNewRate < slowestRate) { + return MF_E_REVERSE_UNSUPPORTED; + } else if (aNewRate > fastestRate) { + return MF_E_UNSUPPORTED_RATE; + } + + if (aSupportedRate) { + *aSupportedRate = aNewRate; + } + return S_OK; +} + +IFACEMETHODIMP MFMediaSource::SetRate(BOOL aSupportsThinning, float aRate) { + AssertOnMFThreadPool(); + { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + } + + HRESULT hr = IsRateSupported(aSupportsThinning, aRate, &mPlaybackRate); + if (FAILED(hr)) { + LOG("Unsupported playback rate %f, error=%lX", aRate, hr); + return hr; + } + + PROPVARIANT varRate; + varRate.vt = VT_R4; + varRate.fltVal = mPlaybackRate; + LOG("Set playback rate %f", mPlaybackRate); + return QueueEvent(MESourceRateChanged, GUID_NULL, S_OK, &varRate); +} + +IFACEMETHODIMP MFMediaSource::GetRate(BOOL* aSupportsThinning, float* aRate) { + AssertOnMFThreadPool(); + { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + } + *aSupportsThinning = FALSE; + *aRate = mPlaybackRate; + return S_OK; +} + +HRESULT MFMediaSource::GetInputTrustAuthority(DWORD aStreamId, REFIID aRiid, + IUnknown** aITAOut) { + // TODO : add threading assertion, not sure what thread it would be running on + // now. + { + MutexAutoLock lock(mMutex); + if (mState == State::Shutdowned) { + return MF_E_SHUTDOWN; + } + } +#ifdef MOZ_WMF_CDM + if (!mCDMProxy) { + return MF_E_NOT_PROTECTED; + } + + // TODO : verify if this aStreamId is really matching our stream id or not. 
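+  // For now the id is looked up against the streams' descriptor ids.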
+ ComPtr stream = GetStreamByIndentifier(aStreamId); + if (!stream) { + return E_INVALIDARG; + } + + if (!stream->IsEncrypted()) { + return MF_E_NOT_PROTECTED; + } + + RETURN_IF_FAILED( + mCDMProxy->GetInputTrustAuthority(aStreamId, nullptr, 0, aRiid, aITAOut)); +#endif + return S_OK; +} + +MFMediaSource::State MFMediaSource::GetState() const { + MutexAutoLock lock(mMutex); + return mState; +} + +MFMediaEngineStream* MFMediaSource::GetAudioStream() { + MutexAutoLock lock(mMutex); + return mAudioStream.Get(); +} +MFMediaEngineStream* MFMediaSource::GetVideoStream() { + MutexAutoLock lock(mMutex); + return mVideoStream.Get(); +} + +MFMediaEngineStream* MFMediaSource::GetStreamByIndentifier( + DWORD aStreamId) const { + MutexAutoLock lock(mMutex); + if (mAudioStream && mAudioStream->DescriptorId() == aStreamId) { + return mAudioStream.Get(); + } + if (mVideoStream && mVideoStream->DescriptorId() == aStreamId) { + return mVideoStream.Get(); + } + return nullptr; +} + +#ifdef MOZ_WMF_CDM +void MFMediaSource::SetCDMProxy(MFCDMProxy* aCDMProxy) { + AssertOnManagerThread(); + mCDMProxy = aCDMProxy; + // TODO : ask cdm proxy to refresh trusted input +} +#endif + +bool MFMediaSource::IsEncrypted() const { + MutexAutoLock lock(mMutex); + return (mAudioStream && mAudioStream->IsEncrypted()) || + (mVideoStream && mVideoStream->IsEncrypted()); +} + +void MFMediaSource::AssertOnManagerThread() const { + MOZ_ASSERT(mManagerThread->IsOnCurrentThread()); +} + +void MFMediaSource::AssertOnMFThreadPool() const { + // We can't really assert the thread id from thread pool, because it would + // change any time. So we just assert this is not the manager thread, and use + // the explicit function name to indicate what thread we should run on. + MOZ_ASSERT(!mManagerThread->IsOnCurrentThread()); +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFMediaSource.h b/dom/media/platforms/wmf/MFMediaSource.h new file mode 100644 index 0000000000..735d53579e --- /dev/null +++ b/dom/media/platforms/wmf/MFMediaSource.h @@ -0,0 +1,188 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIASOURCE_H +#define DOM_MEDIA_PLATFORM_WMF_MFMEDIASOURCE_H + +#include +#include + +#include "MediaInfo.h" +#include "MediaEventSource.h" +#include "MFMediaEngineExtra.h" +#include "MFMediaEngineStream.h" +#include "mozilla/EnumSet.h" +#include "mozilla/TaskQueue.h" + +namespace mozilla { + +class MFCDMProxy; + +// An event to indicate a need for a certain type of sample. +struct SampleRequest { + SampleRequest(TrackInfo::TrackType aType, bool aIsEnough) + : mType(aType), mIsEnough(aIsEnough) {} + TrackInfo::TrackType mType; + bool mIsEnough; +}; + +/** + * MFMediaSource is a custom source for the media engine, the media engine would + * ask the source for the characteristics and the presentation descriptor to + * know how to react with the source. This source is also responsible to + * dispatch events to the media engine to notify the status changes. 
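+ * The source owns one MFMediaEngineStream per audio/video track and exposes
+ * their requests for more encoded data through RequestSampleEvent().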
+ * + * https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nn-mfidl-imfmediasource + */ +class MFMediaSource : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags< + Microsoft::WRL::RuntimeClassType::ClassicCom>, + IMFMediaSource, IMFRateControl, IMFRateSupport, + IMFGetService, IMFTrustedInput> { + public: + MFMediaSource(); + ~MFMediaSource(); + + HRESULT RuntimeClassInitialize(const Maybe& aAudio, + const Maybe& aVideo, + nsISerialEventTarget* aManagerThread); + + // Methods for IMFMediaSource + IFACEMETHODIMP GetCharacteristics(DWORD* aCharacteristics) override; + IFACEMETHODIMP CreatePresentationDescriptor( + IMFPresentationDescriptor** aPresentationDescriptor) override; + IFACEMETHODIMP Start(IMFPresentationDescriptor* aPresentationDescriptor, + const GUID* aGuidTimeFormat, + const PROPVARIANT* aStartPosition) override; + IFACEMETHODIMP Stop() override; + IFACEMETHODIMP Pause() override; + IFACEMETHODIMP Shutdown() override; + + // Methods for IMFMediaEventGenerator, IMFMediaSource derives from + // IMFMediaEventGenerator. + IFACEMETHODIMP GetEvent(DWORD aFlags, IMFMediaEvent** aEvent) override; + IFACEMETHODIMP BeginGetEvent(IMFAsyncCallback* aCallback, + IUnknown* aState) override; + IFACEMETHODIMP EndGetEvent(IMFAsyncResult* aResult, + IMFMediaEvent** aEvent) override; + IFACEMETHODIMP QueueEvent(MediaEventType aType, REFGUID aExtendedType, + HRESULT aStatus, + const PROPVARIANT* aValue) override; + + // IMFGetService + IFACEMETHODIMP GetService(REFGUID aGuidService, REFIID aRiid, + LPVOID* aResult) override; + + // IMFRateSupport + IFACEMETHODIMP GetSlowestRate(MFRATE_DIRECTION aDirection, + BOOL aSupportsThinning, float* aRate) override; + IFACEMETHODIMP GetFastestRate(MFRATE_DIRECTION aDirection, + BOOL aSupportsThinning, float* aRate) override; + IFACEMETHODIMP IsRateSupported(BOOL aSupportsThinning, float aNewRate, + float* aSupportedRate) override; + + // IMFRateControl + IFACEMETHODIMP SetRate(BOOL aSupportsThinning, float aRate) override; + IFACEMETHODIMP GetRate(BOOL* aSupportsThinning, float* aRate) override; + + // IMFTrustedInput + IFACEMETHODIMP GetInputTrustAuthority(DWORD aStreamId, REFIID aRiid, + IUnknown** aITAOut) override; + + MFMediaEngineStream* GetAudioStream(); + MFMediaEngineStream* GetVideoStream(); + + MFMediaEngineStream* GetStreamByIndentifier(DWORD aStreamId) const; + +#ifdef MOZ_WMF_CDM + void SetCDMProxy(MFCDMProxy* aCDMProxy); +#endif + + TaskQueue* GetTaskQueue() const { return mTaskQueue; } + + MediaEventSource& RequestSampleEvent() { + return mRequestSampleEvent; + } + + // Called from the content process to notify that no more encoded data in that + // type of track. + void NotifyEndOfStream(TrackInfo::TrackType aType); + + // Called from the MF stream to indicate that the stream has provided last + // encoded sample to the media engine. + void HandleStreamEnded(TrackInfo::TrackType aType); + + enum class State { + Initialized, + Started, + Stopped, + Paused, + Shutdowned, + }; + State GetState() const; + + void SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle, gfx::IntSize aDisplay); + + void ShutdownTaskQueue(); + + bool IsEncrypted() const; + + private: + void AssertOnManagerThread() const; + void AssertOnMFThreadPool() const; + + void NotifyEndOfStreamInternal(TrackInfo::TrackType aType); + + bool IsSeekable() const; + + // A thread-safe event queue. 
+ // https://docs.microsoft.com/en-us/windows/win32/medfound/media-event-generators#implementing-imfmediaeventgenerator + Microsoft::WRL::ComPtr mMediaEventQueue; + + // The thread used to run the engine streams' tasks. + RefPtr mTaskQueue; + + // The thread used to run the media source's tasks. + RefPtr mManagerThread; + + // MFMediaEngineStream will notify us when we need more sample. + friend class MFMediaEngineStream; + MediaEventProducer mRequestSampleEvent; + + MediaEventListener mAudioStreamEndedListener; + MediaEventListener mVideoStreamEndedListener; + + // This class would be run/accessed on two threads, MF thread pool and the + // manager thread. Following members could be used across threads so they need + // to be thread-safe. + + mutable Mutex mMutex{"MFMediaEngineSource"}; + + // True if the playback is ended. Use and modify on both the manager thread + // and MF thread pool. + bool mPresentationEnded MOZ_GUARDED_BY(mMutex); + bool mIsAudioEnded MOZ_GUARDED_BY(mMutex); + bool mIsVideoEnded MOZ_GUARDED_BY(mMutex); + + // Modify on MF thread pool and the manager thread, read on any threads. + State mState MOZ_GUARDED_BY(mMutex); + + Microsoft::WRL::ComPtr mAudioStream + MOZ_GUARDED_BY(mMutex); + Microsoft::WRL::ComPtr mVideoStream + MOZ_GUARDED_BY(mMutex); + + // Thread-safe members END + + // Modify and access on MF thread pool. + float mPlaybackRate = 0.0f; + +#ifdef MOZ_WMF_CDM + RefPtr mCDMProxy; +#endif +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIASOURCE_H diff --git a/dom/media/platforms/wmf/MFPMPHostWrapper.cpp b/dom/media/platforms/wmf/MFPMPHostWrapper.cpp new file mode 100644 index 0000000000..1e7ba89e7b --- /dev/null +++ b/dom/media/platforms/wmf/MFPMPHostWrapper.cpp @@ -0,0 +1,92 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFPMPHostWrapper.h" + +#include "MFMediaEngineUtils.h" +#include "WMF.h" +#include "mozilla/EMEUtils.h" + +namespace mozilla { + +using Microsoft::WRL::ComPtr; + +#define LOG(msg, ...) 
EME_LOG("MFPMPHostWrapper=%p, " msg, this, ##__VA_ARGS__) + +HRESULT MFPMPHostWrapper::RuntimeClassInitialize( + Microsoft::WRL::ComPtr& aHost) { + mPMPHost = aHost; + return S_OK; +} + +MFPMPHostWrapper::MFPMPHostWrapper() { + MOZ_COUNT_CTOR(MFPMPHostWrapper); + LOG("MFPMPHostWrapper created"); +} + +MFPMPHostWrapper::~MFPMPHostWrapper() { + MOZ_COUNT_DTOR(MFPMPHostWrapper); + LOG("MFPMPHostWrapper destroyed"); +}; + +STDMETHODIMP MFPMPHostWrapper::LockProcess() { + LOG("LockProcess"); + return mPMPHost->LockProcess(); +} + +STDMETHODIMP MFPMPHostWrapper::UnlockProcess() { + LOG("UnlockProcess"); + return mPMPHost->UnlockProcess(); +} + +STDMETHODIMP MFPMPHostWrapper::ActivateClassById(LPCWSTR aId, IStream* aStream, + REFIID aRiid, + void** aActivatedClass) { + LOG("ActivateClassById, id=%ls", aId); + ComPtr creationAttributes; + RETURN_IF_FAILED(wmf::MFCreateAttributes(&creationAttributes, 2)); + RETURN_IF_FAILED(creationAttributes->SetString(GUID_ClassName, aId)); + + if (aStream) { + STATSTG statstg; + RETURN_IF_FAILED( + aStream->Stat(&statstg, STATFLAG_NOOPEN | STATFLAG_NONAME)); + nsTArray streamBlob; + streamBlob.SetLength(statstg.cbSize.LowPart); + unsigned long readSize = 0; + RETURN_IF_FAILED( + aStream->Read(&streamBlob[0], streamBlob.Length(), &readSize)); + RETURN_IF_FAILED(creationAttributes->SetBlob(GUID_ObjectStream, + &streamBlob[0], readSize)); + } + + ComPtr outputStream; + RETURN_IF_FAILED(CreateStreamOnHGlobal(nullptr, TRUE, &outputStream)); + RETURN_IF_FAILED(wmf::MFSerializeAttributesToStream(creationAttributes.Get(), + 0, outputStream.Get())); + RETURN_IF_FAILED(outputStream->Seek({}, STREAM_SEEK_SET, nullptr)); + + ComPtr activator; + RETURN_IF_FAILED(mPMPHost->CreateObjectByCLSID( + CLSID_EMEStoreActivate, outputStream.Get(), IID_PPV_ARGS(&activator))); + RETURN_IF_FAILED(activator->ActivateObject(aRiid, aActivatedClass)); + if (aActivatedClass) { + LOG("Get class %p for id=%ls", *aActivatedClass, aId); + } else { + LOG("No class for id=%ls", aId); + } + LOG("Done ActivateClassById, id=%ls", aId); + return S_OK; +} + +void MFPMPHostWrapper::Shutdown() { + LOG("Shutdown"); + if (mPMPHost) { + mPMPHost = nullptr; + } +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFPMPHostWrapper.h b/dom/media/platforms/wmf/MFPMPHostWrapper.h new file mode 100644 index 0000000000..1036fd25d8 --- /dev/null +++ b/dom/media/platforms/wmf/MFPMPHostWrapper.h @@ -0,0 +1,44 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_PLATFORM_WMF_MPMPHOSTWRAPPER_H +#define DOM_MEDIA_PLATFORM_WMF_MPMPHOSTWRAPPER_H + +#include +#include + +#include "MFCDMExtra.h" + +namespace mozilla { + +// This class is used to create and manage PMP sessions. For PlayReady CDM, +// it needs to connect with IMFPMPHostApp first before generating any request. 
+// That behavior is undocumented on the mdsn, see more details in +// https://github.com/microsoft/media-foundation/issues/37#issuecomment-1197321484 +class MFPMPHostWrapper : public Microsoft::WRL::RuntimeClass< + Microsoft::WRL::RuntimeClassFlags< + Microsoft::WRL::RuntimeClassType::ClassicCom>, + IMFPMPHostApp> { + public: + MFPMPHostWrapper(); + ~MFPMPHostWrapper(); + + HRESULT RuntimeClassInitialize(Microsoft::WRL::ComPtr& aHost); + + STDMETHODIMP LockProcess() override; + + STDMETHODIMP UnlockProcess() override; + + STDMETHODIMP ActivateClassById(LPCWSTR aId, IStream* aStream, REFIID aRiid, + void** aActivatedClass) override; + + void Shutdown(); + + private: + Microsoft::WRL::ComPtr mPMPHost; +}; + +} // namespace mozilla + +#endif // DOM_MEDIA_PLATFORM_WMF_MPMPHOSTWRAPPER_H diff --git a/dom/media/platforms/wmf/MFTDecoder.cpp b/dom/media/platforms/wmf/MFTDecoder.cpp new file mode 100644 index 0000000000..6b66a9e399 --- /dev/null +++ b/dom/media/platforms/wmf/MFTDecoder.cpp @@ -0,0 +1,430 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFTDecoder.h" +#include "WMFUtils.h" +#include "mozilla/Logging.h" +#include "nsThreadUtils.h" +#include "mozilla/mscom/COMWrappers.h" +#include "mozilla/mscom/Utils.h" +#include "PlatformDecoderModule.h" + +#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__)) + +namespace mozilla { +MFTDecoder::MFTDecoder() { + memset(&mInputStreamInfo, 0, sizeof(MFT_INPUT_STREAM_INFO)); + memset(&mOutputStreamInfo, 0, sizeof(MFT_OUTPUT_STREAM_INFO)); +} + +MFTDecoder::~MFTDecoder() { + if (mActivate) { + // Releases all internal references to the created IMFTransform. + // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/nf-mfobjects-imfactivate-shutdownobject + mActivate->ShutdownObject(); + } +} + +HRESULT MFTDecoder::Create(const GUID& aCLSID) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + + HRESULT hr = mscom::wrapped::CoCreateInstance( + aCLSID, nullptr, CLSCTX_INPROC_SERVER, + IID_PPV_ARGS(static_cast(getter_AddRefs(mDecoder)))); + NS_WARNING_ASSERTION(SUCCEEDED(hr), "Failed to create MFT by CLSID"); + return hr; +} + +HRESULT +MFTDecoder::Create(const GUID& aCategory, const GUID& aInSubtype, + const GUID& aOutSubtype) { + // Note: IMFTransform is documented to only be safe on MTA threads. + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + + // Use video by default, but select audio if necessary. + const GUID major = aCategory == MFT_CATEGORY_AUDIO_DECODER + ? MFMediaType_Audio + : MFMediaType_Video; + + // Ignore null GUIDs to allow searching for all decoders supporting + // just one input or output type. + auto createInfo = [&major](const GUID& subtype) -> MFT_REGISTER_TYPE_INFO* { + if (IsEqualGUID(subtype, GUID_NULL)) { + return nullptr; + } + + MFT_REGISTER_TYPE_INFO* info = new MFT_REGISTER_TYPE_INFO(); + info->guidMajorType = major; + info->guidSubtype = subtype; + return info; + }; + const MFT_REGISTER_TYPE_INFO* inInfo = createInfo(aInSubtype); + const MFT_REGISTER_TYPE_INFO* outInfo = createInfo(aOutSubtype); + + // Request a decoder from the Windows API. 
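  // The block below is Media Foundation's usual enumerate-then-activate
  // pattern. A stripped-down caller-side sketch (hypothetical values: an
  // H.264 decoder producing NV12; error handling and cleanup guards omitted):
  //
  //   MFT_REGISTER_TYPE_INFO in = {MFMediaType_Video, MFVideoFormat_H264};
  //   MFT_REGISTER_TYPE_INFO out = {MFMediaType_Video, MFVideoFormat_NV12};
  //   IMFActivate** acts = nullptr;
  //   UINT32 actsNum = 0;
  //   MFTEnumEx(MFT_CATEGORY_VIDEO_DECODER, MFT_ENUM_FLAG_SORTANDFILTER,
  //             &in, &out, &acts, &actsNum);
  //   Microsoft::WRL::ComPtr<IMFTransform> mft;
  //   acts[0]->ActivateObject(IID_PPV_ARGS(&mft));  // best match comes first
  //   for (UINT32 i = 0; i < actsNum; ++i) acts[i]->Release();
  //   CoTaskMemFree(acts);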
+ HRESULT hr; + IMFActivate** acts = nullptr; + UINT32 actsNum = 0; + + hr = wmf::MFTEnumEx(aCategory, MFT_ENUM_FLAG_SORTANDFILTER, inInfo, outInfo, + &acts, &actsNum); + delete inInfo; + delete outInfo; + if (FAILED(hr)) { + NS_WARNING(nsPrintfCString("MFTEnumEx failed with code %lx", hr).get()); + return hr; + } + if (actsNum == 0) { + NS_WARNING("MFTEnumEx returned no IMFActivate instances"); + return WINCODEC_ERR_COMPONENTNOTFOUND; + } + auto guard = MakeScopeExit([&] { + // Start from index 1, acts[0] will be stored as a RefPtr to release later. + for (UINT32 i = 1; i < actsNum; i++) { + acts[i]->Release(); + } + CoTaskMemFree(acts); + }); + + // Create the IMFTransform to do the decoding. + // Note: Ideally we would cache the IMFActivate and call + // IMFActivate::DetachObject, but doing so causes the MFTs to fail on + // MFT_MESSAGE_SET_D3D_MANAGER. + mActivate = RefPtr(acts[0]); + hr = mActivate->ActivateObject( + IID_PPV_ARGS(static_cast(getter_AddRefs(mDecoder)))); + NS_WARNING_ASSERTION( + SUCCEEDED(hr), + nsPrintfCString("IMFActivate::ActivateObject failed with code %lx", hr) + .get()); + return hr; +} + +HRESULT +MFTDecoder::SetMediaTypes(IMFMediaType* aInputType, IMFMediaType* aOutputType, + std::function&& aCallback) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + + // Set the input type to the one the caller gave us... + HRESULT hr = mDecoder->SetInputType(0, aInputType, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + GUID currentSubtype = {0}; + hr = aOutputType->GetGUID(MF_MT_SUBTYPE, ¤tSubtype); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = SetDecoderOutputType(currentSubtype, aOutputType, std::move(aCallback)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = mDecoder->GetInputStreamInfo(0, &mInputStreamInfo); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + return S_OK; +} + +already_AddRefed MFTDecoder::GetAttributes() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + RefPtr attr; + HRESULT hr = mDecoder->GetAttributes(getter_AddRefs(attr)); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + return attr.forget(); +} + +already_AddRefed MFTDecoder::GetOutputStreamAttributes() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + RefPtr attr; + HRESULT hr = mDecoder->GetOutputStreamAttributes(0, getter_AddRefs(attr)); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + return attr.forget(); +} + +HRESULT +MFTDecoder::FindDecoderOutputType() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mOutputType, "SetDecoderTypes must have been called once"); + + return FindDecoderOutputTypeWithSubtype(mOutputSubType); +} + +HRESULT +MFTDecoder::FindDecoderOutputTypeWithSubtype(const GUID& aSubType) { + return SetDecoderOutputType(aSubType, nullptr, + [](IMFMediaType*) { return S_OK; }); +} + +HRESULT +MFTDecoder::SetDecoderOutputType( + const GUID& aSubType, IMFMediaType* aTypeToUse, + std::function&& aCallback) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER); + + if (!aTypeToUse) { + aTypeToUse = mOutputType; + } + + // Iterate the enumerate the output types, until we find one compatible + // with what we need. 
+ RefPtr outputType; + UINT32 typeIndex = 0; + while (SUCCEEDED(mDecoder->GetOutputAvailableType( + 0, typeIndex++, getter_AddRefs(outputType)))) { + GUID outSubtype = {0}; + HRESULT hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (aSubType == outSubtype) { + hr = aCallback(outputType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = mDecoder->SetOutputType(0, outputType, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = mDecoder->GetOutputStreamInfo(0, &mOutputStreamInfo); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + mMFTProvidesOutputSamples = IsFlagSet(mOutputStreamInfo.dwFlags, + MFT_OUTPUT_STREAM_PROVIDES_SAMPLES); + + mOutputType = outputType; + mOutputSubType = outSubtype; + + return S_OK; + } + outputType = nullptr; + } + return E_FAIL; +} + +HRESULT +MFTDecoder::SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER); + LOG("Send message '%s'", MFTMessageTypeToStr(aMsg)); + HRESULT hr = mDecoder->ProcessMessage(aMsg, aData); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + return S_OK; +} + +HRESULT +MFTDecoder::CreateInputSample(const uint8_t* aData, uint32_t aDataSize, + int64_t aTimestamp, int64_t aDuration, + RefPtr* aOutSample) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER); + + HRESULT hr; + RefPtr sample; + hr = wmf::MFCreateSample(getter_AddRefs(sample)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr buffer; + int32_t bufferSize = + std::max(uint32_t(mInputStreamInfo.cbSize), aDataSize); + UINT32 alignment = + (mInputStreamInfo.cbAlignment > 1) ? mInputStreamInfo.cbAlignment - 1 : 0; + hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, + getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + DWORD maxLength = 0; + DWORD currentLength = 0; + BYTE* dst = nullptr; + hr = buffer->Lock(&dst, &maxLength, ¤tLength); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + // Copy data into sample's buffer. + memcpy(dst, aData, aDataSize); + + hr = buffer->Unlock(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = buffer->SetCurrentLength(aDataSize); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = sample->AddBuffer(buffer); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = sample->SetSampleTime(UsecsToHNs(aTimestamp)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (aDuration == 0) { + // If the sample duration is 0, the decoder will try and estimate the + // duration. In practice this can lead to some wildly incorrect durations, + // as in bug 1560440. The Microsoft docs seem conflicting here with + // `IMFSample::SetSampleDuration` stating 'The duration can also be zero. + // This might be valid for some types of data.' However, + // `IMFSample::GetSampleDuration method` states 'If the retrieved duration + // is zero, or if the method returns MF_E_NO_SAMPLE_DURATION, the duration + // is unknown. In that case, it might be possible to calculate the duration + // from the media type--for example, by using the video frame rate or the + // audio sampling rate.' The latter of those seems to be how the decoder + // handles 0 duration, hence why it estimates. + // + // Since our demuxing pipeline can create 0 duration samples, and since the + // decoder will override them to something positive anyway, setting them to + // have a trivial duration seems like the lesser of evils. 
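  // (For scale: UsecsToHNs() multiplies by 10 to reach Media Foundation's
  // 100-nanosecond units, so the 1-microsecond placeholder below is stored
  // as a duration of 10, trivially small but non-zero.)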
+ aDuration = 1; + } + hr = sample->SetSampleDuration(UsecsToHNs(aDuration)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + *aOutSample = sample.forget(); + + return S_OK; +} + +HRESULT +MFTDecoder::CreateOutputSample(RefPtr* aOutSample) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER); + + HRESULT hr; + RefPtr sample; + hr = wmf::MFCreateSample(getter_AddRefs(sample)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr buffer; + int32_t bufferSize = mOutputStreamInfo.cbSize; + UINT32 alignment = (mOutputStreamInfo.cbAlignment > 1) + ? mOutputStreamInfo.cbAlignment - 1 + : 0; + hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment, + getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = sample->AddBuffer(buffer); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + *aOutSample = sample.forget(); + + return S_OK; +} + +HRESULT +MFTDecoder::Output(RefPtr* aOutput) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER); + + HRESULT hr; + + MFT_OUTPUT_DATA_BUFFER output = {0}; + + bool providedSample = false; + RefPtr sample; + if (*aOutput) { + output.pSample = *aOutput; + providedSample = true; + } else if (!mMFTProvidesOutputSamples) { + hr = CreateOutputSample(&sample); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + output.pSample = sample; + } + + DWORD status = 0; + hr = mDecoder->ProcessOutput(0, 1, &output, &status); + if (output.pEvents) { + // We must release this, as per the IMFTransform::ProcessOutput() + // MSDN documentation. + output.pEvents->Release(); + output.pEvents = nullptr; + } + + if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + return MF_E_TRANSFORM_STREAM_CHANGE; + } + + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + // Not enough input to produce output. This is an expected failure, + // so don't warn on encountering it. + return hr; + } + // Treat other errors as unexpected, and warn. + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (!output.pSample) { + return S_OK; + } + + if (mDiscontinuity) { + output.pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE); + mDiscontinuity = false; + } + + *aOutput = output.pSample; // AddRefs + if (mMFTProvidesOutputSamples && !providedSample) { + // If the MFT is providing samples, we must release the sample here. + // Typically only the H.264 MFT provides samples when using DXVA, + // and it always re-uses the same sample, so if we don't release it + // MFT::ProcessOutput() deadlocks waiting for the sample to be released. + output.pSample->Release(); + output.pSample = nullptr; + } + + return S_OK; +} + +HRESULT +MFTDecoder::Input(const uint8_t* aData, uint32_t aDataSize, int64_t aTimestamp, + int64_t aDuration) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER); + + RefPtr input; + HRESULT hr = + CreateInputSample(aData, aDataSize, aTimestamp, aDuration, &input); + NS_ENSURE_TRUE(SUCCEEDED(hr) && input != nullptr, hr); + + return Input(input); +} + +HRESULT +MFTDecoder::Input(IMFSample* aSample) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + HRESULT hr = mDecoder->ProcessInput(0, aSample, 0); + if (hr == MF_E_NOTACCEPTING) { + // MFT *already* has enough data to produce a sample. Retrieve it. 
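    // Callers are expected to react by pulling samples with Output() until it
    // reports MF_E_TRANSFORM_NEED_MORE_INPUT and then resubmitting this same
    // sample; see the notes on Input()/Output() in MFTDecoder.h.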
+ return MF_E_NOTACCEPTING; + } + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + return S_OK; +} + +HRESULT +MFTDecoder::Flush() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + HRESULT hr = SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + mDiscontinuity = true; + + return S_OK; +} + +HRESULT +MFTDecoder::GetInputMediaType(RefPtr& aMediaType) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder, E_POINTER); + return mDecoder->GetInputCurrentType(0, getter_AddRefs(aMediaType)); +} + +HRESULT +MFTDecoder::GetOutputMediaType(RefPtr& aMediaType) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + NS_ENSURE_TRUE(mDecoder, E_POINTER); + return mDecoder->GetOutputCurrentType(0, getter_AddRefs(aMediaType)); +} + +#undef LOG + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/MFTDecoder.h b/dom/media/platforms/wmf/MFTDecoder.h new file mode 100644 index 0000000000..7af99e550d --- /dev/null +++ b/dom/media/platforms/wmf/MFTDecoder.h @@ -0,0 +1,132 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(MFTDecoder_h_) +# define MFTDecoder_h_ + +# include "WMF.h" +# include "mozilla/ReentrantMonitor.h" +# include "mozilla/RefPtr.h" +# include "nsIThread.h" + +namespace mozilla { + +class MFTDecoder final { + ~MFTDecoder(); + + public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFTDecoder) + + MFTDecoder(); + + // Creates the MFT by COM class ID. + // + // Params: + // - aCLSID The COM class ID of the decoder. + HRESULT Create(const GUID& aCLSID); + + // Creates the MFT by querying a category and media subtype. + // First thing to do as part of setup. + // + // Params: + // - aCategory the GUID of the MFT category to use. + // - aInSubType the GUID of the input MFT media type to use. + // GUID_NULL may be used as a wildcard. + // - aOutSubType the GUID of the output MFT media type to use. + // GUID_NULL may be used as a wildcard. + HRESULT Create(const GUID& aCategory, const GUID& aInSubtype, + const GUID& aOutSubtype); + + // Sets the input and output media types. Call after Init(). + // + // Params: + // - aInputType needs at least major and minor types set. + // - aOutputType needs at least major and minor types set. + // This is used to select the matching output type out + // of all the available output types of the MFT. + HRESULT SetMediaTypes( + IMFMediaType* aInputType, IMFMediaType* aOutputType, + std::function&& aCallback = + [](IMFMediaType* aOutput) { return S_OK; }); + + // Returns the MFT's global IMFAttributes object. + already_AddRefed GetAttributes(); + + // Returns the MFT's IMFAttributes object for an output stream. + already_AddRefed GetOutputStreamAttributes(); + + // Retrieves the media type being input. + HRESULT GetInputMediaType(RefPtr& aMediaType); + + // Retrieves the media type being output. This may not be valid until + // the first sample is decoded. + HRESULT GetOutputMediaType(RefPtr& aMediaType); + const GUID& GetOutputMediaSubType() const { return mOutputSubType; } + + // Submits data into the MFT for processing. + // + // Returns: + // - MF_E_NOTACCEPTING if the decoder can't accept input. The data + // must be resubmitted after Output() stops producing output. 
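  //
  // A minimal sketch of the intended call pattern (hypothetical caller on an
  // MTA thread; `inputType`/`outputType` stand for pre-built IMFMediaType
  // objects, and error checks are dropped for brevity):
  //
  //   RefPtr<MFTDecoder> decoder = new MFTDecoder();
  //   decoder->Create(MFT_CATEGORY_AUDIO_DECODER, MFAudioFormat_AAC, GUID_NULL);
  //   decoder->SetMediaTypes(inputType, outputType);
  //   HRESULT hr = decoder->Input(data, size, timeUsecs, durationUsecs);
  //   if (hr == MF_E_NOTACCEPTING) {
  //     RefPtr<IMFSample> out;
  //     while (decoder->Output(&out) == S_OK) {
  //       // consume `out`, then drop the reference before the next call
  //       out = nullptr;
  //     }
  //     hr = decoder->Input(data, size, timeUsecs, durationUsecs);
  //   }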
+ HRESULT Input(const uint8_t* aData, uint32_t aDataSize, + int64_t aTimestampUsecs, int64_t aDurationUsecs); + HRESULT Input(IMFSample* aSample); + + HRESULT CreateInputSample(const uint8_t* aData, uint32_t aDataSize, + int64_t aTimestampUsecs, int64_t aDurationUsecs, + RefPtr* aOutSample); + + // Retrieves output from the MFT. Call this once Input() returns + // MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264 + // decoder MFT in particular) can't handle it if clients hold onto + // references to the output IMFSample, so don't do that. + // + // Returns: + // - MF_E_TRANSFORM_STREAM_CHANGE if the underlying stream output + // type changed. Retrieve the output media type and reconfig client, + // else you may misinterpret the MFT's output. + // - MF_E_TRANSFORM_NEED_MORE_INPUT if no output can be produced + // due to lack of input. + // - S_OK if an output frame is produced. + HRESULT Output(RefPtr* aOutput); + + // Sends a flush message to the MFT. This causes it to discard all + // input data. Use before seeking. + HRESULT Flush(); + + // Sends a message to the MFT. + HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData); + + HRESULT FindDecoderOutputTypeWithSubtype(const GUID& aSubType); + HRESULT FindDecoderOutputType(); + + private: + // Will search a suitable MediaType using aTypeToUse if set, if not will + // use the current mOutputType. + HRESULT SetDecoderOutputType( + const GUID& aSubType, IMFMediaType* aTypeToUse, + std::function&& aCallback); + HRESULT CreateOutputSample(RefPtr* aOutSample); + + MFT_INPUT_STREAM_INFO mInputStreamInfo; + MFT_OUTPUT_STREAM_INFO mOutputStreamInfo; + + RefPtr mActivate; + RefPtr mDecoder; + + RefPtr mOutputType; + GUID mOutputSubType; + + // True if the IMFTransform allocates the samples that it returns. + bool mMFTProvidesOutputSamples = false; + + // True if we need to mark the next sample as a discontinuity. + bool mDiscontinuity = true; +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/platforms/wmf/MFTEncoder.cpp b/dom/media/platforms/wmf/MFTEncoder.cpp new file mode 100644 index 0000000000..410da2733c --- /dev/null +++ b/dom/media/platforms/wmf/MFTEncoder.cpp @@ -0,0 +1,754 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "MFTEncoder.h" +#include "mozilla/Logging.h" +#include "mozilla/WindowsProcessMitigations.h" +#include "mozilla/StaticPrefs_media.h" +#include "mozilla/mscom/Utils.h" +#include "WMFUtils.h" + +// Missing from MinGW. +#ifndef CODECAPI_AVEncAdaptiveMode +# define STATIC_CODECAPI_AVEncAdaptiveMode \ + 0x4419b185, 0xda1f, 0x4f53, 0xbc, 0x76, 0x9, 0x7d, 0xc, 0x1e, 0xfb, 0x1e +DEFINE_CODECAPI_GUID(AVEncAdaptiveMode, "4419b185-da1f-4f53-bc76-097d0c1efb1e", + 0x4419b185, 0xda1f, 0x4f53, 0xbc, 0x76, 0x9, 0x7d, 0xc, + 0x1e, 0xfb, 0x1e) +# define CODECAPI_AVEncAdaptiveMode \ + DEFINE_CODECAPI_GUIDNAMED(AVEncAdaptiveMode) +#endif +#ifndef MF_E_NO_EVENTS_AVAILABLE +# define MF_E_NO_EVENTS_AVAILABLE _HRESULT_TYPEDEF_(0xC00D3E80L) +#endif + +#define MFT_ENC_LOGD(arg, ...) \ + MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Debug, \ + ("MFTEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) +#define MFT_ENC_LOGE(arg, ...) 
\ + MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Error, \ + ("MFTEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) +#define MFT_ENC_SLOGD(arg, ...) \ + MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Debug, \ + ("MFTEncoder::%s: " arg, __func__, ##__VA_ARGS__)) +#define MFT_ENC_SLOGE(arg, ...) \ + MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Error, \ + ("MFTEncoder::%s: " arg, __func__, ##__VA_ARGS__)) + +namespace mozilla { +extern LazyLogModule sPEMLog; + +static const char* ErrorStr(HRESULT hr) { + switch (hr) { + case S_OK: + return "OK"; + case MF_E_INVALIDMEDIATYPE: + return "INVALIDMEDIATYPE"; + case MF_E_INVALIDSTREAMNUMBER: + return "INVALIDSTREAMNUMBER"; + case MF_E_INVALIDTYPE: + return "INVALIDTYPE"; + case MF_E_TRANSFORM_CANNOT_CHANGE_MEDIATYPE_WHILE_PROCESSING: + return "TRANSFORM_PROCESSING"; + case MF_E_TRANSFORM_TYPE_NOT_SET: + return "TRANSFORM_TYPE_NO_SET"; + case MF_E_UNSUPPORTED_D3D_TYPE: + return "UNSUPPORTED_D3D_TYPE"; + case E_INVALIDARG: + return "INVALIDARG"; + case MF_E_NO_SAMPLE_DURATION: + return "NO_SAMPLE_DURATION"; + case MF_E_NO_SAMPLE_TIMESTAMP: + return "NO_SAMPLE_TIMESTAMP"; + case MF_E_NOTACCEPTING: + return "NOTACCEPTING"; + case MF_E_ATTRIBUTENOTFOUND: + return "NOTFOUND"; + case MF_E_BUFFERTOOSMALL: + return "BUFFERTOOSMALL"; + case E_NOTIMPL: + return "NOTIMPL"; + default: + return "OTHER"; + } +} + +static const char* CodecStr(const GUID& aGUID) { + if (IsEqualGUID(aGUID, MFVideoFormat_H264)) { + return "H.264"; + } else if (IsEqualGUID(aGUID, MFVideoFormat_VP80)) { + return "VP8"; + } else if (IsEqualGUID(aGUID, MFVideoFormat_VP90)) { + return "VP9"; + } else { + return "Unsupported codec"; + } +} + +static UINT32 EnumEncoders(const GUID& aSubtype, IMFActivate**& aActivates, + const bool aUseHW = true) { + UINT32 num = 0; + MFT_REGISTER_TYPE_INFO inType = {.guidMajorType = MFMediaType_Video, + .guidSubtype = MFVideoFormat_NV12}; + MFT_REGISTER_TYPE_INFO outType = {.guidMajorType = MFMediaType_Video, + .guidSubtype = aSubtype}; + HRESULT hr = S_OK; + if (aUseHW) { + if (IsWin32kLockedDown()) { + // Some HW encoders use DXGI API and crash when locked down. + // TODO: move HW encoding out of content process (bug 1754531). + MFT_ENC_SLOGD("Don't use HW encoder when win32k locked down."); + return 0; + } + + hr = wmf::MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER, + MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER, + &inType, &outType, &aActivates, &num); + if (FAILED(hr)) { + MFT_ENC_SLOGE("enumerate HW encoder for %s: error=%s", CodecStr(aSubtype), + ErrorStr(hr)); + return 0; + } + if (num > 0) { + return num; + } + } + + // Try software MFTs. + hr = wmf::MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER, + MFT_ENUM_FLAG_SYNCMFT | MFT_ENUM_FLAG_ASYNCMFT | + MFT_ENUM_FLAG_SORTANDFILTER, + &inType, &outType, &aActivates, &num); + if (FAILED(hr)) { + MFT_ENC_SLOGE("enumerate SW encoder for %s: error=%s", CodecStr(aSubtype), + ErrorStr(hr)); + return 0; + } + if (num == 0) { + MFT_ENC_SLOGD("cannot find encoder for %s", CodecStr(aSubtype)); + } + return num; +} + +static HRESULT GetFriendlyName(IMFActivate* aAttributes, nsCString& aName) { + UINT32 len = 0; + HRESULT hr = aAttributes->GetStringLength(MFT_FRIENDLY_NAME_Attribute, &len); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + if (len > 0) { + ++len; // '\0'. 
+ WCHAR name[len]; + if (SUCCEEDED(aAttributes->GetString(MFT_FRIENDLY_NAME_Attribute, name, len, + nullptr))) { + aName.Append(NS_ConvertUTF16toUTF8(name)); + } + } + + if (aName.Length() == 0) { + aName.Append("Unknown MFT"); + } + + return S_OK; +} + +static void PopulateEncoderInfo(const GUID& aSubtype, + nsTArray& aInfos) { + IMFActivate** activates = nullptr; + UINT32 num = EnumEncoders(aSubtype, activates); + for (UINT32 i = 0; i < num; ++i) { + MFTEncoder::Info info = {.mSubtype = aSubtype}; + GetFriendlyName(activates[i], info.mName); + aInfos.AppendElement(info); + MFT_ENC_SLOGD(" [%s] %s\n", CodecStr(aSubtype), info.mName.Data()); + activates[i]->Release(); + activates[i] = nullptr; + } + CoTaskMemFree(activates); +} + +Maybe MFTEncoder::GetInfo(const GUID& aSubtype) { + nsTArray& infos = Infos(); + + for (auto i : infos) { + if (IsEqualGUID(aSubtype, i.mSubtype)) { + return Some(i); + } + } + return Nothing(); +} + +nsCString MFTEncoder::GetFriendlyName(const GUID& aSubtype) { + Maybe info = GetInfo(aSubtype); + + return info ? info.ref().mName : "???"_ns; +} + +// Called only once by Infos(). +nsTArray MFTEncoder::Enumerate() { + nsTArray infos; + + if (!wmf::MediaFoundationInitializer::HasInitialized()) { + MFT_ENC_SLOGE("cannot init Media Foundation"); + return infos; + } + + PopulateEncoderInfo(MFVideoFormat_H264, infos); + PopulateEncoderInfo(MFVideoFormat_VP90, infos); + PopulateEncoderInfo(MFVideoFormat_VP80, infos); + + return infos; +} + +nsTArray& MFTEncoder::Infos() { + static nsTArray infos = Enumerate(); + return infos; +} + +already_AddRefed MFTEncoder::CreateFactory(const GUID& aSubtype) { + IMFActivate** activates = nullptr; + UINT32 num = EnumEncoders(aSubtype, activates, !mHardwareNotAllowed); + if (num == 0) { + return nullptr; + } + + // Keep the first and throw out others, if there is any. + RefPtr factory = activates[0]; + activates[0] = nullptr; + for (UINT32 i = 1; i < num; ++i) { + activates[i]->Release(); + activates[i] = nullptr; + } + CoTaskMemFree(activates); + + return factory.forget(); +} + +HRESULT MFTEncoder::Create(const GUID& aSubtype) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(!mEncoder); + + RefPtr factory = CreateFactory(aSubtype); + if (!factory) { + return E_FAIL; + } + + // Create MFT via the activation object. + RefPtr encoder; + HRESULT hr = factory->ActivateObject( + IID_PPV_ARGS(static_cast(getter_AddRefs(encoder)))); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr config; + // Avoid IID_PPV_ARGS() here for MingGW fails to declare UUID for ICodecAPI. + hr = encoder->QueryInterface(IID_ICodecAPI, getter_AddRefs(config)); + if (FAILED(hr)) { + encoder = nullptr; + factory->ShutdownObject(); + return hr; + } + + mFactory = std::move(factory); + mEncoder = std::move(encoder); + mConfig = std::move(config); + return S_OK; +} + +HRESULT +MFTEncoder::Destroy() { + if (!mEncoder) { + return S_OK; + } + + mEncoder = nullptr; + mConfig = nullptr; + // Release MFT resources via activation object. + HRESULT hr = mFactory->ShutdownObject(); + mFactory = nullptr; + + return hr; +} + +HRESULT +MFTEncoder::SetMediaTypes(IMFMediaType* aInputType, IMFMediaType* aOutputType) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(aInputType && aOutputType); + + AsyncMFTResult asyncMFT = AttemptEnableAsync(); + NS_ENSURE_TRUE(asyncMFT.isOk(), asyncMFT.unwrapErr()); + + HRESULT hr = GetStreamIDs(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + // Always set encoder output type before input. 
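  // The caller-supplied types are ordinary IMFMediaType objects. A hedged
  // sketch of what the output half of an H.264 pair might look like (the
  // 1280x720 / 30 fps / 2 Mbps figures are placeholders, not the attribute
  // set actually used by callers in this patch):
  //
  //   RefPtr<IMFMediaType> outType;
  //   wmf::MFCreateMediaType(getter_AddRefs(outType));
  //   outType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
  //   outType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_H264);
  //   outType->SetUINT32(MF_MT_AVG_BITRATE, 2000000);
  //   outType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
  //   MFSetAttributeSize(outType.get(), MF_MT_FRAME_SIZE, 1280, 720);
  //   MFSetAttributeRatio(outType.get(), MF_MT_FRAME_RATE, 30, 1);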
+ hr = mEncoder->SetOutputType(mOutputStreamID, aOutputType, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + NS_ENSURE_TRUE(MatchInputSubtype(aInputType) != GUID_NULL, + MF_E_INVALIDMEDIATYPE); + + hr = mEncoder->SetInputType(mInputStreamID, aInputType, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = mEncoder->GetInputStreamInfo(mInputStreamID, &mInputStreamInfo); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = mEncoder->GetOutputStreamInfo(mInputStreamID, &mOutputStreamInfo); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + mOutputStreamProvidesSample = + IsFlagSet(mOutputStreamInfo.dwFlags, MFT_OUTPUT_STREAM_PROVIDES_SAMPLES); + + hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (asyncMFT.unwrap()) { + RefPtr source; + hr = mEncoder->QueryInterface(IID_PPV_ARGS( + static_cast(getter_AddRefs(source)))); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + mEventSource.SetAsyncEventGenerator(source.forget()); + } else { + mEventSource.InitSyncMFTEventQueue(); + } + + mNumNeedInput = 0; + return S_OK; +} + +// Async MFT won't work without unlocking. See +// https://docs.microsoft.com/en-us/windows/win32/medfound/asynchronous-mfts#unlocking-asynchronous-mfts +MFTEncoder::AsyncMFTResult MFTEncoder::AttemptEnableAsync() { + IMFAttributes* pAttributes = nullptr; + HRESULT hr = mEncoder->GetAttributes(&pAttributes); + if (FAILED(hr)) { + return AsyncMFTResult(hr); + } + + bool async = + MFGetAttributeUINT32(pAttributes, MF_TRANSFORM_ASYNC, FALSE) == TRUE; + if (async) { + hr = pAttributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE); + } else { + hr = S_OK; + } + pAttributes->Release(); + + return SUCCEEDED(hr) ? AsyncMFTResult(async) : AsyncMFTResult(hr); +} + +HRESULT MFTEncoder::GetStreamIDs() { + DWORD numIns; + DWORD numOuts; + HRESULT hr = mEncoder->GetStreamCount(&numIns, &numOuts); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + if (numIns < 1 || numOuts < 1) { + MFT_ENC_LOGE("stream count error"); + return MF_E_INVALIDSTREAMNUMBER; + } + + DWORD inIDs[numIns]; + DWORD outIDs[numOuts]; + hr = mEncoder->GetStreamIDs(numIns, inIDs, numOuts, outIDs); + if (SUCCEEDED(hr)) { + mInputStreamID = inIDs[0]; + mOutputStreamID = outIDs[0]; + } else if (hr == E_NOTIMPL) { + mInputStreamID = 0; + mOutputStreamID = 0; + } else { + MFT_ENC_LOGE("failed to get stream IDs"); + return hr; + } + return S_OK; +} + +GUID MFTEncoder::MatchInputSubtype(IMFMediaType* aInputType) { + MOZ_ASSERT(mEncoder); + MOZ_ASSERT(aInputType); + + GUID desired = GUID_NULL; + HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &desired); + NS_ENSURE_TRUE(SUCCEEDED(hr), GUID_NULL); + MOZ_ASSERT(desired != GUID_NULL); + + DWORD i = 0; + IMFMediaType* inputType = nullptr; + GUID preferred = GUID_NULL; + while (true) { + hr = mEncoder->GetInputAvailableType(mInputStreamID, i, &inputType); + if (hr == MF_E_NO_MORE_TYPES) { + break; + } + NS_ENSURE_TRUE(SUCCEEDED(hr), GUID_NULL); + + GUID sub = GUID_NULL; + hr = inputType->GetGUID(MF_MT_SUBTYPE, &sub); + NS_ENSURE_TRUE(SUCCEEDED(hr), GUID_NULL); + + if (IsEqualGUID(desired, sub)) { + preferred = desired; + break; + } + ++i; + } + + return IsEqualGUID(preferred, desired) ? 
preferred : GUID_NULL; +} + +HRESULT +MFTEncoder::SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + + return mEncoder->ProcessMessage(aMsg, aData); +} + +HRESULT MFTEncoder::SetModes(UINT32 aBitsPerSec) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mConfig); + + VARIANT var; + var.vt = VT_UI4; + var.ulVal = eAVEncCommonRateControlMode_CBR; + HRESULT hr = mConfig->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + var.ulVal = aBitsPerSec; + hr = mConfig->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (SUCCEEDED(mConfig->IsModifiable(&CODECAPI_AVEncAdaptiveMode))) { + var.ulVal = eAVEncAdaptiveMode_Resolution; + hr = mConfig->SetValue(&CODECAPI_AVEncAdaptiveMode, &var); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + + if (SUCCEEDED(mConfig->IsModifiable(&CODECAPI_AVLowLatencyMode))) { + var.vt = VT_BOOL; + var.boolVal = VARIANT_TRUE; + hr = mConfig->SetValue(&CODECAPI_AVLowLatencyMode, &var); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + + return S_OK; +} + +HRESULT +MFTEncoder::SetBitrate(UINT32 aBitsPerSec) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mConfig); + + VARIANT var = {.vt = VT_UI4, .ulVal = aBitsPerSec}; + return mConfig->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var); +} + +static HRESULT CreateSample(RefPtr* aOutSample, DWORD aSize, + DWORD aAlignment) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + + HRESULT hr; + RefPtr sample; + hr = wmf::MFCreateSample(getter_AddRefs(sample)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + RefPtr buffer; + hr = wmf::MFCreateAlignedMemoryBuffer(aSize, aAlignment, + getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = sample->AddBuffer(buffer); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + *aOutSample = sample.forget(); + + return S_OK; +} + +HRESULT +MFTEncoder::CreateInputSample(RefPtr* aSample, size_t aSize) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + + return CreateSample( + aSample, aSize, + mInputStreamInfo.cbAlignment > 0 ? mInputStreamInfo.cbAlignment - 1 : 0); +} + +HRESULT +MFTEncoder::PushInput(RefPtr&& aInput) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + MOZ_ASSERT(aInput); + + mPendingInputs.Push(aInput.forget()); + if (mEventSource.IsSync() && mNumNeedInput == 0) { + // To step 2 in + // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data + mNumNeedInput++; + } + + HRESULT hr = ProcessInput(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + return ProcessEvents(); +} + +HRESULT MFTEncoder::ProcessInput() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + + if (mNumNeedInput == 0 || mPendingInputs.GetSize() == 0) { + return S_OK; + } + + RefPtr input = mPendingInputs.PopFront(); + HRESULT hr = mEncoder->ProcessInput(mInputStreamID, input, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + --mNumNeedInput; + + if (!mEventSource.IsSync()) { + return S_OK; + } + // For sync MFT: Step 3 in + // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data + DWORD flags = 0; + hr = mEncoder->GetOutputStatus(&flags); + MediaEventType evType = MEUnknown; + switch (hr) { + case S_OK: + evType = flags == MFT_OUTPUT_STATUS_SAMPLE_READY + ? METransformHaveOutput // To step 4: ProcessOutput(). + : METransformNeedInput; // To step 2: ProcessInput(). + break; + case E_NOTIMPL: + evType = METransformHaveOutput; // To step 4: ProcessOutput(). 
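        // GetOutputStatus() is optional for an MFT; E_NOTIMPL only means the
        // encoder cannot report readiness, so optimistically try to fetch
        // output rather than stalling the sync processing loop.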
+ break; + default: + MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("undefined output status"); + return hr; + } + return mEventSource.QueueSyncMFTEvent(evType); +} + +HRESULT MFTEncoder::ProcessEvents() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + + HRESULT hr = E_FAIL; + while (true) { + Event event = mEventSource.GetEvent(); + if (event.isErr()) { + hr = event.unwrapErr(); + break; + } + + MediaEventType evType = event.unwrap(); + switch (evType) { + case METransformNeedInput: + ++mNumNeedInput; + hr = ProcessInput(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + break; + case METransformHaveOutput: + hr = ProcessOutput(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + break; + case METransformDrainComplete: + mDrainState = DrainState::DRAINED; + break; + default: + MFT_ENC_LOGE("unsupported event: %lx", evType); + } + } + + switch (hr) { + case MF_E_NO_EVENTS_AVAILABLE: + return S_OK; + case MF_E_MULTIPLE_SUBSCRIBERS: + default: + MFT_ENC_LOGE("failed to get event: %s", ErrorStr(hr)); + return hr; + } +} + +HRESULT MFTEncoder::ProcessOutput() { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + + MFT_OUTPUT_DATA_BUFFER output = {.dwStreamID = mOutputStreamID, + .pSample = nullptr, + .dwStatus = 0, + .pEvents = nullptr}; + RefPtr sample; + HRESULT hr = E_FAIL; + if (!mOutputStreamProvidesSample) { + hr = CreateSample(&sample, mOutputStreamInfo.cbSize, + mOutputStreamInfo.cbAlignment > 1 + ? mOutputStreamInfo.cbAlignment - 1 + : 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + output.pSample = sample; + } + + DWORD status = 0; + hr = mEncoder->ProcessOutput(0, 1, &output, &status); + if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + MFT_ENC_LOGD("output stream change"); + if (output.dwStatus & MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE) { + // Follow the instructions in Microsoft doc: + // https://docs.microsoft.com/en-us/windows/win32/medfound/handling-stream-changes#output-type + IMFMediaType* outputType = nullptr; + hr = mEncoder->GetOutputAvailableType(mOutputStreamID, 0, &outputType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + hr = mEncoder->SetOutputType(mOutputStreamID, outputType, 0); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + return MF_E_TRANSFORM_STREAM_CHANGE; + } + + // Step 8 in + // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + MOZ_ASSERT(mEventSource.IsSync()); + MOZ_ASSERT(mDrainState == DrainState::DRAINING); + + mEventSource.QueueSyncMFTEvent(METransformDrainComplete); + return S_OK; + } + + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + mOutputs.AppendElement(output.pSample); + if (mOutputStreamProvidesSample) { + // Release MFT provided sample. + output.pSample->Release(); + output.pSample = nullptr; + } + + return S_OK; +} + +HRESULT MFTEncoder::TakeOutput(nsTArray>& aOutput) { + MOZ_ASSERT(aOutput.Length() == 0); + aOutput.SwapElements(mOutputs); + return S_OK; +} + +HRESULT MFTEncoder::Drain(nsTArray>& aOutput) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + MOZ_ASSERT(aOutput.Length() == 0); + + switch (mDrainState) { + case DrainState::DRAINABLE: + // Exhaust pending inputs. 
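      // Drain() is the end-of-stream half of the caller-side protocol. A
      // rough sketch (illustrative only; `inType`, `outType`, `bitsPerSec`
      // and `frames` are placeholders, and real callers such as
      // WMFMediaDataEncoder add their own threading and error handling):
      //
      //   RefPtr<MFTEncoder> encoder = new MFTEncoder(/* aHardwareNotAllowed */ false);
      //   encoder->Create(MFVideoFormat_H264);
      //   encoder->SetMediaTypes(inType, outType);
      //   encoder->SetModes(bitsPerSec);
      //   for (RefPtr<IMFSample>& frame : frames) {
      //     encoder->PushInput(std::move(frame));
      //     nsTArray<RefPtr<IMFSample>> ready;
      //     encoder->TakeOutput(ready);  // may legitimately stay empty
      //   }
      //   nsTArray<RefPtr<IMFSample>> tail;
      //   encoder->Drain(tail);          // flushes whatever is still queued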
+ while (mPendingInputs.GetSize() > 0) { + if (mEventSource.IsSync()) { + // Step 5 in + // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data + mEventSource.QueueSyncMFTEvent(METransformNeedInput); + } + HRESULT hr = ProcessEvents(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0); + mDrainState = DrainState::DRAINING; + [[fallthrough]]; // To collect and return outputs. + case DrainState::DRAINING: + // Collect remaining outputs. + while (mOutputs.Length() == 0 && mDrainState != DrainState::DRAINED) { + if (mEventSource.IsSync()) { + // Step 8 in + // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data + mEventSource.QueueSyncMFTEvent(METransformHaveOutput); + } + HRESULT hr = ProcessEvents(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + [[fallthrough]]; // To return outputs. + case DrainState::DRAINED: + aOutput.SwapElements(mOutputs); + return S_OK; + } +} + +HRESULT MFTEncoder::GetMPEGSequenceHeader(nsTArray& aHeader) { + MOZ_ASSERT(mscom::IsCurrentThreadMTA()); + MOZ_ASSERT(mEncoder); + MOZ_ASSERT(aHeader.Length() == 0); + + RefPtr outputType; + HRESULT hr = mEncoder->GetOutputCurrentType(mOutputStreamID, + getter_AddRefs(outputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + UINT32 length = 0; + hr = outputType->GetBlobSize(MF_MT_MPEG_SEQUENCE_HEADER, &length); + if (hr == MF_E_ATTRIBUTENOTFOUND || length == 0) { + return S_OK; + } + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + aHeader.SetCapacity(length); + hr = outputType->GetBlob(MF_MT_MPEG_SEQUENCE_HEADER, aHeader.Elements(), + length, nullptr); + aHeader.SetLength(SUCCEEDED(hr) ? length : 0); + + return hr; +} + +MFTEncoder::Event MFTEncoder::EventSource::GetEvent() { + if (IsSync()) { + return GetSyncMFTEvent(); + } + + RefPtr event; + HRESULT hr = mImpl.as>()->GetEvent( + MF_EVENT_FLAG_NO_WAIT, getter_AddRefs(event)); + MediaEventType type = MEUnknown; + if (SUCCEEDED(hr)) { + hr = event->GetType(&type); + } + return SUCCEEDED(hr) ? Event{type} : Event{hr}; +} + +HRESULT MFTEncoder::EventSource::QueueSyncMFTEvent(MediaEventType aEventType) { + MOZ_ASSERT(IsSync()); + MOZ_ASSERT(IsOnCurrentThread()); + + auto q = mImpl.as>().get(); + q->push(aEventType); + return S_OK; +} + +MFTEncoder::Event MFTEncoder::EventSource::GetSyncMFTEvent() { + MOZ_ASSERT(IsOnCurrentThread()); + + auto q = mImpl.as>().get(); + if (q->empty()) { + return Event{MF_E_NO_EVENTS_AVAILABLE}; + } + + MediaEventType type = q->front(); + q->pop(); + return Event{type}; +} + +#ifdef DEBUG +bool MFTEncoder::EventSource::IsOnCurrentThread() { + if (!mThread) { + mThread = GetCurrentSerialEventTarget(); + } + return mThread->IsOnCurrentThread(); +} +#endif + +} // namespace mozilla + +#undef MFT_ENC_SLOGE +#undef MFT_ENC_SLOGD +#undef MFT_ENC_LOGE +#undef MFT_ENC_LOGD diff --git a/dom/media/platforms/wmf/MFTEncoder.h b/dom/media/platforms/wmf/MFTEncoder.h new file mode 100644 index 0000000000..e2eaec3476 --- /dev/null +++ b/dom/media/platforms/wmf/MFTEncoder.h @@ -0,0 +1,144 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#if !defined(MFTEncoder_h_) +# define MFTEncoder_h_ + +# include +# include +# include "mozilla/RefPtr.h" +# include "mozilla/ResultVariant.h" +# include "nsISupportsImpl.h" +# include "nsDeque.h" +# include "nsTArray.h" +# include "WMF.h" + +namespace mozilla { + +class MFTEncoder final { + public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFTEncoder) + + explicit MFTEncoder(const bool aHardwareNotAllowed) + : mHardwareNotAllowed(aHardwareNotAllowed) {} + + HRESULT Create(const GUID& aSubtype); + HRESULT Destroy(); + HRESULT SetMediaTypes(IMFMediaType* aInputType, IMFMediaType* aOutputType); + HRESULT SetModes(UINT32 aBitsPerSec); + HRESULT SetBitrate(UINT32 aBitsPerSec); + + HRESULT CreateInputSample(RefPtr* aSample, size_t aSize); + HRESULT PushInput(RefPtr&& aInput); + HRESULT TakeOutput(nsTArray>& aOutput); + HRESULT Drain(nsTArray>& aOutput); + + HRESULT GetMPEGSequenceHeader(nsTArray& aHeader); + + static nsCString GetFriendlyName(const GUID& aSubtype); + + struct Info final { + GUID mSubtype; + nsCString mName; + }; + + private: + // Abstractions to support sync MFTs using the same logic for async MFTs. + // When the MFT is async and a real event generator is available, simply + // forward the calls. For sync MFTs, use the synchronous processing model + // described in + // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data + // to generate events of the asynchronous processing model. + using Event = Result; + using EventQueue = std::queue; + class EventSource final { + public: + EventSource() : mImpl(Nothing{}) {} + + void SetAsyncEventGenerator( + already_AddRefed&& aAsyncEventGenerator) { + MOZ_ASSERT(mImpl.is()); + mImpl.emplace>(aAsyncEventGenerator); + } + + void InitSyncMFTEventQueue() { + MOZ_ASSERT(mImpl.is()); + mImpl.emplace>(MakeUnique()); + } + + bool IsSync() const { return mImpl.is>(); } + + Event GetEvent(); + // Push an event when sync MFT is used. + HRESULT QueueSyncMFTEvent(MediaEventType aEventType); + + private: + // Pop an event from the queue when sync MFT is used. + Event GetSyncMFTEvent(); + + Variant< + // Uninitialized. + Nothing, + // For async MFT events. See + // https://docs.microsoft.com/en-us/windows/win32/medfound/asynchronous-mfts#events + RefPtr, + // Event queue for a sync MFT. Storing EventQueue directly breaks the + // code so a pointer is introduced. + UniquePtr> + mImpl; +# ifdef DEBUG + bool IsOnCurrentThread(); + nsCOMPtr mThread; +# endif + }; + + ~MFTEncoder() { Destroy(); }; + + static nsTArray& Infos(); + static nsTArray Enumerate(); + static Maybe GetInfo(const GUID& aSubtype); + + already_AddRefed CreateFactory(const GUID& aSubtype); + // Return true when successfully enabled, false for MFT that doesn't support + // async processing model, and error otherwise. + using AsyncMFTResult = Result; + AsyncMFTResult AttemptEnableAsync(); + HRESULT GetStreamIDs(); + GUID MatchInputSubtype(IMFMediaType* aInputType); + HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData); + + HRESULT ProcessEvents(); + HRESULT ProcessInput(); + HRESULT ProcessOutput(); + + const bool mHardwareNotAllowed; + RefPtr mEncoder; + // For MFT object creation. See + // https://docs.microsoft.com/en-us/windows/win32/medfound/activation-objects + RefPtr mFactory; + // For encoder configuration. 
See + // https://docs.microsoft.com/en-us/windows/win32/directshow/encoder-api + RefPtr mConfig; + + DWORD mInputStreamID; + DWORD mOutputStreamID; + MFT_INPUT_STREAM_INFO mInputStreamInfo; + MFT_OUTPUT_STREAM_INFO mOutputStreamInfo; + bool mOutputStreamProvidesSample; + + size_t mNumNeedInput; + enum class DrainState { DRAINED, DRAINABLE, DRAINING }; + DrainState mDrainState = DrainState::DRAINABLE; + + nsRefPtrDeque mPendingInputs; + nsTArray> mOutputs; + + EventSource mEventSource; +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/platforms/wmf/WMF.h b/dom/media/platforms/wmf/WMF.h new file mode 100644 index 0000000000..740442ceda --- /dev/null +++ b/dom/media/platforms/wmf/WMF.h @@ -0,0 +1,198 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef WMF_H_ +#define WMF_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "mozilla/Atomics.h" +#include "mozilla/ClearOnShutdown.h" +#include "mozilla/StaticMutex.h" +#include "nsThreadUtils.h" + +// The Windows headers helpfully declare min and max macros, which don't +// compile in the presence of std::min and std::max and unified builds. +// So undef them here. +#ifdef min +# undef min +#endif +#ifdef max +# undef max +#endif + +// https://stackoverflow.com/questions/25759700/ms-format-tag-for-opus-codec +#ifndef MFAudioFormat_Opus +DEFINE_GUID(MFAudioFormat_Opus, WAVE_FORMAT_OPUS, 0x000, 0x0010, 0x80, 0x00, + 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71); +#endif + +const inline GUID CLSID_CMSVPXDecMFT = { + 0xe3aaf548, + 0xc9a4, + 0x4c6e, + {0x23, 0x4d, 0x5a, 0xda, 0x37, 0x4b, 0x00, 0x00}}; + +namespace mozilla::wmf { + +// A helper class for automatically starting and shuting down the Media +// Foundation. Prior to using Media Foundation in a process, users should call +// MediaFoundationInitializer::HasInitialized() to ensure Media Foundation is +// initialized. Users should also check the result of this call, in case the +// internal call to MFStartup fails. The first check to HasInitialized will +// cause the helper to start up Media Foundation and set up a runnable to handle +// Media Foundation shutdown at XPCOM shutdown. Calls after the first will not +// cause any extra startups or shutdowns, so it's safe to check multiple times +// in the same process. Users do not need to do any manual shutdown, the helper +// will handle this internally. +class MediaFoundationInitializer final { + public: + ~MediaFoundationInitializer() { + if (mHasInitialized) { + if (FAILED(MFShutdown())) { + NS_WARNING("MFShutdown failed"); + } + } + } + static bool HasInitialized() { + if (sIsShutdown) { + return false; + } + return Get()->mHasInitialized; + } + + private: + static MediaFoundationInitializer* Get() { + { + StaticMutexAutoLock lock(sCreateMutex); + if (!sInitializer) { + sInitializer.reset(new MediaFoundationInitializer()); + GetMainThreadSerialEventTarget()->Dispatch( + NS_NewRunnableFunction("MediaFoundationInitializer::Get", [&] { + // Need to run this before MTA thread gets destroyed. 
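              // (RunOnShutdown() must be registered from the main thread,
              // which is why this lambda was dispatched there instead of
              // registering directly from Get().)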
+ RunOnShutdown( + [&] { + sInitializer.reset(); + sIsShutdown = true; + }, + ShutdownPhase::XPCOMShutdown); + })); + } + } + return sInitializer.get(); + } + + MediaFoundationInitializer() : mHasInitialized(SUCCEEDED(MFStartup())) { + if (!mHasInitialized) { + NS_WARNING("MFStartup failed"); + } + } + + // If successful, loads all required WMF DLLs and calls the WMF MFStartup() + // function. This delegates the WMF MFStartup() call to the MTA thread if + // the current thread is not MTA. This is to ensure we always interact with + // WMF from threads with the same COM compartment model. + HRESULT MFStartup(); + + // Calls the WMF MFShutdown() function. Call this once for every time + // wmf::MFStartup() succeeds. Note: does not unload the WMF DLLs loaded by + // MFStartup(); leaves them in memory to save I/O at next MFStartup() call. + // This delegates the WMF MFShutdown() call to the MTA thread if the current + // thread is not MTA. This is to ensure we always interact with + // WMF from threads with the same COM compartment model. + HRESULT MFShutdown(); + + static inline UniquePtr sInitializer; + static inline StaticMutex sCreateMutex; + static inline Atomic sIsShutdown{false}; + const bool mHasInitialized; +}; + +// All functions below are wrappers around the corresponding WMF function, +// and automatically locate and call the corresponding function in the WMF DLLs. + +HRESULT MFCreateMediaType(IMFMediaType** aOutMFType); + +HRESULT MFGetStrideForBitmapInfoHeader(DWORD aFormat, DWORD aWidth, + LONG* aOutStride); + +HRESULT MFGetService(IUnknown* punkObject, REFGUID guidService, REFIID riid, + LPVOID* ppvObject); + +HRESULT DXVA2CreateDirect3DDeviceManager9( + UINT* pResetToken, IDirect3DDeviceManager9** ppDXVAManager); + +HRESULT MFCreateDXGIDeviceManager(UINT* pResetToken, + IMFDXGIDeviceManager** ppDXVAManager); + +HRESULT MFCreateSample(IMFSample** ppIMFSample); + +HRESULT MFCreateAlignedMemoryBuffer(DWORD cbMaxLength, DWORD fAlignmentFlags, + IMFMediaBuffer** ppBuffer); + +HRESULT MFCreateDXGISurfaceBuffer(REFIID riid, IUnknown* punkSurface, + UINT uSubresourceIndex, + BOOL fButtomUpWhenLinear, + IMFMediaBuffer** ppBuffer); + +HRESULT MFTEnumEx(GUID guidCategory, UINT32 Flags, + const MFT_REGISTER_TYPE_INFO* pInputType, + const MFT_REGISTER_TYPE_INFO* pOutputType, + IMFActivate*** pppMFTActivate, UINT32* pnumMFTActivate); + +HRESULT MFTGetInfo(CLSID clsidMFT, LPWSTR* pszName, + MFT_REGISTER_TYPE_INFO** ppInputTypes, UINT32* pcInputTypes, + MFT_REGISTER_TYPE_INFO** ppOutputTypes, + UINT32* pcOutputTypes, IMFAttributes** ppAttributes); + +HRESULT MFCreateAttributes(IMFAttributes** ppMFAttributes, UINT32 cInitialSize); + +HRESULT MFCreateEventQueue(IMFMediaEventQueue** ppMediaEventQueue); + +HRESULT MFCreateStreamDescriptor(DWORD dwStreamIdentifier, DWORD cMediaTypes, + IMFMediaType** apMediaTypes, + IMFStreamDescriptor** ppDescriptor); + +HRESULT MFCreateAsyncResult(IUnknown* punkObject, IMFAsyncCallback* pCallback, + IUnknown* punkState, + IMFAsyncResult** ppAsyncResult); + +HRESULT MFCreatePresentationDescriptor( + DWORD cStreamDescriptors, IMFStreamDescriptor** apStreamDescriptors, + IMFPresentationDescriptor** ppPresentationDescriptor); + +HRESULT MFCreateMemoryBuffer(DWORD cbMaxLength, IMFMediaBuffer** ppBuffer); + +HRESULT MFLockDXGIDeviceManager(UINT* pResetToken, + IMFDXGIDeviceManager** ppManager); + +HRESULT MFUnlockDXGIDeviceManager(); + +HRESULT MFPutWorkItem(DWORD dwQueue, IMFAsyncCallback* pCallback, + IUnknown* pState); + +HRESULT 
MFSerializeAttributesToStream(IMFAttributes* pAttr, DWORD dwOptions, + IStream* pStm); + +HRESULT MFWrapMediaType(IMFMediaType* pOrig, REFGUID MajorType, REFGUID SubType, + IMFMediaType** ppWrap); + +} // namespace mozilla::wmf + +#endif diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp new file mode 100644 index 0000000000..6ebcf9a80a --- /dev/null +++ b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp @@ -0,0 +1,315 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFAudioMFTManager.h" +#include "MediaInfo.h" +#include "TimeUnits.h" +#include "VideoUtils.h" +#include "WMFUtils.h" +#include "mozilla/AbstractThread.h" +#include "mozilla/Logging.h" +#include "mozilla/Telemetry.h" +#include "nsTArray.h" +#include "BufferReader.h" +#include "mozilla/ScopeExit.h" + +#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__)) + +namespace mozilla { + +using media::TimeUnit; + +WMFAudioMFTManager::WMFAudioMFTManager(const AudioInfo& aConfig) + : mAudioChannels(aConfig.mChannels), + mChannelsMap(AudioConfig::ChannelLayout::UNKNOWN_MAP), + mAudioRate(aConfig.mRate), + mStreamType(GetStreamTypeFromMimeType(aConfig.mMimeType)) { + MOZ_COUNT_CTOR(WMFAudioMFTManager); + + if (mStreamType == WMFStreamType::AAC) { + const uint8_t* audioSpecConfig; + uint32_t configLength; + if (aConfig.mCodecSpecificConfig.is()) { + const AacCodecSpecificData& aacCodecSpecificData = + aConfig.mCodecSpecificConfig.as(); + audioSpecConfig = + aacCodecSpecificData.mDecoderConfigDescriptorBinaryBlob->Elements(); + configLength = + aacCodecSpecificData.mDecoderConfigDescriptorBinaryBlob->Length(); + + mRemainingEncoderDelay = mEncoderDelay = + aacCodecSpecificData.mEncoderDelayFrames; + mTotalMediaFrames = aacCodecSpecificData.mMediaFrameCount; + LOG("AudioMFT decoder: Found AAC decoder delay (%" PRIu32 + "frames) and total media frames (%" PRIu64 " frames)\n", + mEncoderDelay, mTotalMediaFrames); + } else { + // Gracefully handle failure to cover all codec specific cases above. Once + // we're confident there is no fall through from these cases above, we + // should remove this code. + RefPtr audioCodecSpecificBinaryBlob = + GetAudioCodecSpecificBlob(aConfig.mCodecSpecificConfig); + audioSpecConfig = audioCodecSpecificBinaryBlob->Elements(); + configLength = audioCodecSpecificBinaryBlob->Length(); + } + AACAudioSpecificConfigToUserData(aConfig.mExtendedProfile, audioSpecConfig, + configLength, mUserData); + } +} + +WMFAudioMFTManager::~WMFAudioMFTManager() { + MOZ_COUNT_DTOR(WMFAudioMFTManager); +} + +const GUID& WMFAudioMFTManager::GetMediaSubtypeGUID() { + MOZ_ASSERT(StreamTypeIsAudio(mStreamType)); + switch (mStreamType) { + case WMFStreamType::AAC: + return MFAudioFormat_AAC; + case WMFStreamType::MP3: + return MFAudioFormat_MP3; + default: + return GUID_NULL; + }; +} + +bool WMFAudioMFTManager::Init() { + NS_ENSURE_TRUE(StreamTypeIsAudio(mStreamType), false); + + RefPtr decoder(new MFTDecoder()); + // Note: MP3 MFT isn't registered as supporting Float output, but it works. + // Find PCM output MFTs as this is the common type. 
+ HRESULT hr = WMFDecoderModule::CreateMFTDecoder(mStreamType, decoder); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + // Setup input/output media types + RefPtr inputType; + + hr = wmf::MFCreateMediaType(getter_AddRefs(inputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID()); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = inputType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, mAudioRate); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = inputType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, mAudioChannels); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + if (mStreamType == WMFStreamType::AAC) { + hr = inputType->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x0); // Raw AAC packet + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = inputType->SetBlob(MF_MT_USER_DATA, mUserData.Elements(), + mUserData.Length()); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + } + + RefPtr outputType; + hr = wmf::MFCreateMediaType(getter_AddRefs(outputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = outputType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = outputType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 32); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + hr = decoder->SetMediaTypes(inputType, outputType); + NS_ENSURE_TRUE(SUCCEEDED(hr), false); + + mDecoder = decoder; + + return true; +} + +HRESULT +WMFAudioMFTManager::Input(MediaRawData* aSample) { + mLastInputTime = aSample->mTime; + return mDecoder->Input(aSample->Data(), uint32_t(aSample->Size()), + aSample->mTime.ToMicroseconds(), + aSample->mDuration.ToMicroseconds()); +} + +nsCString WMFAudioMFTManager::GetCodecName() const { + if (mStreamType == WMFStreamType::AAC) { + return "aac"_ns; + } else if (mStreamType == WMFStreamType::MP3) { + return "mp3"_ns; + } + return "unknown"_ns; +} + +HRESULT +WMFAudioMFTManager::UpdateOutputType() { + HRESULT hr; + + RefPtr type; + hr = mDecoder->GetOutputMediaType(type); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = type->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &mAudioRate); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = type->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &mAudioChannels); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + uint32_t channelsMap; + hr = type->GetUINT32(MF_MT_AUDIO_CHANNEL_MASK, &channelsMap); + if (SUCCEEDED(hr)) { + mChannelsMap = channelsMap; + } else { + LOG("Unable to retrieve channel layout. Ignoring"); + mChannelsMap = AudioConfig::ChannelLayout::UNKNOWN_MAP; + } + + return S_OK; +} + +HRESULT +WMFAudioMFTManager::Output(int64_t aStreamOffset, RefPtr& aOutData) { + aOutData = nullptr; + RefPtr sample; + HRESULT hr; + int typeChangeCount = 0; + const auto oldAudioRate = mAudioRate; + while (true) { + hr = mDecoder->Output(&sample); + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + return hr; + } + if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + hr = mDecoder->FindDecoderOutputType(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + hr = UpdateOutputType(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + // Catch infinite loops, but some decoders perform at least 2 stream + // changes on consecutive calls, so be permissive. + // 100 is arbitrarily > 2. 
+ NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE); + ++typeChangeCount; + continue; + } + break; + } + + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (!sample) { + LOG("Audio MFTDecoder returned success but null output."); + return E_FAIL; + } + + UINT32 discontinuity = false; + sample->GetUINT32(MFSampleExtension_Discontinuity, &discontinuity); + if (mFirstFrame || discontinuity) { + // Update the output type, in case this segment has a different + // rate. This also triggers on the first sample, which can have a + // different rate than is advertised in the container, and sometimes we + // don't get a MF_E_TRANSFORM_STREAM_CHANGE when the rate changes. + hr = UpdateOutputType(); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + mFirstFrame = false; + } + + LONGLONG hns; + hr = sample->GetSampleTime(&hns); + if (FAILED(hr)) { + return E_FAIL; + } + TimeUnit pts = TimeUnit::FromHns(hns, mAudioRate); + NS_ENSURE_TRUE(pts.IsValid(), E_FAIL); + + RefPtr buffer; + hr = sample->ConvertToContiguousBuffer(getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we + // don't need to free it. + DWORD maxLength = 0, currentLength = 0; + hr = buffer->Lock(&data, &maxLength, ¤tLength); + ScopeExit exit([buffer] { buffer->Unlock(); }); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + // Output is made of floats. + int32_t numSamples = currentLength / sizeof(float); + int32_t numFrames = numSamples / mAudioChannels; + MOZ_ASSERT(numFrames >= 0); + MOZ_ASSERT(numSamples >= 0); + if (numFrames == 0) { + // All data from this chunk stripped, loop back and try to output the next + // frame, if possible. + return S_OK; + } + + if (oldAudioRate != mAudioRate) { + LOG("Audio rate changed from %" PRIu32 " to %" PRIu32, oldAudioRate, + mAudioRate); + } + + AlignedAudioBuffer audioData(numSamples); + if (!audioData) { + return E_OUTOFMEMORY; + } + + float* floatData = reinterpret_cast(data); + PodCopy(audioData.Data(), floatData, numSamples); + + TimeUnit duration(numFrames, mAudioRate); + NS_ENSURE_TRUE(duration.IsValid(), E_FAIL); + + const bool isAudioRateChangedToHigher = oldAudioRate < mAudioRate; + if (IsPartialOutput(duration, isAudioRateChangedToHigher)) { + LOG("Encounter a partial frame?! duration shrinks from %s to %s", + mLastOutputDuration.ToString().get(), duration.ToString().get()); + return MF_E_TRANSFORM_NEED_MORE_INPUT; + } + + aOutData = new AudioData(aStreamOffset, pts, std::move(audioData), + mAudioChannels, mAudioRate, mChannelsMap); + MOZ_DIAGNOSTIC_ASSERT(duration == aOutData->mDuration, "must be equal"); + mLastOutputDuration = aOutData->mDuration; + +#ifdef LOG_SAMPLE_DECODE + LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u", + pts.ToMicroseconds(), duration.ToMicroseconds(), currentLength); +#endif + + return S_OK; +} + +bool WMFAudioMFTManager::IsPartialOutput( + const media::TimeUnit& aNewOutputDuration, + const bool aIsRateChangedToHigher) const { + // This issue was found in Windows11, where AAC MFT decoder would incorrectly + // output partial output samples to us, even if MS's documentation said it + // won't happen [1]. More details are described in bug 1731430 comment 26. + // If the audio rate isn't changed to higher, which would result in shorter + // duration, but the new output duration is still shorter than the last one, + // then new output is possible an incorrect partial output. 
+ // [1] + // https://docs.microsoft.com/en-us/windows/win32/medfound/mft-message-command-drain + if (mStreamType != WMFStreamType::AAC) { + return false; + } + if (mLastOutputDuration > aNewOutputDuration && !aIsRateChangedToHigher) { + return true; + } + return false; +} + +void WMFAudioMFTManager::Shutdown() { mDecoder = nullptr; } + +} // namespace mozilla + +#undef LOG diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.h b/dom/media/platforms/wmf/WMFAudioMFTManager.h new file mode 100644 index 0000000000..b5dc379396 --- /dev/null +++ b/dom/media/platforms/wmf/WMFAudioMFTManager.h @@ -0,0 +1,69 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(WMFAudioOutputSource_h_) +# define WMFAudioOutputSource_h_ + +# include "MFTDecoder.h" +# include "WMF.h" +# include "WMFDecoderModule.h" +# include "WMFMediaDataDecoder.h" +# include "mozilla/RefPtr.h" + +namespace mozilla { + +class WMFAudioMFTManager : public MFTManager { + public: + explicit WMFAudioMFTManager(const AudioInfo& aConfig); + ~WMFAudioMFTManager(); + + bool Init(); + + HRESULT Input(MediaRawData* aSample) override; + + // Note WMF's AAC decoder sometimes output negatively timestamped samples, + // presumably they're the preroll samples, and we strip them. We may return + // a null aOutput in this case. + HRESULT Output(int64_t aStreamOffset, RefPtr& aOutput) override; + + void Shutdown() override; + + TrackInfo::TrackType GetType() override { return TrackInfo::kAudioTrack; } + + nsCString GetDescriptionName() const override { + return "wmf audio decoder"_ns; + } + + nsCString GetCodecName() const override; + + private: + HRESULT UpdateOutputType(); + + bool IsPartialOutput(const media::TimeUnit& aNewOutputDuration, + const bool aIsRateChangedToHigher) const; + + uint32_t mAudioChannels; + AudioConfig::ChannelLayout::ChannelMap mChannelsMap; + uint32_t mAudioRate; + nsTArray mUserData; + + WMFStreamType mStreamType; + + const GUID& GetMediaSubtypeGUID(); + + media::TimeUnit mLastInputTime = media::TimeUnit::Zero(); + media::TimeUnit mLastOutputDuration = media::TimeUnit::Zero(); + + bool mFirstFrame = true; + + uint64_t mTotalMediaFrames = 0; + uint32_t mEncoderDelay = 0; + uint32_t mRemainingEncoderDelay = 0; +}; + +} // namespace mozilla + +#endif // WMFAudioOutputSource_h_ diff --git a/dom/media/platforms/wmf/WMFDataEncoderUtils.h b/dom/media/platforms/wmf/WMFDataEncoderUtils.h new file mode 100644 index 0000000000..7472827b49 --- /dev/null +++ b/dom/media/platforms/wmf/WMFDataEncoderUtils.h @@ -0,0 +1,154 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFMediaDataEncoder.h" + +#include "AnnexB.h" +#include "H264.h" +#include "libyuv.h" +#include "mozilla/Logging.h" +#include "mozilla/mscom/EnsureMTA.h" + +#define WMF_ENC_LOGD(arg, ...) \ + MOZ_LOG( \ + mozilla::sPEMLog, mozilla::LogLevel::Debug, \ + ("WMFMediaDataEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) +#define WMF_ENC_LOGE(arg, ...) 
\ + MOZ_LOG( \ + mozilla::sPEMLog, mozilla::LogLevel::Error, \ + ("WMFMediaDataEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__)) + +namespace mozilla { + +extern LazyLogModule sPEMLog; + +static const GUID CodecToSubtype(CodecType aCodec) { + switch (aCodec) { + case CodecType::H264: + return MFVideoFormat_H264; + case CodecType::VP8: + return MFVideoFormat_VP80; + case CodecType::VP9: + return MFVideoFormat_VP90; + default: + MOZ_ASSERT(false, "Unsupported codec"); + return GUID_NULL; + } +} + +bool CanCreateWMFEncoder(CodecType aCodec) { + bool canCreate = false; + mscom::EnsureMTA([&]() { + if (!wmf::MediaFoundationInitializer::HasInitialized()) { + return; + } + // Try HW encoder first. + auto enc = MakeRefPtr(false /* HW not allowed */); + canCreate = SUCCEEDED(enc->Create(CodecToSubtype(aCodec))); + if (!canCreate) { + // Try SW encoder. + enc = MakeRefPtr(true /* HW not allowed */); + canCreate = SUCCEEDED(enc->Create(CodecToSubtype(aCodec))); + } + }); + return canCreate; +} + +static already_AddRefed ParseH264Parameters( + nsTArray& aHeader, const bool aAsAnnexB) { + size_t length = aHeader.Length(); + auto annexB = MakeRefPtr(length); + PodCopy(annexB->Elements(), aHeader.Elements(), length); + annexB->SetLength(length); + if (aAsAnnexB) { + return annexB.forget(); + } + + // Convert to avcC. + nsTArray paramSets; + AnnexB::ParseNALEntries( + Span(annexB->Elements(), annexB->Length()), paramSets); + + auto avcc = MakeRefPtr(); + AnnexB::NALEntry& sps = paramSets.ElementAt(0); + AnnexB::NALEntry& pps = paramSets.ElementAt(1); + const uint8_t* spsPtr = annexB->Elements() + sps.mOffset; + H264::WriteExtraData( + avcc, spsPtr[1], spsPtr[2], spsPtr[3], + Span(spsPtr, sps.mSize), + Span(annexB->Elements() + pps.mOffset, pps.mSize)); + return avcc.forget(); +} + +static uint32_t GetProfile(H264_PROFILE aProfileLevel) { + switch (aProfileLevel) { + case H264_PROFILE_BASE: + return eAVEncH264VProfile_Base; + case H264_PROFILE_MAIN: + return eAVEncH264VProfile_Main; + default: + return eAVEncH264VProfile_unknown; + } +} + +already_AddRefed CreateInputType(EncoderConfig& aConfig) { + RefPtr type; + return SUCCEEDED(wmf::MFCreateMediaType(getter_AddRefs(type))) && + SUCCEEDED( + type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) && + SUCCEEDED(type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12)) && + SUCCEEDED(type->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_Progressive)) && + SUCCEEDED(MFSetAttributeRatio(type, MF_MT_FRAME_RATE, + aConfig.mFramerate, 1)) && + SUCCEEDED(MFSetAttributeSize(type, MF_MT_FRAME_SIZE, + aConfig.mSize.width, + aConfig.mSize.height)) + ? 
type.forget() + : nullptr; +} + +already_AddRefed CreateOutputType(EncoderConfig& aConfig) { + RefPtr type; + if (FAILED(wmf::MFCreateMediaType(getter_AddRefs(type))) || + FAILED(type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) || + FAILED(type->SetGUID(MF_MT_SUBTYPE, CodecToSubtype(aConfig.mCodec))) || + FAILED(type->SetUINT32(MF_MT_AVG_BITRATE, aConfig.mBitrate)) || + FAILED(type->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_Progressive)) || + FAILED( + MFSetAttributeRatio(type, MF_MT_FRAME_RATE, aConfig.mFramerate, 1)) || + FAILED(MFSetAttributeSize(type, MF_MT_FRAME_SIZE, aConfig.mSize.width, + aConfig.mSize.height))) { + return nullptr; + } + if (aConfig.mCodecSpecific) { + if (aConfig.mCodecSpecific->is()) { + if (FAILED(type->SetUINT32( + MF_MT_MPEG2_PROFILE, + GetProfile( + aConfig.mCodecSpecific->as().mProfile)))) { + return nullptr; + } + } + } + + return type.forget(); +} + +HRESULT SetMediaTypes(RefPtr& aEncoder, EncoderConfig& aConfig) { + RefPtr inputType = CreateInputType(aConfig); + if (!inputType) { + return E_FAIL; + } + + RefPtr outputType = CreateOutputType(aConfig); + if (!outputType) { + return E_FAIL; + } + + return aEncoder->SetMediaTypes(inputType, outputType); +} + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/WMFDecoderModule.cpp b/dom/media/platforms/wmf/WMFDecoderModule.cpp new file mode 100644 index 0000000000..b3aae1e750 --- /dev/null +++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp @@ -0,0 +1,492 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFDecoderModule.h" + +#include +#include + +#include "DriverCrashGuard.h" +#include "GfxDriverInfo.h" +#include "MFTDecoder.h" +#include "MP4Decoder.h" +#include "MediaInfo.h" +#include "PDMFactory.h" +#include "VPXDecoder.h" +#include "WMFAudioMFTManager.h" +#include "WMFMediaDataDecoder.h" +#include "WMFVideoMFTManager.h" +#include "mozilla/DebugOnly.h" +#include "mozilla/Maybe.h" +#include "mozilla/StaticMutex.h" +#include "mozilla/StaticPrefs_media.h" +#include "mozilla/SyncRunnable.h" +#include "mozilla/WindowsVersion.h" +#include "mozilla/gfx/gfxVars.h" +#include "mozilla/mscom/EnsureMTA.h" +#include "mozilla/ProfilerMarkers.h" +#include "nsComponentManagerUtils.h" +#include "nsIXULRuntime.h" +#include "nsIXULRuntime.h" // for BrowserTabsRemoteAutostart +#include "nsServiceManagerUtils.h" +#include "nsWindowsHelpers.h" +#include "prsystem.h" + +#ifdef MOZ_AV1 +# include "AOMDecoder.h" +#endif + +#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__)) + +namespace mozilla { + +// Helper function to add a profile marker and log at the same time. +static void MOZ_FORMAT_PRINTF(2, 3) + WmfDecoderModuleMarkerAndLog(const ProfilerString8View& aMarkerTag, + const char* aFormat, ...) 
{ + va_list ap; + va_start(ap, aFormat); + const nsVprintfCString markerString(aFormat, ap); + va_end(ap); + PROFILER_MARKER_TEXT(aMarkerTag, MEDIA_PLAYBACK, {}, markerString); + LOG("%s", markerString.get()); +} + +static const GUID CLSID_CMSAACDecMFT = { + 0x32D186A7, + 0x218F, + 0x4C75, + {0x88, 0x76, 0xDD, 0x77, 0x27, 0x3A, 0x89, 0x99}}; + +static Atomic sDXVAEnabled(false); + +/* static */ +already_AddRefed WMFDecoderModule::Create() { + RefPtr wmf = new WMFDecoderModule(); + return wmf.forget(); +} + +static bool IsRemoteAcceleratedCompositor( + layers::KnowsCompositor* aKnowsCompositor) { + if (!aKnowsCompositor) { + return false; + } + + if (aKnowsCompositor->UsingSoftwareWebRenderD3D11()) { + return true; + } + + layers::TextureFactoryIdentifier ident = + aKnowsCompositor->GetTextureFactoryIdentifier(); + return !aKnowsCompositor->UsingSoftwareWebRender() && + ident.mParentProcessType == GeckoProcessType_GPU; +} + +static Atomic sSupportedTypesInitialized(false); +static EnumSet sSupportedTypes; +static EnumSet sLackOfExtensionTypes; + +/* static */ +void WMFDecoderModule::Init(Config aConfig) { + MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread()); + if (XRE_IsContentProcess()) { + // If we're in the content process and the UseGPUDecoder pref is set, it + // means that we've given up on the GPU process (it's been crashing) so we + // should disable DXVA + sDXVAEnabled = !StaticPrefs::media_gpu_process_decoder(); + } else if (XRE_IsGPUProcess()) { + // Always allow DXVA in the GPU process. + sDXVAEnabled = true; + if (aConfig == Config::ForceEnableHEVC) { + WmfDecoderModuleMarkerAndLog( + "ReportHardwareSupport", + "Enable HEVC for reporting hardware support telemetry"); + sForceEnableHEVC = true; + } else { + sForceEnableHEVC = false; + } + } else if (XRE_IsRDDProcess()) { + // Hardware accelerated decoding is explicitly only done in the GPU process + // to avoid copying textures whenever possible. Previously, detecting + // whether the video bridge was set up could be done with the following: + // sDXVAEnabled = !!DeviceManagerDx::Get()->GetImageDevice(); + // The video bridge was previously broken due to initialization order + // issues. For more information see Bug 1763880. + sDXVAEnabled = false; + } else { + // Only allow DXVA in the UI process if we aren't in e10s Firefox + sDXVAEnabled = !mozilla::BrowserTabsRemoteAutostart(); + } + + // We have heavy logging below to help diagnose issue around hardware + // decoding failures. Due to these failures often relating to driver level + // problems they're hard to nail down, so we want lots of info. We may be + // able to relax this in future if we're not seeing such problems (see bug + // 1673007 for references to the bugs motivating this). + bool hwVideo = gfx::gfxVars::GetCanUseHardwareVideoDecodingOrDefault(); + WmfDecoderModuleMarkerAndLog( + "WMFInit DXVA Status", + "sDXVAEnabled: %s, CanUseHardwareVideoDecoding: %s", + sDXVAEnabled ? "true" : "false", hwVideo ? "true" : "false"); + sDXVAEnabled = sDXVAEnabled && hwVideo; + + mozilla::mscom::EnsureMTA([&]() { + // Store the supported MFT decoders. + sSupportedTypes.clear(); + sLackOfExtensionTypes.clear(); + // i = 1 to skip Unknown. 
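The probing below runs inside mscom::EnsureMTA because Media Foundation objects are expected to live in a COM multithreaded apartment. As a rough standalone illustration of that requirement (not Mozilla's EnsureMTA or MediaFoundationInitializer), a bare-bones probe would look like this:

#include <mfapi.h>
#include <objbase.h>

// Returns true if Media Foundation can be started on an MTA thread.
// Assumes the calling thread has not already joined an STA.
bool ProbeMediaFoundationOnMTA() {
  if (FAILED(CoInitializeEx(nullptr, COINIT_MULTITHREADED))) {
    return false;
  }
  const bool ok = SUCCEEDED(MFStartup(MF_VERSION, MFSTARTUP_FULL));
  if (ok) {
    MFShutdown();
  }
  CoUninitialize();
  return ok;
}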
+ for (uint32_t i = 1; i < static_cast(WMFStreamType::SENTINEL); + i++) { + WMFStreamType type = static_cast(i); + RefPtr decoder = new MFTDecoder(); + HRESULT hr = CreateMFTDecoder(type, decoder); + if (SUCCEEDED(hr)) { + sSupportedTypes += type; + WmfDecoderModuleMarkerAndLog("WMFInit Decoder Supported", + "%s is enabled", StreamTypeToString(type)); + } else if (hr != E_FAIL) { + // E_FAIL should be logged by CreateMFTDecoder. Skipping those codes + // will help to keep the logs readable. + WmfDecoderModuleMarkerAndLog("WMFInit Decoder Failed", + "%s failed with code 0x%lx", + StreamTypeToString(type), hr); + if (hr == WINCODEC_ERR_COMPONENTNOTFOUND && + type == WMFStreamType::AV1) { + WmfDecoderModuleMarkerAndLog("No AV1 extension", + "Lacking of AV1 extension"); + sLackOfExtensionTypes += type; + } + } + } + }); + + sSupportedTypesInitialized = true; + + WmfDecoderModuleMarkerAndLog("WMFInit Result", + "WMFDecoderModule::Init finishing"); +} + +/* static */ +int WMFDecoderModule::GetNumDecoderThreads() { + int32_t numCores = PR_GetNumberOfProcessors(); + + // If we have more than 4 cores, let the decoder decide how many threads. + // On an 8 core machine, WMF chooses 4 decoder threads. + static const int WMF_DECODER_DEFAULT = -1; + if (numCores > 4) { + return WMF_DECODER_DEFAULT; + } + return std::max(numCores - 1, 1); +} + +/* static */ +HRESULT WMFDecoderModule::CreateMFTDecoder(const WMFStreamType& aType, + RefPtr& aDecoder) { + // Do not expose any video decoder on utility process which is only for audio + // decoding. + if (XRE_IsUtilityProcess()) { + switch (aType) { + case WMFStreamType::H264: + case WMFStreamType::VP8: + case WMFStreamType::VP9: + case WMFStreamType::AV1: + case WMFStreamType::HEVC: + return E_FAIL; + default: + break; + } + } + + switch (aType) { + case WMFStreamType::H264: + return aDecoder->Create(CLSID_CMSH264DecoderMFT); + case WMFStreamType::VP8: + static const uint32_t VP8_USABLE_BUILD = 16287; + if (!IsWindows10BuildOrLater(VP8_USABLE_BUILD)) { + WmfDecoderModuleMarkerAndLog("CreateMFTDecoder, VP8 Failure", + "VP8 MFT requires Windows build %" PRId32 + " or later", + VP8_USABLE_BUILD); + return E_FAIL; + } + if (!gfx::gfxVars::UseVP8HwDecode()) { + WmfDecoderModuleMarkerAndLog("CreateMFTDecoder, VP8 Failure", + "Gfx VP8 blocklist"); + return E_FAIL; + } + [[fallthrough]]; + case WMFStreamType::VP9: + if (!sDXVAEnabled) { + WmfDecoderModuleMarkerAndLog("CreateMFTDecoder, VPx Disabled", + "%s MFT requires DXVA", + StreamTypeToString(aType)); + return E_FAIL; + } + + { + gfx::WMFVPXVideoCrashGuard guard; + if (guard.Crashed()) { + WmfDecoderModuleMarkerAndLog( + "CreateMFTDecoder, VPx Failure", + "Will not use VPx MFT due to crash guard reporting a crash"); + return E_FAIL; + } + return aDecoder->Create(CLSID_CMSVPXDecMFT); + } +#ifdef MOZ_AV1 + case WMFStreamType::AV1: + // If this process cannot use DXVA, the AV1 decoder will not be used. + // Also, upon startup, init will be called both before and after + // layers acceleration is setup. This prevents creating the AV1 decoder + // twice. + if (!sDXVAEnabled) { + WmfDecoderModuleMarkerAndLog("CreateMFTDecoder AV1 Disabled", + "AV1 MFT requires DXVA"); + return E_FAIL; + } + // TODO: MFTEnumEx is slower than creating by CLSID, it may be worth + // investigating other ways to instantiate the AV1 decoder. 
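On the TODO above about MFTEnumEx being slower than creating by CLSID: the two instantiation paths differ roughly as follows. This is an illustrative sketch with hypothetical helper names (CreateByClsid, CreateByEnumeration), not Mozilla's MFTDecoder::Create.

#include <mfapi.h>
#include <mftransform.h>
#include <objbase.h>
#include <wrl/client.h>

using Microsoft::WRL::ComPtr;

// Fast path: a known CLSID (e.g. CLSID_CMSH264DecoderMFT) goes straight
// through COM activation.
HRESULT CreateByClsid(REFCLSID aClsid, ComPtr<IMFTransform>& aOut) {
  return CoCreateInstance(aClsid, nullptr, CLSCTX_INPROC_SERVER,
                          IID_PPV_ARGS(&aOut));
}

// Slower path: enumerate registered video decoders that accept the given
// input subtype and can output NV12, then activate the first match.
HRESULT CreateByEnumeration(REFGUID aInputSubtype, ComPtr<IMFTransform>& aOut) {
  MFT_REGISTER_TYPE_INFO input = {MFMediaType_Video, aInputSubtype};
  MFT_REGISTER_TYPE_INFO output = {MFMediaType_Video, MFVideoFormat_NV12};
  IMFActivate** activates = nullptr;
  UINT32 count = 0;
  HRESULT hr = MFTEnumEx(MFT_CATEGORY_VIDEO_DECODER,
                         MFT_ENUM_FLAG_SYNCMFT | MFT_ENUM_FLAG_ASYNCMFT |
                             MFT_ENUM_FLAG_HARDWARE |
                             MFT_ENUM_FLAG_SORTANDFILTER,
                         &input, &output, &activates, &count);
  if (FAILED(hr)) {
    return hr;
  }
  if (count == 0) {
    // No decoder registered; for AV1 this is the "extension not installed"
    // case that the code above reports separately.
    return E_FAIL;
  }
  hr = activates[0]->ActivateObject(IID_PPV_ARGS(&aOut));
  for (UINT32 i = 0; i < count; ++i) {
    activates[i]->Release();
  }
  CoTaskMemFree(activates);
  return hr;
}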
+ return aDecoder->Create(MFT_CATEGORY_VIDEO_DECODER, MFVideoFormat_AV1, + MFVideoFormat_NV12); +#endif + case WMFStreamType::HEVC: + if (!WMFDecoderModule::IsHEVCSupported() || !sDXVAEnabled) { + return E_FAIL; + } + return aDecoder->Create(MFT_CATEGORY_VIDEO_DECODER, MFVideoFormat_HEVC, + MFVideoFormat_NV12); + case WMFStreamType::MP3: + return aDecoder->Create(CLSID_CMP3DecMediaObject); + case WMFStreamType::AAC: + return aDecoder->Create(CLSID_CMSAACDecMFT); + default: + return E_FAIL; + } +} + +/* static */ +bool WMFDecoderModule::CanCreateMFTDecoder(const WMFStreamType& aType) { + MOZ_ASSERT(WMFStreamType::Unknown < aType && aType < WMFStreamType::SENTINEL); + if (!sSupportedTypesInitialized) { + if (NS_IsMainThread()) { + Init(); + } else { + nsCOMPtr runnable = + NS_NewRunnableFunction("WMFDecoderModule::Init", [&]() { Init(); }); + SyncRunnable::DispatchToThread(GetMainThreadSerialEventTarget(), + runnable); + } + } + + // Check prefs here rather than CreateMFTDecoder so that prefs aren't baked + // into sSupportedTypes + switch (aType) { + case WMFStreamType::VP8: + case WMFStreamType::VP9: + if (!StaticPrefs::media_wmf_vp9_enabled()) { + return false; + } + break; +#ifdef MOZ_AV1 + case WMFStreamType::AV1: + if (!StaticPrefs::media_av1_enabled() || + !StaticPrefs::media_wmf_av1_enabled()) { + return false; + } + break; +#endif + case WMFStreamType::HEVC: + if (!WMFDecoderModule::IsHEVCSupported()) { + return false; + } + break; + // Always use ffvpx for mp3 + case WMFStreamType::MP3: + return false; + default: + break; + } + + // Do not expose any video decoder on utility process which is only for audio + // decoding. + if (XRE_IsUtilityProcess()) { + switch (aType) { + case WMFStreamType::H264: + case WMFStreamType::VP8: + case WMFStreamType::VP9: + case WMFStreamType::AV1: + case WMFStreamType::HEVC: + return false; + default: + break; + } + } + + return sSupportedTypes.contains(aType); +} + +bool WMFDecoderModule::SupportsColorDepth( + gfx::ColorDepth aColorDepth, DecoderDoctorDiagnostics* aDiagnostics) const { + // Color depth support can be determined by creating DX decoders. + return true; +} + +media::DecodeSupportSet WMFDecoderModule::Supports( + const SupportDecoderParams& aParams, + DecoderDoctorDiagnostics* aDiagnostics) const { + // This should only be supported by MFMediaEngineDecoderModule. + if (aParams.mMediaEngineId) { + return media::DecodeSupportSet{}; + } + // In GPU process, only support decoding if video. This only gives a hint of + // what the GPU decoder *may* support. The actual check will occur in + // CreateVideoDecoder. + const auto& trackInfo = aParams.mConfig; + if (XRE_IsGPUProcess() && !trackInfo.GetAsVideoInfo()) { + return media::DecodeSupportSet{}; + } + + const auto* videoInfo = trackInfo.GetAsVideoInfo(); + // Temporary - forces use of VPXDecoder when alpha is present. + // Bug 1263836 will handle alpha scenario once implemented. It will shift + // the check for alpha to PDMFactory but not itself remove the need for a + // check. + if (videoInfo && (!SupportsColorDepth(videoInfo->mColorDepth, aDiagnostics) || + videoInfo->HasAlpha())) { + return media::DecodeSupportSet{}; + } + + if (videoInfo && VPXDecoder::IsVP9(aParams.MimeType()) && + aParams.mOptions.contains(CreateDecoderParams::Option::LowLatency)) { + // SVC layers are unsupported, and may be used in low latency use cases + // (WebRTC). 
+ return media::DecodeSupportSet{}; + } + + WMFStreamType type = GetStreamTypeFromMimeType(aParams.MimeType()); + if (type == WMFStreamType::Unknown) { + return media::DecodeSupportSet{}; + } + + if (CanCreateMFTDecoder(type)) { + if (StreamTypeIsVideo(type)) { + return sDXVAEnabled ? media::DecodeSupport::HardwareDecode + : media::DecodeSupport::SoftwareDecode; + } else { + // Audio only supports software decode + return media::DecodeSupport::SoftwareDecode; + } + } + return sLackOfExtensionTypes.contains(type) + ? media::DecodeSupport::UnsureDueToLackOfExtension + : media::DecodeSupportSet{}; +} + +nsresult WMFDecoderModule::Startup() { + return wmf::MediaFoundationInitializer::HasInitialized() ? NS_OK + : NS_ERROR_FAILURE; +} + +already_AddRefed WMFDecoderModule::CreateVideoDecoder( + const CreateDecoderParams& aParams) { + // In GPU process, only support decoding if an accelerated compositor is + // known. + if (XRE_IsGPUProcess() && + !IsRemoteAcceleratedCompositor(aParams.mKnowsCompositor)) { + return nullptr; + } + + UniquePtr manager(new WMFVideoMFTManager( + aParams.VideoConfig(), aParams.mKnowsCompositor, aParams.mImageContainer, + aParams.mRate.mValue, aParams.mOptions, sDXVAEnabled, + aParams.mTrackingId)); + + MediaResult result = manager->Init(); + if (NS_FAILED(result)) { + if (aParams.mError) { + *aParams.mError = result; + } + WmfDecoderModuleMarkerAndLog( + "WMFVDecoderCreation Failure", + "WMFDecoderModule::CreateVideoDecoder failed for manager with " + "description %s with result: %s", + manager->GetDescriptionName().get(), result.Description().get()); + return nullptr; + } + + nsAutoCString hwFailure; + if (!manager->IsHardwareAccelerated(hwFailure)) { + // The decoder description includes whether it is using software or + // hardware, but no information about how the hardware acceleration failed. + WmfDecoderModuleMarkerAndLog( + "WMFVDecoderCreation Success", + "WMFDecoderModule::CreateVideoDecoder success for manager with " + "description %s - DXVA failure: %s", + manager->GetDescriptionName().get(), hwFailure.get()); + } else { + WmfDecoderModuleMarkerAndLog( + "WMFVDecoderCreation Success", + "WMFDecoderModule::CreateVideoDecoder success for manager with " + "description %s", + manager->GetDescriptionName().get()); + } + + RefPtr decoder = new WMFMediaDataDecoder(manager.release()); + return decoder.forget(); +} + +already_AddRefed WMFDecoderModule::CreateAudioDecoder( + const CreateDecoderParams& aParams) { + if (XRE_IsGPUProcess()) { + // Only allow video in the GPU process. 
+ return nullptr; + } + + UniquePtr manager( + new WMFAudioMFTManager(aParams.AudioConfig())); + + if (!manager->Init()) { + WmfDecoderModuleMarkerAndLog( + "WMFADecoderCreation Failure", + "WMFDecoderModule::CreateAudioDecoder failed for manager with " + "description %s", + manager->GetDescriptionName().get()); + return nullptr; + } + + WmfDecoderModuleMarkerAndLog( + "WMFADecoderCreation Success", + "WMFDecoderModule::CreateAudioDecoder success for manager with " + "description %s", + manager->GetDescriptionName().get()); + + RefPtr decoder = new WMFMediaDataDecoder(manager.release()); + return decoder.forget(); +} + +media::DecodeSupportSet WMFDecoderModule::SupportsMimeType( + const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const { + UniquePtr trackInfo = CreateTrackInfoWithMIMEType(aMimeType); + if (!trackInfo) { + return media::DecodeSupportSet{}; + } + auto supports = Supports(SupportDecoderParams(*trackInfo), aDiagnostics); + MOZ_LOG( + sPDMLog, LogLevel::Debug, + ("WMF decoder %s requested type '%s'", + !supports.isEmpty() ? "supports" : "rejects", aMimeType.BeginReading())); + return supports; +} + +/* static */ +bool WMFDecoderModule::IsHEVCSupported() { + return sForceEnableHEVC || StaticPrefs::media_wmf_hevc_enabled() == 1; +} + +} // namespace mozilla + +#undef WFM_DECODER_MODULE_STATUS_MARKER +#undef LOG diff --git a/dom/media/platforms/wmf/WMFDecoderModule.h b/dom/media/platforms/wmf/WMFDecoderModule.h new file mode 100644 index 0000000000..3b130fd657 --- /dev/null +++ b/dom/media/platforms/wmf/WMFDecoderModule.h @@ -0,0 +1,70 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(WMFPlatformDecoderModule_h_) +# define WMFPlatformDecoderModule_h_ + +# include "PlatformDecoderModule.h" +# include "WMF.h" +# include "WMFUtils.h" + +namespace mozilla { + +class MFTDecoder; + +class WMFDecoderModule : public PlatformDecoderModule { + public: + static already_AddRefed Create(); + + // Initializes the module, loads required dynamic libraries, etc. + nsresult Startup() override; + + already_AddRefed CreateVideoDecoder( + const CreateDecoderParams& aParams) override; + + already_AddRefed CreateAudioDecoder( + const CreateDecoderParams& aParams) override; + + bool SupportsColorDepth( + gfx::ColorDepth aColorDepth, + DecoderDoctorDiagnostics* aDiagnostics) const override; + media::DecodeSupportSet SupportsMimeType( + const nsACString& aMimeType, + DecoderDoctorDiagnostics* aDiagnostics) const override; + media::DecodeSupportSet Supports( + const SupportDecoderParams& aParams, + DecoderDoctorDiagnostics* aDiagnostics) const override; + + enum class Config { + None, + ForceEnableHEVC, + }; + + // Called on main thread. + static void Init(Config aConfig = Config::None); + + // Called from any thread, must call init first + static int GetNumDecoderThreads(); + + static HRESULT CreateMFTDecoder(const WMFStreamType& aType, + RefPtr& aDecoder); + static bool CanCreateMFTDecoder(const WMFStreamType& aType); + + private: + // This is used for GPU process only, where we can't set the preference + // directly (it can only set in the parent process) So we need a way to force + // enable the HEVC in order to report the support information via telemetry. 
+ static inline bool sForceEnableHEVC = false; + + static bool IsHEVCSupported(); + + WMFDecoderModule() = default; + virtual ~WMFDecoderModule() = default; +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/platforms/wmf/WMFEncoderModule.cpp b/dom/media/platforms/wmf/WMFEncoderModule.cpp new file mode 100644 index 0000000000..f9f35db653 --- /dev/null +++ b/dom/media/platforms/wmf/WMFEncoderModule.cpp @@ -0,0 +1,32 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFEncoderModule.h" + +#include "WMFMediaDataEncoder.h" + +namespace mozilla { +extern LazyLogModule sPEMLog; + +bool WMFEncoderModule::SupportsCodec(CodecType aCodecType) const { + return CanCreateWMFEncoder(aCodecType); +} + +bool WMFEncoderModule::Supports(const EncoderConfig& aConfig) const { + if (!CanLikelyEncode(aConfig)) { + return false; + } + return SupportsCodec(aConfig.mCodec); +} + +already_AddRefed WMFEncoderModule::CreateVideoEncoder( + const EncoderConfig& aConfig, const RefPtr& aTaskQueue) const { + RefPtr encoder( + new WMFMediaDataEncoder(aConfig, aTaskQueue)); + return encoder.forget(); +} + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/WMFEncoderModule.h b/dom/media/platforms/wmf/WMFEncoderModule.h new file mode 100644 index 0000000000..7b8e0db465 --- /dev/null +++ b/dom/media/platforms/wmf/WMFEncoderModule.h @@ -0,0 +1,27 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef WMFEncoderModule_h_ +#define WMFEncoderModule_h_ + +#include "PlatformEncoderModule.h" + +namespace mozilla { +class WMFEncoderModule final : public PlatformEncoderModule { + public: + virtual bool Supports(const EncoderConfig& aConfig) const override; + virtual bool SupportsCodec(CodecType aCodec) const override; + + const char* GetName() const override { return "WMF Encoder Module"; } + + already_AddRefed CreateVideoEncoder( + const EncoderConfig& aConfig, + const RefPtr& aTaskQueue) const override; +}; + +} // namespace mozilla + +#endif /* WMFEncoderModule_h_ */ diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp b/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp new file mode 100644 index 0000000000..78fd7b50dd --- /dev/null +++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp @@ -0,0 +1,272 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFMediaDataDecoder.h" + +#include "VideoUtils.h" +#include "WMFUtils.h" +#include "mozilla/Logging.h" +#include "mozilla/ProfilerMarkers.h" +#include "mozilla/SyncRunnable.h" +#include "mozilla/TaskQueue.h" +#include "mozilla/Telemetry.h" +#include "nsTArray.h" + +#define LOG(...) 
MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__)) + +namespace mozilla { + +WMFMediaDataDecoder::WMFMediaDataDecoder(MFTManager* aMFTManager) + : mTaskQueue(TaskQueue::Create( + GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), + "WMFMediaDataDecoder")), + mMFTManager(aMFTManager) {} + +WMFMediaDataDecoder::~WMFMediaDataDecoder() {} + +RefPtr WMFMediaDataDecoder::Init() { + MOZ_ASSERT(!mIsShutDown); + return InitPromise::CreateAndResolve(mMFTManager->GetType(), __func__); +} + +RefPtr WMFMediaDataDecoder::Shutdown() { + MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown); + mIsShutDown = true; + + return InvokeAsync(mTaskQueue, __func__, [self = RefPtr{this}, this] { + if (mMFTManager) { + mMFTManager->Shutdown(); + mMFTManager = nullptr; + } + return mTaskQueue->BeginShutdown(); + }); +} + +// Inserts data into the decoder's pipeline. +RefPtr WMFMediaDataDecoder::Decode( + MediaRawData* aSample) { + MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown); + + return InvokeAsync( + mTaskQueue, this, __func__, &WMFMediaDataDecoder::ProcessDecode, aSample); +} + +RefPtr WMFMediaDataDecoder::ProcessError( + HRESULT aError, const char* aReason) { + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + + nsPrintfCString markerString( + "WMFMediaDataDecoder::ProcessError for decoder with description %s with " + "reason: %s", + GetDescriptionName().get(), aReason); + LOG("%s", markerString.get()); + PROFILER_MARKER_TEXT("WMFDecoder Error", MEDIA_PLAYBACK, {}, markerString); + + // TODO: For the error DXGI_ERROR_DEVICE_RESET, we could return + // NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER to get the latest device. Maybe retry + // up to 3 times. + return DecodePromise::CreateAndReject( + MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, + RESULT_DETAIL("%s:%lx", aReason, aError)), + __func__); +} + +RefPtr WMFMediaDataDecoder::ProcessDecode( + MediaRawData* aSample) { + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + DecodedData results; + LOG("ProcessDecode, type=%s, sample=%" PRId64, + TrackTypeToStr(mMFTManager->GetType()), aSample->mTime.ToMicroseconds()); + HRESULT hr = mMFTManager->Input(aSample); + if (hr == MF_E_NOTACCEPTING) { + hr = ProcessOutput(results); + if (FAILED(hr) && hr != MF_E_TRANSFORM_NEED_MORE_INPUT) { + return ProcessError(hr, "MFTManager::Output(1)"); + } + hr = mMFTManager->Input(aSample); + } + + if (FAILED(hr)) { + NS_WARNING("MFTManager rejected sample"); + return ProcessError(hr, "MFTManager::Input"); + } + + if (mOutputsCount == 0) { + mInputTimesSet.insert(aSample->mTime.ToMicroseconds()); + } + + if (!mLastTime || aSample->mTime > *mLastTime) { + mLastTime = Some(aSample->mTime); + mLastDuration = aSample->mDuration; + } + + mSamplesCount++; + mDrainStatus = DrainStatus::DRAINABLE; + mLastStreamOffset = aSample->mOffset; + + hr = ProcessOutput(results); + if (SUCCEEDED(hr) || hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + return DecodePromise::CreateAndResolve(std::move(results), __func__); + } + return ProcessError(hr, "MFTManager::Output(2)"); +} + +bool WMFMediaDataDecoder::ShouldGuardAgaintIncorrectFirstSample( + MediaData* aOutput) const { + // Incorrect first samples have only been observed in video tracks, so only + // guard video tracks. + if (mMFTManager->GetType() != TrackInfo::kVideoTrack) { + return false; + } + + // This is not the first output sample so we don't need to guard it. + if (mOutputsCount != 0) { + return false; + } + + // Output isn't in the map which contains the inputs we gave to the decoder. + // This is probably the invalid first sample. 
MFT decoder sometime will return + // incorrect first output to us, which always has 0 timestamp, even if the + // input we gave to MFT has timestamp that is way later than 0. + MOZ_ASSERT(!mInputTimesSet.empty()); + return mInputTimesSet.find(aOutput->mTime.ToMicroseconds()) == + mInputTimesSet.end() && + aOutput->mTime.ToMicroseconds() == 0; +} + +HRESULT +WMFMediaDataDecoder::ProcessOutput(DecodedData& aResults) { + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + RefPtr output; + HRESULT hr = S_OK; + while (SUCCEEDED(hr = mMFTManager->Output(mLastStreamOffset, output))) { + MOZ_ASSERT(output.get(), "Upon success, we must receive an output"); + if (ShouldGuardAgaintIncorrectFirstSample(output)) { + LOG("Discarding sample with time %" PRId64 + " because of ShouldGuardAgaintIncorrectFirstSample check", + output->mTime.ToMicroseconds()); + continue; + } + if (++mOutputsCount == 1) { + // Got first valid sample, don't need to guard following sample anymore. + mInputTimesSet.clear(); + } + aResults.AppendElement(std::move(output)); + if (mDrainStatus == DrainStatus::DRAINING) { + break; + } + } + return hr; +} + +RefPtr WMFMediaDataDecoder::ProcessFlush() { + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + if (mMFTManager) { + mMFTManager->Flush(); + } + LOG("ProcessFlush, type=%s", TrackTypeToStr(mMFTManager->GetType())); + mDrainStatus = DrainStatus::DRAINED; + mSamplesCount = 0; + mOutputsCount = 0; + mLastTime.reset(); + mInputTimesSet.clear(); + return FlushPromise::CreateAndResolve(true, __func__); +} + +RefPtr WMFMediaDataDecoder::Flush() { + MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown); + + return InvokeAsync(mTaskQueue, this, __func__, + &WMFMediaDataDecoder::ProcessFlush); +} + +RefPtr WMFMediaDataDecoder::ProcessDrain() { + MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); + if (!mMFTManager || mDrainStatus == DrainStatus::DRAINED) { + return DecodePromise::CreateAndResolve(DecodedData(), __func__); + } + + if (mDrainStatus != DrainStatus::DRAINING) { + // Order the decoder to drain... + mMFTManager->Drain(); + mDrainStatus = DrainStatus::DRAINING; + } + + // Then extract all available output. + DecodedData results; + HRESULT hr = ProcessOutput(results); + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + mDrainStatus = DrainStatus::DRAINED; + } + if (SUCCEEDED(hr) || hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + if (results.Length() > 0 && + results.LastElement()->mType == MediaData::Type::VIDEO_DATA) { + const RefPtr& data = results.LastElement(); + if (mSamplesCount == 1 && data->mTime == media::TimeUnit::Zero()) { + // WMF is unable to calculate a duration if only a single sample + // was parsed. Additionally, the pts always comes out at 0 under those + // circumstances. + // Seeing that we've only fed the decoder a single frame, the pts + // and duration are known, it's of the last sample. + data->mTime = *mLastTime; + } + if (data->mTime == *mLastTime) { + // The WMF Video decoder is sometimes unable to provide a valid duration + // on the last sample even if it has been first set through + // SetSampleTime (appears to always happen on Windows 7). So we force + // set the duration of the last sample as it was input. + data->mDuration = mLastDuration; + } + } else if (results.Length() == 1 && + results.LastElement()->mType == MediaData::Type::AUDIO_DATA) { + // When we drain the audio decoder and one frame was queued (such as with + // AAC) the MFT will re-calculate the starting time rather than use the + // value set on the IMF Sample. 
+ // This is normally an okay thing to do; however when dealing with poorly + // muxed content that has incorrect start time, it could lead to broken + // A/V sync. So we ensure that we use the compressed sample's time + // instead. Additionally, this is what all other audio decoders are doing + // anyway. + MOZ_ASSERT(mLastTime, + "We must have attempted to decode at least one frame to get " + "one decoded output"); + results.LastElement()->As()->SetOriginalStartTime(*mLastTime); + } + return DecodePromise::CreateAndResolve(std::move(results), __func__); + } + return ProcessError(hr, "MFTManager::Output"); +} + +RefPtr WMFMediaDataDecoder::Drain() { + MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown); + + return InvokeAsync(mTaskQueue, this, __func__, + &WMFMediaDataDecoder::ProcessDrain); +} + +bool WMFMediaDataDecoder::IsHardwareAccelerated( + nsACString& aFailureReason) const { + MOZ_ASSERT(!mIsShutDown); + + return mMFTManager && mMFTManager->IsHardwareAccelerated(aFailureReason); +} + +void WMFMediaDataDecoder::SetSeekThreshold(const media::TimeUnit& aTime) { + MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown); + + RefPtr self = this; + nsCOMPtr runnable = NS_NewRunnableFunction( + "WMFMediaDataDecoder::SetSeekThreshold", [self, aTime]() { + MOZ_ASSERT(self->mTaskQueue->IsCurrentThreadIn()); + media::TimeUnit threshold = aTime; + self->mMFTManager->SetSeekThreshold(threshold); + }); + nsresult rv = mTaskQueue->Dispatch(runnable.forget()); + MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv)); + Unused << rv; +} + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.h b/dom/media/platforms/wmf/WMFMediaDataDecoder.h new file mode 100644 index 0000000000..b344ba7b65 --- /dev/null +++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.h @@ -0,0 +1,182 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(WMFMediaDataDecoder_h_) +# define WMFMediaDataDecoder_h_ + +# include + +# include "MFTDecoder.h" +# include "PlatformDecoderModule.h" +# include "WMF.h" +# include "mozilla/RefPtr.h" + +namespace mozilla { + +// Encapsulates the initialization of the MFTDecoder appropriate for decoding +// a given stream, and the process of converting the IMFSample produced +// by the MFT into a MediaData object. +class MFTManager { + public: + virtual ~MFTManager() {} + + // Submit a compressed sample for decoding. + // This should forward to the MFTDecoder after performing + // any required sample formatting. + virtual HRESULT Input(MediaRawData* aSample) = 0; + + // Produces decoded output, if possible. Blocks until output can be produced, + // or until no more is able to be produced. + // Returns S_OK on success, or MF_E_TRANSFORM_NEED_MORE_INPUT if there's not + // enough data to produce more output. If this returns a failure code other + // than MF_E_TRANSFORM_NEED_MORE_INPUT, an error will be reported to the + // MP4Reader. + virtual HRESULT Output(int64_t aStreamOffset, RefPtr& aOutput) = 0; + + virtual void Flush() { + mDecoder->Flush(); + mSeekTargetThreshold.reset(); + } + + void Drain() { + if (FAILED(mDecoder->SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0))) { + NS_WARNING("Failed to send DRAIN command to MFT"); + } + } + + // Destroys all resources. 
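For reference on what Flush() and Drain() in this interface translate to at the IMFTransform level, a minimal sketch follows (raw MFT messages, not Mozilla's MFTDecoder wrapper):

#include <mferror.h>
#include <mftransform.h>

// Flush: discard all pending input and output; the decoder state is reset
// and new input can be fed immediately.
HRESULT FlushTransform(IMFTransform* aMFT) {
  return aMFT->ProcessMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
}

// Drain: after this message the MFT refuses new input until every buffered
// frame has been returned. The caller keeps calling ProcessOutput until it
// gets MF_E_TRANSFORM_NEED_MORE_INPUT, which is the DRAINED transition
// handled in ProcessDrain above.
HRESULT StartDraining(IMFTransform* aMFT) {
  return aMFT->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
}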
+ virtual void Shutdown() = 0; + + virtual bool IsHardwareAccelerated(nsACString& aFailureReason) const { + return false; + } + + virtual TrackInfo::TrackType GetType() = 0; + + virtual nsCString GetDescriptionName() const = 0; + + virtual nsCString GetCodecName() const = 0; + + virtual void SetSeekThreshold(const media::TimeUnit& aTime) { + if (aTime.IsValid()) { + mSeekTargetThreshold = Some(aTime); + } else { + mSeekTargetThreshold.reset(); + } + } + + virtual bool HasSeekThreshold() const { + return mSeekTargetThreshold.isSome(); + } + + virtual MediaDataDecoder::ConversionRequired NeedsConversion() const { + return MediaDataDecoder::ConversionRequired::kNeedNone; + } + + protected: + // IMFTransform wrapper that performs the decoding. + RefPtr mDecoder; + + Maybe mSeekTargetThreshold; +}; + +DDLoggedTypeDeclNameAndBase(WMFMediaDataDecoder, MediaDataDecoder); + +// Decodes audio and video using Windows Media Foundation. Samples are decoded +// using the MFTDecoder created by the MFTManager. This class implements +// the higher-level logic that drives mapping the MFT to the async +// MediaDataDecoder interface. The specifics of decoding the exact stream +// type are handled by MFTManager and the MFTDecoder it creates. +class WMFMediaDataDecoder final + : public MediaDataDecoder, + public DecoderDoctorLifeLogger { + public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WMFMediaDataDecoder, final); + + explicit WMFMediaDataDecoder(MFTManager* aOutputSource); + + RefPtr Init() override; + + RefPtr Decode(MediaRawData* aSample) override; + + RefPtr Drain() override; + + RefPtr Flush() override; + + RefPtr Shutdown() override; + + bool IsHardwareAccelerated(nsACString& aFailureReason) const override; + + nsCString GetDescriptionName() const override { + return mMFTManager ? mMFTManager->GetDescriptionName() : "unknown"_ns; + } + + nsCString GetCodecName() const override { + return mMFTManager ? mMFTManager->GetCodecName() : ""_ns; + } + + ConversionRequired NeedsConversion() const override { + MOZ_ASSERT(mMFTManager); + return mMFTManager->NeedsConversion(); + } + + virtual void SetSeekThreshold(const media::TimeUnit& aTime) override; + + private: + ~WMFMediaDataDecoder(); + + RefPtr ProcessError(HRESULT aError, const char* aReason); + + // Called on the task queue. Inserts the sample into the decoder, and + // extracts output if available. + RefPtr ProcessDecode(MediaRawData* aSample); + + // Called on the task queue. Extracts output if available, and delivers + // it to the reader. Called after ProcessDecode() and ProcessDrain(). + HRESULT ProcessOutput(DecodedData& aResults); + + // Called on the task queue. Orders the MFT to flush. There is no output to + // extract. + RefPtr ProcessFlush(); + + // Called on the task queue. Orders the MFT to drain, and then extracts + // all available output. + RefPtr ProcessDrain(); + + // Checks if `aOutput` should be discarded (guarded against) because its a + // potentially invalid output from the decoder. This is done because the + // Windows decoder appears to produce invalid outputs under certain + // conditions. + bool ShouldGuardAgaintIncorrectFirstSample(MediaData* aOutput) const; + + const RefPtr mTaskQueue; + + UniquePtr mMFTManager; + + // The last offset into the media resource that was passed into Input(). + // This is used to approximate the decoder's position in the media resource. 
+ int64_t mLastStreamOffset; + Maybe mLastTime; + media::TimeUnit mLastDuration; + // Before we get the first sample, this records the times of all samples we + // send to the decoder which is used to validate if the first sample is valid. + std::set mInputTimesSet; + int64_t mSamplesCount = 0; + int64_t mOutputsCount = 0; + + bool mIsShutDown = false; + + enum class DrainStatus { + DRAINED, + DRAINABLE, + DRAINING, + }; + DrainStatus mDrainStatus = DrainStatus::DRAINED; +}; + +} // namespace mozilla + +#endif // WMFMediaDataDecoder_h_ diff --git a/dom/media/platforms/wmf/WMFMediaDataEncoder.h b/dom/media/platforms/wmf/WMFMediaDataEncoder.h new file mode 100644 index 0000000000..13848b47ad --- /dev/null +++ b/dom/media/platforms/wmf/WMFMediaDataEncoder.h @@ -0,0 +1,347 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef WMFMediaDataEncoder_h_ +#define WMFMediaDataEncoder_h_ + +#include "ImageContainer.h" +#include "MFTEncoder.h" +#include "PlatformEncoderModule.h" +#include "TimeUnits.h" +#include "WMFDataEncoderUtils.h" +#include "WMFUtils.h" + +namespace mozilla { + +class WMFMediaDataEncoder final : public MediaDataEncoder { + public: + WMFMediaDataEncoder(const EncoderConfig& aConfig, + const RefPtr& aTaskQueue) + : mConfig(aConfig), + mTaskQueue(aTaskQueue), + mHardwareNotAllowed(aConfig.mHardwarePreference == + HardwarePreference::RequireSoftware || + aConfig.mHardwarePreference == + HardwarePreference::None) { + MOZ_ASSERT(mTaskQueue); + } + + RefPtr Init() override { + return InvokeAsync(mTaskQueue, this, __func__, + &WMFMediaDataEncoder::ProcessInit); + } + RefPtr Encode(const MediaData* aSample) override { + MOZ_ASSERT(aSample); + + RefPtr sample(aSample->As()); + + return InvokeAsync>( + mTaskQueue, this, __func__, &WMFMediaDataEncoder::ProcessEncode, + std::move(sample)); + } + RefPtr Drain() override { + return InvokeAsync( + mTaskQueue, __func__, [self = RefPtr(this)]() { + nsTArray> outputs; + return SUCCEEDED(self->mEncoder->Drain(outputs)) + ? self->ProcessOutputSamples(outputs) + : EncodePromise::CreateAndReject( + NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__); + }); + } + RefPtr Shutdown() override { + return InvokeAsync( + mTaskQueue, __func__, [self = RefPtr(this)]() { + if (self->mEncoder) { + self->mEncoder->Destroy(); + self->mEncoder = nullptr; + } + return ShutdownPromise::CreateAndResolve(true, __func__); + }); + } + RefPtr SetBitrate(uint32_t aBitsPerSec) override { + return InvokeAsync( + mTaskQueue, __func__, + [self = RefPtr(this), aBitsPerSec]() { + MOZ_ASSERT(self->mEncoder); + return SUCCEEDED(self->mEncoder->SetBitrate(aBitsPerSec)) + ? GenericPromise::CreateAndResolve(true, __func__) + : GenericPromise::CreateAndReject( + NS_ERROR_DOM_MEDIA_NOT_SUPPORTED_ERR, __func__); + }); + } + + RefPtr Reconfigure( + const RefPtr& aConfigurationChanges) + override { + // General reconfiguration interface not implemented right now + return MediaDataEncoder::ReconfigurationPromise::CreateAndReject( + NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__); + }; + + nsCString GetDescriptionName() const override { + return MFTEncoder::GetFriendlyName(CodecToSubtype(mConfig.mCodec)); + } + + private: + // Automatically lock/unlock IMFMediaBuffer. 
+ class LockBuffer final { + public: + explicit LockBuffer(RefPtr& aBuffer) : mBuffer(aBuffer) { + mResult = mBuffer->Lock(&mBytes, &mCapacity, &mLength); + } + + ~LockBuffer() { + if (SUCCEEDED(mResult)) { + mBuffer->Unlock(); + } + } + + BYTE* Data() { return mBytes; } + DWORD Capacity() { return mCapacity; } + DWORD Length() { return mLength; } + HRESULT Result() { return mResult; } + + private: + RefPtr mBuffer; + BYTE* mBytes; + DWORD mCapacity; + DWORD mLength; + HRESULT mResult; + }; + + RefPtr ProcessInit() { + AssertOnTaskQueue(); + + MOZ_ASSERT(!mEncoder, + "Should not initialize encoder again without shutting down"); + + if (!wmf::MediaFoundationInitializer::HasInitialized()) { + return InitPromise::CreateAndReject( + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Can't create the MFT encoder.")), + __func__); + } + + RefPtr encoder = new MFTEncoder(mHardwareNotAllowed); + HRESULT hr; + mscom::EnsureMTA([&]() { hr = InitMFTEncoder(encoder); }); + + if (FAILED(hr)) { + WMF_ENC_LOGE("init MFTEncoder: error = 0x%lX", hr); + return InitPromise::CreateAndReject( + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Can't create the MFT encoder.")), + __func__); + } + + mEncoder = std::move(encoder); + FillConfigData(); + return InitPromise::CreateAndResolve(TrackInfo::TrackType::kVideoTrack, + __func__); + } + + HRESULT InitMFTEncoder(RefPtr& aEncoder) { + HRESULT hr = aEncoder->Create(CodecToSubtype(mConfig.mCodec)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = SetMediaTypes(aEncoder, mConfig); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = aEncoder->SetModes(mConfig.mBitrate); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + return S_OK; + } + + void FillConfigData() { + nsTArray header; + NS_ENSURE_TRUE_VOID(SUCCEEDED(mEncoder->GetMPEGSequenceHeader(header))); + + mConfigData = + header.Length() > 0 + ? 
ParseH264Parameters(header, mConfig.mUsage == Usage::Realtime) + : nullptr; + } + + RefPtr ProcessEncode(RefPtr&& aSample) { + AssertOnTaskQueue(); + MOZ_ASSERT(mEncoder); + MOZ_ASSERT(aSample); + + RefPtr nv12 = ConvertToNV12InputSample(std::move(aSample)); + if (!nv12 || FAILED(mEncoder->PushInput(std::move(nv12)))) { + WMF_ENC_LOGE("failed to process input sample"); + return EncodePromise::CreateAndReject( + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Failed to process input.")), + __func__); + } + + nsTArray> outputs; + HRESULT hr = mEncoder->TakeOutput(outputs); + if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + FillConfigData(); + } else if (FAILED(hr)) { + WMF_ENC_LOGE("failed to process output"); + return EncodePromise::CreateAndReject( + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Failed to process output.")), + __func__); + } + + return ProcessOutputSamples(outputs); + } + + already_AddRefed ConvertToNV12InputSample( + RefPtr&& aData) { + AssertOnTaskQueue(); + MOZ_ASSERT(mEncoder); + + const layers::PlanarYCbCrImage* image = aData->mImage->AsPlanarYCbCrImage(); + MOZ_ASSERT(image); + const layers::PlanarYCbCrData* yuv = image->GetData(); + auto ySize = yuv->YDataSize(); + auto cbcrSize = yuv->CbCrDataSize(); + size_t yLength = yuv->mYStride * ySize.height; + size_t length = yLength + (yuv->mCbCrStride * cbcrSize.height * 2); + + RefPtr input; + HRESULT hr = mEncoder->CreateInputSample(&input, length); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + RefPtr buffer; + hr = input->GetBufferByIndex(0, getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + hr = buffer->SetCurrentLength(length); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + LockBuffer lockBuffer(buffer); + NS_ENSURE_TRUE(SUCCEEDED(lockBuffer.Result()), nullptr); + + bool ok = libyuv::I420ToNV12( + yuv->mYChannel, yuv->mYStride, yuv->mCbChannel, + yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride, + lockBuffer.Data(), yuv->mYStride, lockBuffer.Data() + yLength, + yuv->mCbCrStride * 2, ySize.width, ySize.height) == 0; + NS_ENSURE_TRUE(ok, nullptr); + + hr = input->SetSampleTime(UsecsToHNs(aData->mTime.ToMicroseconds())); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + hr = + input->SetSampleDuration(UsecsToHNs(aData->mDuration.ToMicroseconds())); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + return input.forget(); + } + + RefPtr ProcessOutputSamples( + nsTArray>& aSamples) { + EncodedData frames; + for (auto sample : aSamples) { + RefPtr frame = IMFSampleToMediaData(sample); + if (frame) { + frames.AppendElement(std::move(frame)); + } else { + WMF_ENC_LOGE("failed to convert output frame"); + } + } + aSamples.Clear(); + return EncodePromise::CreateAndResolve(std::move(frames), __func__); + } + + already_AddRefed IMFSampleToMediaData( + RefPtr& aSample) { + AssertOnTaskQueue(); + MOZ_ASSERT(aSample); + + RefPtr buffer; + HRESULT hr = aSample->GetBufferByIndex(0, getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + LockBuffer lockBuffer(buffer); + NS_ENSURE_TRUE(SUCCEEDED(lockBuffer.Result()), nullptr); + + LONGLONG time = 0; + hr = aSample->GetSampleTime(&time); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + LONGLONG duration = 0; + hr = aSample->GetSampleDuration(&duration); + NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr); + + bool isKeyframe = + MFGetAttributeUINT32(aSample, MFSampleExtension_CleanPoint, false); + + auto frame = MakeRefPtr(); + if (!WriteFrameData(frame, lockBuffer, isKeyframe)) { + return nullptr; + } + + frame->mTime = 
media::TimeUnit::FromMicroseconds(HNsToUsecs(time)); + frame->mDuration = media::TimeUnit::FromMicroseconds(HNsToUsecs(duration)); + frame->mKeyframe = isKeyframe; + + return frame.forget(); + } + + bool WriteFrameData(RefPtr& aDest, LockBuffer& aSrc, + bool aIsKeyframe) { + if (mConfig.mCodec == CodecType::H264) { + size_t prependLength = 0; + RefPtr avccHeader; + if (aIsKeyframe && mConfigData) { + if (mConfig.mUsage == Usage::Realtime) { + prependLength = mConfigData->Length(); + } else { + avccHeader = mConfigData; + } + } + + UniquePtr writer(aDest->CreateWriter()); + if (!writer->SetSize(prependLength + aSrc.Length())) { + WMF_ENC_LOGE("fail to allocate output buffer"); + return false; + } + + if (prependLength > 0) { + PodCopy(writer->Data(), mConfigData->Elements(), prependLength); + } + PodCopy(writer->Data() + prependLength, aSrc.Data(), aSrc.Length()); + + if (mConfig.mUsage != Usage::Realtime && + !AnnexB::ConvertSampleToAVCC(aDest, avccHeader)) { + WMF_ENC_LOGE("fail to convert annex-b sample to AVCC"); + return false; + } + + return true; + } + UniquePtr writer(aDest->CreateWriter()); + if (!writer->SetSize(aSrc.Length())) { + WMF_ENC_LOGE("fail to allocate output buffer"); + return false; + } + + PodCopy(writer->Data(), aSrc.Data(), aSrc.Length()); + return true; + } + + void AssertOnTaskQueue() { MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); } + + EncoderConfig mConfig; + const RefPtr mTaskQueue; + const bool mHardwareNotAllowed; + RefPtr mEncoder; + // SPS/PPS NALUs for realtime usage, avcC otherwise. + RefPtr mConfigData; +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/platforms/wmf/WMFUtils.cpp b/dom/media/platforms/wmf/WMFUtils.cpp new file mode 100644 index 0000000000..d096979919 --- /dev/null +++ b/dom/media/platforms/wmf/WMFUtils.cpp @@ -0,0 +1,628 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFUtils.h" + +#include +#include +#include +#include +#include + +#ifdef MOZ_AV1 +# include "AOMDecoder.h" +#endif +#include "MP4Decoder.h" +#include "VideoUtils.h" +#include "VPXDecoder.h" +#include "mozilla/ArrayUtils.h" +#include "mozilla/CheckedInt.h" +#include "mozilla/Logging.h" +#include "mozilla/RefPtr.h" +#include "nsTArray.h" +#include "nsThreadUtils.h" +#include "nsWindowsHelpers.h" +#include "prenv.h" +#include "mozilla/mscom/EnsureMTA.h" + +#ifndef WAVE_FORMAT_OPUS +# define WAVE_FORMAT_OPUS 0x704F +#endif +DEFINE_GUID(MEDIASUBTYPE_OPUS, WAVE_FORMAT_OPUS, 0x000, 0x0010, 0x80, 0x00, + 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71); + +namespace mozilla { + +using media::TimeUnit; + +bool StreamTypeIsVideo(const WMFStreamType& aType) { + switch (aType) { + case WMFStreamType::H264: + case WMFStreamType::VP8: + case WMFStreamType::VP9: + case WMFStreamType::AV1: + case WMFStreamType::HEVC: + return true; + default: + return false; + } +} + +bool StreamTypeIsAudio(const WMFStreamType& aType) { + switch (aType) { + case WMFStreamType::MP3: + case WMFStreamType::AAC: + case WMFStreamType::OPUS: + case WMFStreamType::VORBIS: + return true; + default: + return false; + } +} + +// Get a string representation of the stream type. Useful for logging. 
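A note on the two H.264 framings that WriteFrameData above switches between: realtime output keeps Annex B start codes with SPS/PPS prepended in-band, while non-realtime output is converted to AVCC, where each NAL unit carries a 4-byte big-endian length prefix and SPS/PPS live out-of-band in the avcC extradata. The small sketch below illustrates the per-NAL difference; it is not Mozilla's AnnexB helper.

#include <cstdint>
#include <vector>

// Wrap a single raw NAL unit in Annex B framing: a 4-byte start code.
std::vector<uint8_t> WrapAnnexB(const uint8_t* aNal, size_t aSize) {
  std::vector<uint8_t> out = {0x00, 0x00, 0x00, 0x01};
  out.insert(out.end(), aNal, aNal + aSize);
  return out;
}

// Wrap the same NAL unit in AVCC framing: a 4-byte big-endian length prefix.
std::vector<uint8_t> WrapAvcc(const uint8_t* aNal, size_t aSize) {
  std::vector<uint8_t> out = {uint8_t(aSize >> 24), uint8_t(aSize >> 16),
                              uint8_t(aSize >> 8), uint8_t(aSize)};
  out.insert(out.end(), aNal, aNal + aSize);
  return out;
}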
+const char* StreamTypeToString(WMFStreamType aStreamType) { + switch (aStreamType) { + case WMFStreamType::H264: + return "H264"; + case WMFStreamType::VP8: + return "VP8"; + case WMFStreamType::VP9: + return "VP9"; + case WMFStreamType::AV1: + return "AV1"; + case WMFStreamType::HEVC: + return "HEVC"; + case WMFStreamType::MP3: + return "MP3"; + case WMFStreamType::AAC: + return "AAC"; + case WMFStreamType::OPUS: + return "OPUS"; + case WMFStreamType::VORBIS: + return "VORBIS"; + default: + MOZ_ASSERT(aStreamType == WMFStreamType::Unknown); + return "Unknown"; + } +} + +WMFStreamType GetStreamTypeFromMimeType(const nsCString& aMimeType) { + if (MP4Decoder::IsH264(aMimeType)) { + return WMFStreamType::H264; + } + if (VPXDecoder::IsVP8(aMimeType)) { + return WMFStreamType::VP8; + } + if (VPXDecoder::IsVP9(aMimeType)) { + return WMFStreamType::VP9; + } +#ifdef MOZ_AV1 + if (AOMDecoder::IsAV1(aMimeType)) { + return WMFStreamType::AV1; + } +#endif + if (MP4Decoder::IsHEVC(aMimeType)) { + return WMFStreamType::HEVC; + } + if (aMimeType.EqualsLiteral("audio/mp4a-latm") || + aMimeType.EqualsLiteral("audio/mp4")) { + return WMFStreamType::AAC; + } + if (aMimeType.EqualsLiteral("audio/mpeg")) { + return WMFStreamType::MP3; + } + if (aMimeType.EqualsLiteral("audio/opus")) { + return WMFStreamType::OPUS; + } + if (aMimeType.EqualsLiteral("audio/vorbis")) { + return WMFStreamType::VORBIS; + } + return WMFStreamType::Unknown; +} + +HRESULT +HNsToFrames(int64_t aHNs, uint32_t aRate, int64_t* aOutFrames) { + MOZ_ASSERT(aOutFrames); + const int64_t HNS_PER_S = USECS_PER_S * 10; + CheckedInt i = aHNs; + i *= aRate; + i /= HNS_PER_S; + NS_ENSURE_TRUE(i.isValid(), E_FAIL); + *aOutFrames = i.value(); + return S_OK; +} + +HRESULT +GetDefaultStride(IMFMediaType* aType, uint32_t aWidth, uint32_t* aOutStride) { + // Try to get the default stride from the media type. + HRESULT hr = aType->GetUINT32(MF_MT_DEFAULT_STRIDE, aOutStride); + if (SUCCEEDED(hr)) { + return S_OK; + } + + // Stride attribute not set, calculate it. 
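+  // MFGetStrideForBitmapInfoHeader derives it from the subtype's FOURCC
+  // (subtype.Data1) and the frame width. As a rough illustration, an
+  // 8-bit 4:2:0 format such as NV12 would typically report a 1280-byte
+  // stride for a 1280-pixel-wide frame, while the 16-bit-per-sample
+  // P010/P016 formats would typically report 2560; the exact value is
+  // whatever the API returns for the format in question.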
+ GUID subtype = GUID_NULL; + + hr = aType->GetGUID(MF_MT_SUBTYPE, &subtype); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = wmf::MFGetStrideForBitmapInfoHeader(subtype.Data1, aWidth, + (LONG*)(aOutStride)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + return hr; +} + +Maybe GetYUVColorSpace(IMFMediaType* aType) { + UINT32 yuvColorMatrix; + HRESULT hr = aType->GetUINT32(MF_MT_YUV_MATRIX, &yuvColorMatrix); + NS_ENSURE_TRUE(SUCCEEDED(hr), {}); + + switch (yuvColorMatrix) { + case MFVideoTransferMatrix_BT2020_10: + case MFVideoTransferMatrix_BT2020_12: + return Some(gfx::YUVColorSpace::BT2020); + case MFVideoTransferMatrix_BT709: + return Some(gfx::YUVColorSpace::BT709); + case MFVideoTransferMatrix_BT601: + return Some(gfx::YUVColorSpace::BT601); + default: + MOZ_ASSERT_UNREACHABLE("Unhandled MFVideoTransferMatrix_?"); + return {}; + } +} + +int32_t MFOffsetToInt32(const MFOffset& aOffset) { + return int32_t(aOffset.value + (aOffset.fract / 65536.0f)); +} + +TimeUnit GetSampleDuration(IMFSample* aSample) { + NS_ENSURE_TRUE(aSample, TimeUnit::Invalid()); + int64_t duration = 0; + HRESULT hr = aSample->GetSampleDuration(&duration); + NS_ENSURE_TRUE(SUCCEEDED(hr), TimeUnit::Invalid()); + return TimeUnit::FromMicroseconds(HNsToUsecs(duration)); +} + +TimeUnit GetSampleTime(IMFSample* aSample) { + NS_ENSURE_TRUE(aSample, TimeUnit::Invalid()); + LONGLONG timestampHns = 0; + HRESULT hr = aSample->GetSampleTime(×tampHns); + NS_ENSURE_TRUE(SUCCEEDED(hr), TimeUnit::Invalid()); + return TimeUnit::FromMicroseconds(HNsToUsecs(timestampHns)); +} + +// Gets the sub-region of the video frame that should be displayed. +// See: +// http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx +HRESULT +GetPictureRegion(IMFMediaType* aMediaType, gfx::IntRect& aOutPictureRegion) { + // Determine if "pan and scan" is enabled for this media. If it is, we + // only display a region of the video frame, not the entire frame. + BOOL panScan = + MFGetAttributeUINT32(aMediaType, MF_MT_PAN_SCAN_ENABLED, FALSE); + + // If pan and scan mode is enabled. Try to get the display region. + HRESULT hr = E_FAIL; + MFVideoArea videoArea; + memset(&videoArea, 0, sizeof(MFVideoArea)); + if (panScan) { + hr = aMediaType->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&videoArea, + sizeof(MFVideoArea), nullptr); + } + + // If we're not in pan-and-scan mode, or the pan-and-scan region is not set, + // check for a minimimum display aperture. + if (!panScan || hr == MF_E_ATTRIBUTENOTFOUND) { + hr = aMediaType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoArea, + sizeof(MFVideoArea), nullptr); + } + + if (hr == MF_E_ATTRIBUTENOTFOUND) { + // Minimum display aperture is not set, for "backward compatibility with + // some components", check for a geometric aperture. + hr = aMediaType->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&videoArea, + sizeof(MFVideoArea), nullptr); + } + + if (SUCCEEDED(hr)) { + // The media specified a picture region, return it. + aOutPictureRegion = gfx::IntRect(MFOffsetToInt32(videoArea.OffsetX), + MFOffsetToInt32(videoArea.OffsetY), + videoArea.Area.cx, videoArea.Area.cy); + return S_OK; + } + + // No picture region defined, fall back to using the entire video area. 
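+  // In that case the full MF_MT_FRAME_SIZE is used as the picture region,
+  // after a sanity check against MAX_VIDEO_WIDTH/MAX_VIDEO_HEIGHT. For
+  // example, a plain 1920x1080 stream with none of the aperture attributes
+  // set ends up with a picture region of (0, 0, 1920, 1080).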
+ UINT32 width = 0, height = 0; + hr = MFGetAttributeSize(aMediaType, MF_MT_FRAME_SIZE, &width, &height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL); + NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL); + + aOutPictureRegion = gfx::IntRect(0, 0, width, height); + return S_OK; +} + +nsString GetProgramW6432Path() { + char* programPath = PR_GetEnvSecure("ProgramW6432"); + if (!programPath) { + programPath = PR_GetEnvSecure("ProgramFiles"); + } + + if (!programPath) { + return u"C:\\Program Files"_ns; + } + return NS_ConvertUTF8toUTF16(programPath); +} + +const char* MFTMessageTypeToStr(MFT_MESSAGE_TYPE aMsg) { + switch (aMsg) { + case MFT_MESSAGE_COMMAND_FLUSH: + return "MFT_MESSAGE_COMMAND_FLUSH"; + case MFT_MESSAGE_COMMAND_DRAIN: + return "MFT_MESSAGE_COMMAND_DRAIN"; + case MFT_MESSAGE_COMMAND_MARKER: + return "MFT_MESSAGE_COMMAND_MARKER"; + case MFT_MESSAGE_SET_D3D_MANAGER: + return "MFT_MESSAGE_SET_D3D_MANAGER"; + case MFT_MESSAGE_NOTIFY_BEGIN_STREAMING: + return "MFT_MESSAGE_NOTIFY_BEGIN_STREAMING"; + case MFT_MESSAGE_NOTIFY_END_STREAMING: + return "MFT_MESSAGE_NOTIFY_END_STREAMING"; + case MFT_MESSAGE_NOTIFY_END_OF_STREAM: + return "MFT_MESSAGE_NOTIFY_END_OF_STREAM"; + case MFT_MESSAGE_NOTIFY_START_OF_STREAM: + return "MFT_MESSAGE_NOTIFY_START_OF_STREAM"; + case MFT_MESSAGE_DROP_SAMPLES: + return "MFT_MESSAGE_DROP_SAMPLES"; + case MFT_MESSAGE_COMMAND_TICK: + return "MFT_MESSAGE_COMMAND_TICK"; + case MFT_MESSAGE_NOTIFY_RELEASE_RESOURCES: + return "MFT_MESSAGE_NOTIFY_RELEASE_RESOURCES"; + case MFT_MESSAGE_NOTIFY_REACQUIRE_RESOURCES: + return "MFT_MESSAGE_NOTIFY_REACQUIRE_RESOURCES"; + case MFT_MESSAGE_NOTIFY_EVENT: + return "MFT_MESSAGE_NOTIFY_EVENT"; + case MFT_MESSAGE_COMMAND_SET_OUTPUT_STREAM_STATE: + return "MFT_MESSAGE_COMMAND_SET_OUTPUT_STREAM_STATE"; + case MFT_MESSAGE_COMMAND_FLUSH_OUTPUT_STREAM: + return "MFT_MESSAGE_COMMAND_FLUSH_OUTPUT_STREAM"; + default: + return "Invalid message?"; + } +} + +GUID AudioMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType) { + if (aMimeType.EqualsLiteral("audio/mpeg")) { + return MFAudioFormat_MP3; + } else if (MP4Decoder::IsAAC(aMimeType)) { + return MFAudioFormat_AAC; + } else if (aMimeType.EqualsLiteral("audio/vorbis")) { + return MFAudioFormat_Vorbis; + } else if (aMimeType.EqualsLiteral("audio/opus")) { + return MFAudioFormat_Opus; + } + NS_WARNING("Unsupport audio mimetype"); + return GUID_NULL; +} + +GUID VideoMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType) { + if (MP4Decoder::IsH264(aMimeType)) { + return MFVideoFormat_H264; + } else if (VPXDecoder::IsVP8(aMimeType)) { + return MFVideoFormat_VP80; + } else if (VPXDecoder::IsVP9(aMimeType)) { + return MFVideoFormat_VP90; + } +#ifdef MOZ_AV1 + else if (AOMDecoder::IsAV1(aMimeType)) { + return MFVideoFormat_AV1; + } +#endif + else if (MP4Decoder::IsHEVC(aMimeType)) { + return MFVideoFormat_HEVC; + } + NS_WARNING("Unsupport video mimetype"); + return GUID_NULL; +} + +void AACAudioSpecificConfigToUserData(uint8_t aAACProfileLevelIndication, + const uint8_t* aAudioSpecConfig, + uint32_t aConfigLength, + nsTArray& aOutUserData) { + MOZ_ASSERT(aOutUserData.IsEmpty()); + + // The MF_MT_USER_DATA for AAC is defined here: + // http://msdn.microsoft.com/en-us/library/windows/desktop/dd742784%28v=vs.85%29.aspx + // + // For MFAudioFormat_AAC, MF_MT_USER_DATA contains the portion of + // the HEAACWAVEINFO structure that appears after the WAVEFORMATEX + // structure (that is, after the wfx member). 
This is followed by + // the AudioSpecificConfig() data, as defined by ISO/IEC 14496-3. + // [...] + // The length of the AudioSpecificConfig() data is 2 bytes for AAC-LC + // or HE-AAC with implicit signaling of SBR/PS. It is more than 2 bytes + // for HE-AAC with explicit signaling of SBR/PS. + // + // The value of audioObjectType as defined in AudioSpecificConfig() + // must be 2, indicating AAC-LC. The value of extensionAudioObjectType + // must be 5 for SBR or 29 for PS. + // + // HEAACWAVEINFO structure: + // typedef struct heaacwaveinfo_tag { + // WAVEFORMATEX wfx; + // WORD wPayloadType; + // WORD wAudioProfileLevelIndication; + // WORD wStructType; + // WORD wReserved1; + // DWORD dwReserved2; + // } + const UINT32 heeInfoLen = 4 * sizeof(WORD) + sizeof(DWORD); + + // The HEAACWAVEINFO must have payload and profile set, + // the rest can be all 0x00. + BYTE heeInfo[heeInfoLen] = {0}; + WORD* w = (WORD*)heeInfo; + w[0] = 0x0; // Payload type raw AAC packet + w[1] = aAACProfileLevelIndication; + + aOutUserData.AppendElements(heeInfo, heeInfoLen); + + if (aAACProfileLevelIndication == 2 && aConfigLength > 2) { + // The AudioSpecificConfig is TTTTTFFF|FCCCCGGG + // (T=ObjectType, F=Frequency, C=Channel, G=GASpecificConfig) + // If frequency = 0xf, then the frequency is explicitly defined on 24 bits. + int8_t frequency = + (aAudioSpecConfig[0] & 0x7) << 1 | (aAudioSpecConfig[1] & 0x80) >> 7; + int8_t channels = (aAudioSpecConfig[1] & 0x78) >> 3; + int8_t gasc = aAudioSpecConfig[1] & 0x7; + if (frequency != 0xf && channels && !gasc) { + // We enter this condition if the AudioSpecificConfig should theorically + // be 2 bytes long but it's not. + // The WMF AAC decoder will error if unknown extensions are found, + // so remove them. + aConfigLength = 2; + } + } + aOutUserData.AppendElements(aAudioSpecConfig, aConfigLength); +} + +namespace wmf { + +static const wchar_t* sDLLs[] = { + L"mfplat.dll", + L"mf.dll", + L"dxva2.dll", + L"evr.dll", +}; + +HRESULT +LoadDLLs() { + static bool sDLLsLoaded = false; + static bool sFailedToLoadDlls = false; + + if (sDLLsLoaded) { + return S_OK; + } + if (sFailedToLoadDlls) { + return E_FAIL; + } + + // Try to load all the required DLLs. If we fail to load any dll, + // unload the dlls we succeeded in loading. + nsTArray loadedDlls; + for (const wchar_t* dll : sDLLs) { + if (!LoadLibrarySystem32(dll)) { + NS_WARNING("Failed to load WMF DLLs"); + for (const wchar_t* loadedDll : loadedDlls) { + FreeLibrary(GetModuleHandleW(loadedDll)); + } + sFailedToLoadDlls = true; + return E_FAIL; + } + loadedDlls.AppendElement(dll); + } + sDLLsLoaded = true; + + return S_OK; +} + +#define ENSURE_FUNCTION_PTR_HELPER(FunctionType, FunctionName, DLL) \ + static FunctionType FunctionName##Ptr = nullptr; \ + if (!FunctionName##Ptr) { \ + FunctionName##Ptr = (FunctionType)GetProcAddress( \ + GetModuleHandleW(L## #DLL), #FunctionName); \ + if (!FunctionName##Ptr) { \ + NS_WARNING("Failed to get GetProcAddress of " #FunctionName \ + " from " #DLL); \ + return E_FAIL; \ + } \ + } + +#define ENSURE_FUNCTION_PTR(FunctionName, DLL) \ + ENSURE_FUNCTION_PTR_HELPER(decltype(::FunctionName)*, FunctionName, DLL) + +#define ENSURE_FUNCTION_PTR_(FunctionName, DLL) \ + ENSURE_FUNCTION_PTR_HELPER(FunctionName##Ptr_t, FunctionName, DLL) + +#define DECL_FUNCTION_PTR(FunctionName, ...) 
\ + typedef HRESULT(STDMETHODCALLTYPE* FunctionName##Ptr_t)(__VA_ARGS__) + +HRESULT +MediaFoundationInitializer::MFStartup() { + HRESULT hr = LoadDLLs(); + if (FAILED(hr)) { + return hr; + } + + const int MF_WIN7_VERSION = (0x0002 << 16 | MF_API_VERSION); + + // decltype is unusable for functions having default parameters + DECL_FUNCTION_PTR(MFStartup, ULONG, DWORD); + ENSURE_FUNCTION_PTR_(MFStartup, Mfplat.dll) + + hr = E_FAIL; + mozilla::mscom::EnsureMTA( + [&]() -> void { hr = MFStartupPtr(MF_WIN7_VERSION, MFSTARTUP_FULL); }); + return hr; +} + +HRESULT +MediaFoundationInitializer::MFShutdown() { + ENSURE_FUNCTION_PTR(MFShutdown, Mfplat.dll) + HRESULT hr = E_FAIL; + mozilla::mscom::EnsureMTA([&]() -> void { hr = (MFShutdownPtr)(); }); + return hr; +} + +HRESULT +MFCreateMediaType(IMFMediaType** aOutMFType) { + ENSURE_FUNCTION_PTR(MFCreateMediaType, Mfplat.dll) + return (MFCreateMediaTypePtr)(aOutMFType); +} + +HRESULT +MFGetStrideForBitmapInfoHeader(DWORD aFormat, DWORD aWidth, LONG* aOutStride) { + ENSURE_FUNCTION_PTR(MFGetStrideForBitmapInfoHeader, evr.dll) + return (MFGetStrideForBitmapInfoHeaderPtr)(aFormat, aWidth, aOutStride); +} + +HRESULT MFGetService(IUnknown* punkObject, REFGUID guidService, REFIID riid, + LPVOID* ppvObject) { + ENSURE_FUNCTION_PTR(MFGetService, mf.dll) + return (MFGetServicePtr)(punkObject, guidService, riid, ppvObject); +} + +HRESULT +DXVA2CreateDirect3DDeviceManager9(UINT* pResetToken, + IDirect3DDeviceManager9** ppDXVAManager) { + ENSURE_FUNCTION_PTR(DXVA2CreateDirect3DDeviceManager9, dxva2.dll) + return (DXVA2CreateDirect3DDeviceManager9Ptr)(pResetToken, ppDXVAManager); +} + +HRESULT +MFCreateSample(IMFSample** ppIMFSample) { + ENSURE_FUNCTION_PTR(MFCreateSample, mfplat.dll) + return (MFCreateSamplePtr)(ppIMFSample); +} + +HRESULT +MFCreateAlignedMemoryBuffer(DWORD cbMaxLength, DWORD fAlignmentFlags, + IMFMediaBuffer** ppBuffer) { + ENSURE_FUNCTION_PTR(MFCreateAlignedMemoryBuffer, mfplat.dll) + return (MFCreateAlignedMemoryBufferPtr)(cbMaxLength, fAlignmentFlags, + ppBuffer); +} + +HRESULT +MFCreateDXGIDeviceManager(UINT* pResetToken, + IMFDXGIDeviceManager** ppDXVAManager) { + ENSURE_FUNCTION_PTR(MFCreateDXGIDeviceManager, mfplat.dll) + return (MFCreateDXGIDeviceManagerPtr)(pResetToken, ppDXVAManager); +} + +HRESULT +MFCreateDXGISurfaceBuffer(REFIID riid, IUnknown* punkSurface, + UINT uSubresourceIndex, BOOL fButtomUpWhenLinear, + IMFMediaBuffer** ppBuffer) { + ENSURE_FUNCTION_PTR(MFCreateDXGISurfaceBuffer, mfplat.dll) + return (MFCreateDXGISurfaceBufferPtr)(riid, punkSurface, uSubresourceIndex, + fButtomUpWhenLinear, ppBuffer); +} + +HRESULT +MFTEnumEx(GUID guidCategory, UINT32 Flags, + const MFT_REGISTER_TYPE_INFO* pInputType, + const MFT_REGISTER_TYPE_INFO* pOutputType, + IMFActivate*** pppMFTActivate, UINT32* pnumMFTActivate) { + ENSURE_FUNCTION_PTR(MFTEnumEx, mfplat.dll) + return (MFTEnumExPtr)(guidCategory, Flags, pInputType, pOutputType, + pppMFTActivate, pnumMFTActivate); +} + +HRESULT MFTGetInfo(CLSID clsidMFT, LPWSTR* pszName, + MFT_REGISTER_TYPE_INFO** ppInputTypes, UINT32* pcInputTypes, + MFT_REGISTER_TYPE_INFO** ppOutputTypes, + UINT32* pcOutputTypes, IMFAttributes** ppAttributes) { + ENSURE_FUNCTION_PTR(MFTGetInfo, mfplat.dll) + return (MFTGetInfoPtr)(clsidMFT, pszName, ppInputTypes, pcInputTypes, + ppOutputTypes, pcOutputTypes, ppAttributes); +} + +HRESULT +MFCreateAttributes(IMFAttributes** ppMFAttributes, UINT32 cInitialSize) { + ENSURE_FUNCTION_PTR(MFCreateAttributes, mfplat.dll) + return (MFCreateAttributesPtr)(ppMFAttributes, 
cInitialSize); +} + +HRESULT MFCreateEventQueue(IMFMediaEventQueue** ppMediaEventQueue) { + ENSURE_FUNCTION_PTR(MFCreateEventQueue, mfplat.dll) + return (MFCreateEventQueuePtr)(ppMediaEventQueue); +} + +HRESULT MFCreateStreamDescriptor(DWORD dwStreamIdentifier, DWORD cMediaTypes, + IMFMediaType** apMediaTypes, + IMFStreamDescriptor** ppDescriptor) { + ENSURE_FUNCTION_PTR(MFCreateStreamDescriptor, mfplat.dll) + return (MFCreateStreamDescriptorPtr)(dwStreamIdentifier, cMediaTypes, + apMediaTypes, ppDescriptor); +} + +HRESULT MFCreateAsyncResult(IUnknown* punkObject, IMFAsyncCallback* pCallback, + IUnknown* punkState, + IMFAsyncResult** ppAsyncResult) { + ENSURE_FUNCTION_PTR(MFCreateAsyncResult, mfplat.dll) + return (MFCreateAsyncResultPtr)(punkObject, pCallback, punkState, + ppAsyncResult); +} + +HRESULT MFCreatePresentationDescriptor( + DWORD cStreamDescriptors, IMFStreamDescriptor** apStreamDescriptors, + IMFPresentationDescriptor** ppPresentationDescriptor) { + ENSURE_FUNCTION_PTR(MFCreatePresentationDescriptor, mfplat.dll) + return (MFCreatePresentationDescriptorPtr)(cStreamDescriptors, + apStreamDescriptors, + ppPresentationDescriptor); +} + +HRESULT MFCreateMemoryBuffer(DWORD cbMaxLength, IMFMediaBuffer** ppBuffer) { + ENSURE_FUNCTION_PTR(MFCreateMemoryBuffer, mfplat.dll); + return (MFCreateMemoryBufferPtr)(cbMaxLength, ppBuffer); +} + +HRESULT MFLockDXGIDeviceManager(UINT* pResetToken, + IMFDXGIDeviceManager** ppManager) { + ENSURE_FUNCTION_PTR(MFLockDXGIDeviceManager, mfplat.dll); + return (MFLockDXGIDeviceManagerPtr)(pResetToken, ppManager); +} + +HRESULT MFUnlockDXGIDeviceManager() { + ENSURE_FUNCTION_PTR(MFUnlockDXGIDeviceManager, mfplat.dll); + return (MFUnlockDXGIDeviceManagerPtr)(); +} + +HRESULT MFPutWorkItem(DWORD dwQueue, IMFAsyncCallback* pCallback, + IUnknown* pState) { + ENSURE_FUNCTION_PTR(MFPutWorkItem, mfplat.dll); + return (MFPutWorkItemPtr)(dwQueue, pCallback, pState); +} + +HRESULT MFSerializeAttributesToStream(IMFAttributes* pAttr, DWORD dwOptions, + IStream* pStm) { + ENSURE_FUNCTION_PTR(MFSerializeAttributesToStream, mfplat.dll); + return (MFSerializeAttributesToStreamPtr)(pAttr, dwOptions, pStm); +} + +HRESULT MFWrapMediaType(IMFMediaType* pOrig, REFGUID MajorType, REFGUID SubType, + IMFMediaType** ppWrap) { + ENSURE_FUNCTION_PTR(MFWrapMediaType, mfplat.dll); + return (MFWrapMediaTypePtr)(pOrig, MajorType, SubType, ppWrap); +} + +} // end namespace wmf +} // end namespace mozilla diff --git a/dom/media/platforms/wmf/WMFUtils.h b/dom/media/platforms/wmf/WMFUtils.h new file mode 100644 index 0000000000..9ce5226739 --- /dev/null +++ b/dom/media/platforms/wmf/WMFUtils.h @@ -0,0 +1,123 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef WMFUtils_h +#define WMFUtils_h + +#include +#include + +#include "ImageTypes.h" +#include "TimeUnits.h" +#include "VideoUtils.h" +#include "WMF.h" +#include "mozilla/gfx/Rect.h" +#include "nsString.h" + +// Various utilities shared by WMF backend files. + +namespace mozilla { + +static const GUID CLSID_MSOpusDecoder = { + 0x63e17c10, + 0x2d43, + 0x4c42, + {0x8f, 0xe3, 0x8d, 0x8b, 0x63, 0xe4, 0x6a, 0x6a}}; + +// Media types supported by Media Foundation. 
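+// SENTINEL is presumably kept last as an end marker, so illustrative code
+// that wants to walk every concrete type would iterate from H264 up to,
+// but not including, SENTINEL.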
+enum class WMFStreamType { + Unknown, + H264, + VP8, + VP9, + AV1, + HEVC, + MP3, + AAC, + OPUS, + VORBIS, + SENTINEL +}; + +bool StreamTypeIsVideo(const WMFStreamType& aType); + +bool StreamTypeIsAudio(const WMFStreamType& aType); + +// Get a string representation of the stream type. Useful for logging. +const char* StreamTypeToString(WMFStreamType aStreamType); + +WMFStreamType GetStreamTypeFromMimeType(const nsCString& aMimeType); + +// Converts from microseconds to hundreds of nanoseconds. +// We use microseconds for our timestamps, whereas WMF uses +// hundreds of nanoseconds. +inline int64_t UsecsToHNs(int64_t aUsecs) { return aUsecs * 10; } + +// Converts from hundreds of nanoseconds to microseconds. +// We use microseconds for our timestamps, whereas WMF uses +// hundreds of nanoseconds. +inline int64_t HNsToUsecs(int64_t hNanoSecs) { return hNanoSecs / 10; } + +HRESULT HNsToFrames(int64_t aHNs, uint32_t aRate, int64_t* aOutFrames); + +HRESULT +GetDefaultStride(IMFMediaType* aType, uint32_t aWidth, uint32_t* aOutStride); + +Maybe GetYUVColorSpace(IMFMediaType* aType); + +int32_t MFOffsetToInt32(const MFOffset& aOffset); + +// Gets the sub-region of the video frame that should be displayed. +// See: +// http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx +HRESULT +GetPictureRegion(IMFMediaType* aMediaType, gfx::IntRect& aOutPictureRegion); + +// Returns the duration of a IMFSample in TimeUnit. +// Returns media::TimeUnit::Invalid() on failure. +media::TimeUnit GetSampleDuration(IMFSample* aSample); + +// Returns the presentation time of a IMFSample in TimeUnit. +// Returns media::TimeUnit::Invalid() on failure. +media::TimeUnit GetSampleTime(IMFSample* aSample); + +inline bool IsFlagSet(DWORD flags, DWORD pattern) { + return (flags & pattern) == pattern; +} + +// Will return %ProgramW6432% value as per: +// https://msdn.microsoft.com/library/windows/desktop/aa384274.aspx +nsString GetProgramW6432Path(); + +const char* MFTMessageTypeToStr(MFT_MESSAGE_TYPE aMsg); + +GUID AudioMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType); + +GUID VideoMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType); + +void AACAudioSpecificConfigToUserData(uint8_t aAACProfileLevelIndication, + const uint8_t* aAudioSpecConfig, + uint32_t aConfigLength, + nsTArray& aOutUserData); + +class ScopedHString final { + public: + explicit ScopedHString(const nsAString& aStr) { + WindowsCreateString(PromiseFlatString(aStr).get(), aStr.Length(), &mString); + } + explicit ScopedHString(const WCHAR aCharArray[]) { + WindowsCreateString(aCharArray, wcslen(aCharArray), &mString); + } + ~ScopedHString() { WindowsDeleteString(mString); } + const HSTRING& Get() { return mString; } + + private: + HSTRING mString; +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp new file mode 100644 index 0000000000..65480c4a01 --- /dev/null +++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp @@ -0,0 +1,1014 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WMFVideoMFTManager.h" + +#include +#include +#include "DXVA2Manager.h" +#include "GMPUtils.h" // For SplitAt. TODO: Move SplitAt to a central place. 
+#include "IMFYCbCrImage.h" +#include "ImageContainer.h" +#include "MediaInfo.h" +#include "MediaTelemetryConstants.h" +#include "VideoUtils.h" +#include "WMFDecoderModule.h" +#include "WMFUtils.h" +#include "gfx2DGlue.h" +#include "gfxWindowsPlatform.h" +#include "mozilla/AbstractThread.h" +#include "mozilla/ClearOnShutdown.h" +#include "mozilla/Logging.h" +#include "mozilla/SchedulerGroup.h" +#include "mozilla/StaticPrefs_gfx.h" +#include "mozilla/StaticPrefs_media.h" +#include "mozilla/SyncRunnable.h" +#include "mozilla/Telemetry.h" +#include "mozilla/gfx/DeviceManagerDx.h" +#include "mozilla/gfx/gfxVars.h" +#include "mozilla/layers/LayersTypes.h" +#include "nsPrintfCString.h" +#include "nsThreadUtils.h" +#include "nsWindowsHelpers.h" + +#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__)) + +using mozilla::layers::Image; +using mozilla::layers::IMFYCbCrImage; +using mozilla::layers::LayerManager; +using mozilla::layers::LayersBackend; +using mozilla::media::TimeUnit; + +namespace mozilla { + +LayersBackend GetCompositorBackendType( + layers::KnowsCompositor* aKnowsCompositor) { + if (aKnowsCompositor) { + return aKnowsCompositor->GetCompositorBackendType(); + } + return LayersBackend::LAYERS_NONE; +} + +WMFVideoMFTManager::WMFVideoMFTManager( + const VideoInfo& aConfig, layers::KnowsCompositor* aKnowsCompositor, + layers::ImageContainer* aImageContainer, float aFramerate, + const CreateDecoderParams::OptionSet& aOptions, bool aDXVAEnabled, + Maybe aTrackingId) + : mVideoInfo(aConfig), + mImageSize(aConfig.mImage), + mStreamType(GetStreamTypeFromMimeType(aConfig.mMimeType)), + mSoftwareImageSize(aConfig.mImage), + mSoftwarePictureSize(aConfig.mImage), + mVideoStride(0), + mColorSpace(aConfig.mColorSpace), + mColorRange(aConfig.mColorRange), + mImageContainer(aImageContainer), + mKnowsCompositor(aKnowsCompositor), + mDXVAEnabled(aDXVAEnabled && + !aOptions.contains( + CreateDecoderParams::Option::HardwareDecoderNotAllowed)), + mZeroCopyNV12Texture(false), + mFramerate(aFramerate), + mLowLatency(aOptions.contains(CreateDecoderParams::Option::LowLatency)), + mTrackingId(std::move(aTrackingId)) +// mVideoStride, mVideoWidth, mVideoHeight, mUseHwAccel are initialized in +// Init(). +{ + MOZ_COUNT_CTOR(WMFVideoMFTManager); + + // The V and U planes are stored 16-row-aligned, so we need to add padding + // to the row heights to ensure the Y'CbCr planes are referenced properly. + // This value is only used with software decoder. + if (mSoftwareImageSize.height % 16 != 0) { + mSoftwareImageSize.height += 16 - (mSoftwareImageSize.height % 16); + } +} + +WMFVideoMFTManager::~WMFVideoMFTManager() { + MOZ_COUNT_DTOR(WMFVideoMFTManager); +} + +/* static */ +const GUID& WMFVideoMFTManager::GetMediaSubtypeGUID() { + MOZ_ASSERT(StreamTypeIsVideo(mStreamType)); + switch (mStreamType) { + case WMFStreamType::H264: + return MFVideoFormat_H264; + case WMFStreamType::VP8: + return MFVideoFormat_VP80; + case WMFStreamType::VP9: + return MFVideoFormat_VP90; + case WMFStreamType::AV1: + return MFVideoFormat_AV1; + case WMFStreamType::HEVC: + return MFVideoFormat_HEVC; + default: + return GUID_NULL; + }; +} + +bool WMFVideoMFTManager::InitializeDXVA() { + // If we use DXVA but aren't running with a D3D layer manager then the + // readback of decoded video frames from GPU to CPU memory grinds painting + // to a halt, and makes playback performance *worse*. 
+ if (!mDXVAEnabled) { + mDXVAFailureReason.AssignLiteral( + "Hardware video decoding disabled or blacklisted"); + return false; + } + MOZ_ASSERT(!mDXVA2Manager); + if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11()) { + mDXVAFailureReason.AssignLiteral("Unsupported layers backend"); + return false; + } + + if (!XRE_IsRDDProcess() && !XRE_IsGPUProcess()) { + mDXVAFailureReason.AssignLiteral( + "DXVA only supported in RDD or GPU process"); + return false; + } + + bool d3d11 = true; + if (!StaticPrefs::media_wmf_dxva_d3d11_enabled()) { + mDXVAFailureReason = nsPrintfCString( + "D3D11: %s is false", + StaticPrefs::GetPrefName_media_wmf_dxva_d3d11_enabled()); + d3d11 = false; + } + + if (d3d11) { + mDXVAFailureReason.AppendLiteral("D3D11: "); + mDXVA2Manager.reset( + DXVA2Manager::CreateD3D11DXVA(mKnowsCompositor, mDXVAFailureReason)); + if (mDXVA2Manager) { + return true; + } + } + + return mDXVA2Manager != nullptr; +} + +MediaResult WMFVideoMFTManager::ValidateVideoInfo() { + NS_ENSURE_TRUE(StreamTypeIsVideo(mStreamType), + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Invalid stream type"))); + switch (mStreamType) { + case WMFStreamType::H264: + if (!StaticPrefs::media_wmf_allow_unsupported_resolutions()) { + // The WMF H.264 decoder is documented to have a minimum resolution + // 48x48 pixels for resolution, but we won't enable hw decoding for the + // resolution < 132 pixels. It's assumed the software decoder doesn't + // have this limitation, but it still might have maximum resolution + // limitation. + // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx + static const int32_t MAX_H264_PIXEL_COUNT = 4096 * 2304; + const CheckedInt32 pixelCount = + CheckedInt32(mVideoInfo.mImage.width) * mVideoInfo.mImage.height; + + if (!pixelCount.isValid() || + pixelCount.value() > MAX_H264_PIXEL_COUNT) { + mIsValid = false; + return MediaResult( + NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Can't decode H.264 stream because its " + "resolution is out of the maximum limitation")); + } + } + break; + default: + break; + } + + return NS_OK; +} + +MediaResult WMFVideoMFTManager::Init() { + MediaResult result = ValidateVideoInfo(); + if (NS_FAILED(result)) { + return result; + } + + result = InitInternal(); + if (NS_SUCCEEDED(result) && mDXVA2Manager) { + // If we had some failures but eventually made it work, + // make sure we preserve the messages. + mDXVAFailureReason.AppendLiteral("Using D3D11 API"); + } + + return result; +} + +MediaResult WMFVideoMFTManager::InitInternal() { + // The H264 SanityTest uses a 132x132 videos to determine if DXVA can be used. + // so we want to use the software decoder for videos with lower resolutions. + static const int MIN_H264_HW_WIDTH = 132; + static const int MIN_H264_HW_HEIGHT = 132; + + mUseHwAccel = false; // default value; changed if D3D setup succeeds. 
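+  // Decide first whether DXVA should even be attempted: H264 streams below
+  // the 132x132 sanity-test size stay on the software path, so e.g. a
+  // 128x96 H264 clip is decoded in software even on DXVA-capable hardware.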
+ bool useDxva = true; + + if (mStreamType == WMFStreamType::H264 && + (mVideoInfo.ImageRect().width < MIN_H264_HW_WIDTH || + mVideoInfo.ImageRect().height < MIN_H264_HW_HEIGHT)) { + useDxva = false; + mDXVAFailureReason = nsPrintfCString( + "H264 video resolution too low: %" PRIu32 "x%" PRIu32, + mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height); + } + + if (useDxva) { + useDxva = InitializeDXVA(); + } + + RefPtr decoder = new MFTDecoder(); + HRESULT hr = WMFDecoderModule::CreateMFTDecoder(mStreamType, decoder); + NS_ENSURE_TRUE(SUCCEEDED(hr), + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Can't create the MFT decoder."))); + + RefPtr attr(decoder->GetAttributes()); + UINT32 aware = 0; + if (attr) { + attr->GetUINT32(MF_SA_D3D_AWARE, &aware); + attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads, + WMFDecoderModule::GetNumDecoderThreads()); + bool lowLatency = StaticPrefs::media_wmf_low_latency_enabled(); + if (mLowLatency || lowLatency) { + hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE); + if (SUCCEEDED(hr)) { + LOG("Enabling Low Latency Mode"); + } else { + LOG("Couldn't enable Low Latency Mode"); + } + } + + if (gfx::gfxVars::HwDecodedVideoZeroCopy() && mKnowsCompositor && + mKnowsCompositor->UsingHardwareWebRender() && mDXVA2Manager && + mDXVA2Manager->SupportsZeroCopyNV12Texture()) { + mZeroCopyNV12Texture = true; + const int kOutputBufferSize = 10; + + // Each picture buffer can store a sample, plus one in + // pending_output_samples_. The decoder adds this number to the number of + // reference pictures it expects to need and uses that to determine the + // array size of the output texture. + const int kMaxOutputSamples = kOutputBufferSize + 1; + attr->SetUINT32(MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT_PROGRESSIVE, + kMaxOutputSamples); + attr->SetUINT32(MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, kMaxOutputSamples); + } + } + + if (useDxva) { + if (aware) { + // TODO: Test if I need this anywhere... Maybe on Vista? + // hr = attr->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE); + // NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + MOZ_ASSERT(mDXVA2Manager); + ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager()); + hr = decoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager); + if (SUCCEEDED(hr)) { + mUseHwAccel = true; + } else { + mDXVAFailureReason = nsPrintfCString( + "MFT_MESSAGE_SET_D3D_MANAGER failed with code %lX", hr); + } + } else { + mDXVAFailureReason.AssignLiteral( + "Decoder returned false for MF_SA_D3D_AWARE"); + } + } + + if (!mDXVAFailureReason.IsEmpty()) { + // DXVA failure reason being set can mean that D3D11 failed, or that DXVA is + // entirely disabled. 
+ LOG("DXVA failure: %s", mDXVAFailureReason.get()); + } + + if (!mUseHwAccel) { + if (mDXVA2Manager) { + // Either mDXVAEnabled was set to false prior the second call to + // InitInternal() due to CanUseDXVA() returning false, or + // MFT_MESSAGE_SET_D3D_MANAGER failed + mDXVA2Manager.reset(); + } + if (mStreamType == WMFStreamType::VP9 || + mStreamType == WMFStreamType::VP8 || + mStreamType == WMFStreamType::AV1 || + mStreamType == WMFStreamType::HEVC) { + return MediaResult( + NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Use VP8/VP9/AV1 MFT only if HW acceleration " + "is available.")); + } + Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED, + uint32_t(media::MediaDecoderBackend::WMFSoftware)); + } + + mDecoder = decoder; + hr = SetDecoderMediaTypes(); + NS_ENSURE_TRUE( + SUCCEEDED(hr), + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Fail to set the decoder media types."))); + + RefPtr inputType; + hr = mDecoder->GetInputMediaType(inputType); + NS_ENSURE_TRUE( + SUCCEEDED(hr), + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Fail to get the input media type."))); + + RefPtr outputType; + hr = mDecoder->GetOutputMediaType(outputType); + NS_ENSURE_TRUE( + SUCCEEDED(hr), + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Fail to get the output media type."))); + + if (mUseHwAccel && !CanUseDXVA(inputType, outputType)) { + LOG("DXVA manager determined that the input type was unsupported in " + "hardware, retrying init without DXVA."); + mDXVAEnabled = false; + // DXVA initialization with current decoder actually failed, + // re-do initialization. + return InitInternal(); + } + + LOG("Video Decoder initialized, Using DXVA: %s", + (mUseHwAccel ? "Yes" : "No")); + + if (mUseHwAccel) { + hr = mDXVA2Manager->ConfigureForSize( + outputType, + mColorSpace.refOr( + DefaultColorSpace({mImageSize.width, mImageSize.height})), + mColorRange, mVideoInfo.ImageRect().width, + mVideoInfo.ImageRect().height); + NS_ENSURE_TRUE(SUCCEEDED(hr), + MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, + RESULT_DETAIL("Fail to configure image size for " + "DXVA2Manager."))); + } else { + GetDefaultStride(outputType, mVideoInfo.ImageRect().width, &mVideoStride); + } + LOG("WMFVideoMFTManager frame geometry stride=%u picture=(%d, %d, %d, %d) " + "display=(%d,%d)", + mVideoStride, mVideoInfo.ImageRect().x, mVideoInfo.ImageRect().y, + mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height, + mVideoInfo.mDisplay.width, mVideoInfo.mDisplay.height); + + if (!mUseHwAccel) { + RefPtr device = gfx::DeviceManagerDx::Get()->GetImageDevice(); + if (device) { + mIMFUsable = true; + } + } + return MediaResult(NS_OK); +} + +HRESULT +WMFVideoMFTManager::SetDecoderMediaTypes() { + // Setup the input/output media types. 
+ RefPtr inputType; + HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID()); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, + MFVideoInterlace_MixedInterlaceOrProgressive); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, + mVideoInfo.ImageRect().width, + mVideoInfo.ImageRect().height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + UINT32 fpsDenominator = 1000; + UINT32 fpsNumerator = static_cast(mFramerate * fpsDenominator); + if (fpsNumerator > 0) { + hr = MFSetAttributeRatio(inputType, MF_MT_FRAME_RATE, fpsNumerator, + fpsDenominator); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + + RefPtr outputType; + hr = wmf::MFCreateMediaType(getter_AddRefs(outputType)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = MFSetAttributeSize(outputType, MF_MT_FRAME_SIZE, + mVideoInfo.ImageRect().width, + mVideoInfo.ImageRect().height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (fpsNumerator > 0) { + hr = MFSetAttributeRatio(outputType, MF_MT_FRAME_RATE, fpsNumerator, + fpsDenominator); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + + GUID outputSubType = [&]() { + switch (mVideoInfo.mColorDepth) { + case gfx::ColorDepth::COLOR_8: + return mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12; + case gfx::ColorDepth::COLOR_10: + return MFVideoFormat_P010; + case gfx::ColorDepth::COLOR_12: + case gfx::ColorDepth::COLOR_16: + return MFVideoFormat_P016; + default: + MOZ_ASSERT_UNREACHABLE("Unexpected color depth"); + } + }(); + hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (mZeroCopyNV12Texture) { + RefPtr attr(mDecoder->GetOutputStreamAttributes()); + if (attr) { + hr = attr->SetUINT32(MF_SA_D3D11_SHARED_WITHOUT_MUTEX, TRUE); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + hr = attr->SetUINT32(MF_SA_D3D11_BINDFLAGS, + D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_DECODER); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + } + + return mDecoder->SetMediaTypes(inputType, outputType); +} + +HRESULT +WMFVideoMFTManager::Input(MediaRawData* aSample) { + if (!mIsValid) { + return E_FAIL; + } + + if (!mDecoder) { + // This can happen during shutdown. + return E_FAIL; + } + + mTrackingId.apply([&](const auto& aId) { + MediaInfoFlag flag = MediaInfoFlag::None; + flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame + : MediaInfoFlag::NonKeyFrame); + flag |= (mUseHwAccel ? 
MediaInfoFlag::HardwareDecoding + : MediaInfoFlag::SoftwareDecoding); + switch (mStreamType) { + case WMFStreamType::H264: + flag |= MediaInfoFlag::VIDEO_H264; + break; + case WMFStreamType::VP8: + flag |= MediaInfoFlag::VIDEO_VP8; + break; + case WMFStreamType::VP9: + flag |= MediaInfoFlag::VIDEO_VP9; + break; + case WMFStreamType::AV1: + flag |= MediaInfoFlag::VIDEO_AV1; + break; + case WMFStreamType::HEVC: + flag |= MediaInfoFlag::VIDEO_HEVC; + break; + default: + break; + }; + mPerformanceRecorder.Start(aSample->mTime.ToMicroseconds(), + "WMFVideoDecoder"_ns, aId, flag); + }); + + RefPtr inputSample; + HRESULT hr = mDecoder->CreateInputSample( + aSample->Data(), uint32_t(aSample->Size()), + aSample->mTime.ToMicroseconds(), aSample->mDuration.ToMicroseconds(), + &inputSample); + NS_ENSURE_TRUE(SUCCEEDED(hr) && inputSample != nullptr, hr); + + if (!mColorSpace && aSample->mTrackInfo) { + // The colorspace definition is found in the H264 SPS NAL, available out of + // band, while for VP9 it's only available within the VP9 bytestream. + // The info would have been updated by the MediaChangeMonitor. + mColorSpace = aSample->mTrackInfo->GetAsVideoInfo()->mColorSpace; + mColorRange = aSample->mTrackInfo->GetAsVideoInfo()->mColorRange; + } + mLastDuration = aSample->mDuration; + + // Forward sample data to the decoder. + return mDecoder->Input(inputSample); +} + +// The MFTransforms we use for decoding H264 and AV1 video will silently fall +// back to software decoding (even if we've negotiated DXVA) if the GPU +// doesn't support decoding the given codec and resolution. It will then upload +// the software decoded frames into d3d textures to preserve behaviour. +// +// Unfortunately this seems to cause corruption (see bug 1193547) and is +// slow because the upload is done into a non-shareable texture and requires +// us to copy it. +// +// This code tests if the given codec and resolution can be supported directly +// on the GPU, and makes sure we only ask the MFT for DXVA if it can be +// supported properly. +// +// Ideally we'd know the framerate during initialization and would also ensure +// that new decoders are created if the resolution changes. Then we could move +// this check into Init and consolidate the main thread blocking code. +bool WMFVideoMFTManager::CanUseDXVA(IMFMediaType* aInputType, + IMFMediaType* aOutputType) { + MOZ_ASSERT(mDXVA2Manager); + // Check if we're able to use hardware decoding for the current codec config. + return mDXVA2Manager->SupportsConfig(mVideoInfo, aInputType, aOutputType); +} + +TimeUnit WMFVideoMFTManager::GetSampleDurationOrLastKnownDuration( + IMFSample* aSample) const { + TimeUnit duration = GetSampleDuration(aSample); + if (!duration.IsValid()) { + // WMF returned a non-success code (likely duration unknown, but the API + // also allows for other, unspecified codes). + LOG("Got unknown sample duration -- bad return code. Using mLastDuration."); + } else if (duration == TimeUnit::Zero()) { + // Duration is zero. WMF uses this to indicate an unknown duration. + LOG("Got unknown sample duration -- zero duration returned. Using " + "mLastDuration."); + } else if (duration.IsNegative()) { + // A negative duration will cause issues up the stack. It's also unclear + // why this would happen, but the API allows for it by returning a signed + // int, so we handle it here. + LOG("Got negative sample duration: %f seconds. Using mLastDuration " + "instead.", + duration.ToSeconds()); + } else { + // We got a duration without any problems. 
+ return duration; + } + + return mLastDuration; +} + +HRESULT +WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample, + int64_t aStreamOffset, + VideoData** aOutVideoData) { + NS_ENSURE_TRUE(aSample, E_POINTER); + NS_ENSURE_TRUE(aOutVideoData, E_POINTER); + + *aOutVideoData = nullptr; + + HRESULT hr; + RefPtr buffer; + + // Must convert to contiguous buffer to use IMD2DBuffer interface. + hr = aSample->ConvertToContiguousBuffer(getter_AddRefs(buffer)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + // Try and use the IMF2DBuffer interface if available, otherwise fallback + // to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient, + // but only some systems (Windows 8?) support it. + BYTE* data = nullptr; + LONG stride = 0; + RefPtr twoDBuffer; + hr = buffer->QueryInterface( + static_cast(getter_AddRefs(twoDBuffer))); + if (SUCCEEDED(hr)) { + hr = twoDBuffer->Lock2D(&data, &stride); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } else { + hr = buffer->Lock(&data, nullptr, nullptr); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + stride = mVideoStride; + } + + const GUID& subType = mDecoder->GetOutputMediaSubType(); + MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 || + subType == MFVideoFormat_P010 || + subType == MFVideoFormat_P016); + const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12 + ? gfx::ColorDepth::COLOR_8 + : gfx::ColorDepth::COLOR_16; + + // YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....] + // i.e., Y, then V, then U. + // P010, P016 planar format (2 planes) [YYYY....][UVUV...] + // See + // https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats + VideoData::YCbCrBuffer b; + + const uint32_t videoWidth = mSoftwareImageSize.width; + const uint32_t videoHeight = mSoftwareImageSize.height; + + // Y (Y') plane + b.mPlanes[0].mData = data; + b.mPlanes[0].mStride = stride; + b.mPlanes[0].mHeight = videoHeight; + b.mPlanes[0].mWidth = videoWidth; + b.mPlanes[0].mSkip = 0; + + MOZ_DIAGNOSTIC_ASSERT(mSoftwareImageSize.height % 16 == 0, + "decoded height must be 16 bytes aligned"); + const uint32_t y_size = stride * mSoftwareImageSize.height; + const uint32_t v_size = stride * mSoftwareImageSize.height / 4; + const uint32_t halfStride = (stride + 1) / 2; + const uint32_t halfHeight = (videoHeight + 1) / 2; + const uint32_t halfWidth = (videoWidth + 1) / 2; + + if (subType == MFVideoFormat_YV12) { + // U plane (Cb) + b.mPlanes[1].mData = data + y_size + v_size; + b.mPlanes[1].mStride = halfStride; + b.mPlanes[1].mHeight = halfHeight; + b.mPlanes[1].mWidth = halfWidth; + b.mPlanes[1].mSkip = 0; + + // V plane (Cr) + b.mPlanes[2].mData = data + y_size; + b.mPlanes[2].mStride = halfStride; + b.mPlanes[2].mHeight = halfHeight; + b.mPlanes[2].mWidth = halfWidth; + b.mPlanes[2].mSkip = 0; + } else { + // U plane (Cb) + b.mPlanes[1].mData = data + y_size; + b.mPlanes[1].mStride = stride; + b.mPlanes[1].mHeight = halfHeight; + b.mPlanes[1].mWidth = halfWidth; + b.mPlanes[1].mSkip = 1; + + // V plane (Cr) + b.mPlanes[2].mData = data + y_size + sizeof(short); + b.mPlanes[2].mStride = stride; + b.mPlanes[2].mHeight = halfHeight; + b.mPlanes[2].mWidth = halfWidth; + b.mPlanes[2].mSkip = 1; + } + + b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT; + + // YuvColorSpace + b.mYUVColorSpace = + mColorSpace.refOr(DefaultColorSpace({videoWidth, videoHeight})); + b.mColorDepth = colorDepth; + b.mColorRange = mColorRange; + + TimeUnit pts = GetSampleTime(aSample); + NS_ENSURE_TRUE(pts.IsValid(), E_FAIL); + TimeUnit 
duration = GetSampleDurationOrLastKnownDuration(aSample); + NS_ENSURE_TRUE(duration.IsValid(), E_FAIL); + gfx::IntRect pictureRegion = mVideoInfo.ScaledImageRect( + mSoftwarePictureSize.width, mSoftwarePictureSize.height); + + if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor || + !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) { + Result, MediaResult> r = + VideoData::CreateAndCopyData( + mVideoInfo, mImageContainer, aStreamOffset, pts, duration, b, false, + TimeUnit::FromMicroseconds(-1), pictureRegion, mKnowsCompositor); + RefPtr v = r.unwrapOr(nullptr); + if (twoDBuffer) { + twoDBuffer->Unlock2D(); + } else { + buffer->Unlock(); + } + v.forget(aOutVideoData); + return S_OK; + } + + RefPtr image = + new IMFYCbCrImage(buffer, twoDBuffer, mKnowsCompositor, mImageContainer); + + VideoData::SetVideoDataToImage(image, mVideoInfo, b, pictureRegion, false); + + RefPtr v = VideoData::CreateFromImage( + mVideoInfo.mDisplay, aStreamOffset, pts, duration, image.forget(), false, + TimeUnit::FromMicroseconds(-1)); + + mPerformanceRecorder.Record(pts.ToMicroseconds(), [&](DecodeStage& aStage) { + aStage.SetColorDepth(b.mColorDepth); + aStage.SetColorRange(b.mColorRange); + aStage.SetYUVColorSpace(b.mYUVColorSpace); + if (subType == MFVideoFormat_NV12) { + aStage.SetImageFormat(DecodeStage::NV12); + } else if (subType == MFVideoFormat_YV12) { + aStage.SetImageFormat(DecodeStage::YV12); + } else if (subType == MFVideoFormat_P010) { + aStage.SetImageFormat(DecodeStage::P010); + } else if (subType == MFVideoFormat_P016) { + aStage.SetImageFormat(DecodeStage::P016); + } + aStage.SetResolution(videoWidth, videoHeight); + }); + + v.forget(aOutVideoData); + return S_OK; +} + +HRESULT +WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample, + int64_t aStreamOffset, + VideoData** aOutVideoData) { + NS_ENSURE_TRUE(aSample, E_POINTER); + NS_ENSURE_TRUE(aOutVideoData, E_POINTER); + NS_ENSURE_TRUE(mDXVA2Manager, E_ABORT); + NS_ENSURE_TRUE(mUseHwAccel, E_ABORT); + + *aOutVideoData = nullptr; + HRESULT hr; + + gfx::IntRect pictureRegion = + mVideoInfo.ScaledImageRect(mImageSize.width, mImageSize.height); + RefPtr image; + if (mZeroCopyNV12Texture && mDXVA2Manager->SupportsZeroCopyNV12Texture()) { + hr = mDXVA2Manager->WrapTextureWithImage(aSample, pictureRegion, + getter_AddRefs(image)); + } else { + hr = mDXVA2Manager->CopyToImage(aSample, pictureRegion, + getter_AddRefs(image)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } + NS_ENSURE_TRUE(image, E_FAIL); + + gfx::IntSize size = image->GetSize(); + + TimeUnit pts = GetSampleTime(aSample); + NS_ENSURE_TRUE(pts.IsValid(), E_FAIL); + TimeUnit duration = GetSampleDurationOrLastKnownDuration(aSample); + NS_ENSURE_TRUE(duration.IsValid(), E_FAIL); + RefPtr v = VideoData::CreateFromImage( + mVideoInfo.mDisplay, aStreamOffset, pts, duration, image.forget(), false, + TimeUnit::FromMicroseconds(-1)); + + NS_ENSURE_TRUE(v, E_FAIL); + v.forget(aOutVideoData); + + mPerformanceRecorder.Record(pts.ToMicroseconds(), [&](DecodeStage& aStage) { + aStage.SetColorDepth(mVideoInfo.mColorDepth); + aStage.SetColorRange(mColorRange); + aStage.SetYUVColorSpace(mColorSpace.refOr( + DefaultColorSpace({mImageSize.width, mImageSize.height}))); + const GUID& subType = mDecoder->GetOutputMediaSubType(); + if (subType == MFVideoFormat_NV12) { + aStage.SetImageFormat(DecodeStage::NV12); + } else if (subType == MFVideoFormat_YV12) { + aStage.SetImageFormat(DecodeStage::YV12); + } else if (subType == MFVideoFormat_P010) { + aStage.SetImageFormat(DecodeStage::P010); + } else if 
(subType == MFVideoFormat_P016) { + aStage.SetImageFormat(DecodeStage::P016); + } + aStage.SetResolution(size.width, size.height); + }); + + return S_OK; +} + +// Blocks until decoded sample is produced by the decoder. +HRESULT +WMFVideoMFTManager::Output(int64_t aStreamOffset, RefPtr& aOutData) { + RefPtr sample; + HRESULT hr; + aOutData = nullptr; + int typeChangeCount = 0; + + // Loop until we decode a sample, or an unexpected error that we can't + // handle occurs. + while (true) { + hr = mDecoder->Output(&sample); + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) { + return MF_E_TRANSFORM_NEED_MORE_INPUT; + } + + if (hr == MF_E_TRANSFORM_STREAM_CHANGE) { + MOZ_ASSERT(!sample); + // Video stream output type change, probably geometric aperture change or + // pixel type. + // We must reconfigure the decoder output type. + + // Attempt to find an appropriate OutputType, trying in order: + // if HW accelerated: NV12, P010, P016 + // if SW: YV12, P010, P016 + if (FAILED( + (hr = (mDecoder->FindDecoderOutputTypeWithSubtype( + mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12)))) && + FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype( + MFVideoFormat_P010))) && + FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype( + MFVideoFormat_P016)))) { + LOG("No suitable output format found"); + return hr; + } + + RefPtr outputType; + hr = mDecoder->GetOutputMediaType(outputType); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + if (mUseHwAccel) { + hr = mDXVA2Manager->ConfigureForSize( + outputType, + mColorSpace.refOr( + DefaultColorSpace({mImageSize.width, mImageSize.height})), + mColorRange, mVideoInfo.ImageRect().width, + mVideoInfo.ImageRect().height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + } else { + // The stride may have changed, recheck for it. + hr = GetDefaultStride(outputType, mVideoInfo.ImageRect().width, + &mVideoStride); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + + UINT32 width = 0, height = 0; + hr = MFGetAttributeSize(outputType, MF_MT_FRAME_SIZE, &width, &height); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL); + NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL); + mSoftwareImageSize = gfx::IntSize(width, height); + + gfx::IntRect picture; + hr = GetPictureRegion(outputType, picture); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + MOZ_ASSERT(picture.width != 0 && picture.height != 0); + mSoftwarePictureSize = gfx::IntSize(picture.width, picture.height); + LOG("Output stream change, image size=[%ux%u], picture=[%u,%u]", + mSoftwareImageSize.width, mSoftwareImageSize.height, + mSoftwarePictureSize.width, mSoftwarePictureSize.height); + } + // Catch infinite loops, but some decoders perform at least 2 stream + // changes on consecutive calls, so be permissive. + // 100 is arbitrarily > 2. + NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE); + // Loop back and try decoding again... + ++typeChangeCount; + continue; + } + + if (SUCCEEDED(hr)) { + if (!sample) { + LOG("Video MFTDecoder returned success but no output!"); + // On some machines/input the MFT returns success but doesn't output + // a video frame. If we detect this, try again, but only up to a + // point; after 250 failures, give up. Note we count all failures + // over the life of the decoder, as we may end up exiting with a + // NEED_MORE_INPUT and coming back to hit the same error. So just + // counting with a local variable (like typeChangeCount does) may + // not work in this situation. 
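+        // As a rough budget, up to 250 empty successes are tolerated over
+        // the decoder's lifetime before giving up, compared with the 100
+        // stream-change retries tolerated per Output() call above.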
+ ++mNullOutputCount; + if (mNullOutputCount > 250) { + LOG("Excessive Video MFTDecoder returning success but no output; " + "giving up"); + mGotExcessiveNullOutput = true; + return E_FAIL; + } + continue; + } + TimeUnit pts = GetSampleTime(sample); + TimeUnit duration = GetSampleDurationOrLastKnownDuration(sample); + + // AV1 MFT fix: Sample duration after seeking is always equal to the + // sample time, for some reason. Set it to last duration instead. + if (mStreamType == WMFStreamType::AV1 && duration == pts) { + LOG("Video sample duration (%" PRId64 ") matched timestamp (%" PRId64 + "), setting to previous sample duration (%" PRId64 ") instead.", + pts.ToMicroseconds(), duration.ToMicroseconds(), + mLastDuration.ToMicroseconds()); + duration = mLastDuration; + sample->SetSampleDuration(UsecsToHNs(duration.ToMicroseconds())); + } + + if (!pts.IsValid() || !duration.IsValid()) { + return E_FAIL; + } + if (mSeekTargetThreshold.isSome()) { + if ((pts + duration) < mSeekTargetThreshold.ref()) { + LOG("Dropping video frame which pts (%" PRId64 " + %" PRId64 + ") is smaller than seek target (%" PRId64 ").", + pts.ToMicroseconds(), duration.ToMicroseconds(), + mSeekTargetThreshold->ToMicroseconds()); + // It is necessary to clear the pointer to release the previous output + // buffer. + sample = nullptr; + continue; + } + mSeekTargetThreshold.reset(); + } + break; + } + // Else unexpected error so bail. + NS_WARNING("WMFVideoMFTManager::Output() unexpected error"); + return hr; + } + + RefPtr frame; + if (mUseHwAccel) { + hr = CreateD3DVideoFrame(sample, aStreamOffset, getter_AddRefs(frame)); + } else { + hr = CreateBasicVideoFrame(sample, aStreamOffset, getter_AddRefs(frame)); + } + // Frame should be non null only when we succeeded. + MOZ_ASSERT((frame != nullptr) == SUCCEEDED(hr)); + NS_ENSURE_TRUE(SUCCEEDED(hr), hr); + NS_ENSURE_TRUE(frame, E_FAIL); + + aOutData = frame; + + if (mNullOutputCount) { + mGotValidOutputAfterNullOutput = true; + } + + return S_OK; +} + +void WMFVideoMFTManager::Flush() { + MFTManager::Flush(); + mPerformanceRecorder.Record(std::numeric_limits::max()); +} + +void WMFVideoMFTManager::Shutdown() { + if (mDXVA2Manager) { + mDXVA2Manager->BeforeShutdownVideoMFTDecoder(); + } + mDecoder = nullptr; + mDXVA2Manager.reset(); +} + +bool WMFVideoMFTManager::IsHardwareAccelerated( + nsACString& aFailureReason) const { + aFailureReason = mDXVAFailureReason; + return mDecoder && mUseHwAccel; +} + +nsCString WMFVideoMFTManager::GetDescriptionName() const { + nsCString failureReason; + bool hw = IsHardwareAccelerated(failureReason); + + const char* formatName = [&]() { + if (!mDecoder) { + return "not initialized"; + } + GUID format = mDecoder->GetOutputMediaSubType(); + if (format == MFVideoFormat_NV12) { + if (!gfx::DeviceManagerDx::Get()->CanUseNV12()) { + return "nv12->argb32"; + } + return "nv12"; + } + if (format == MFVideoFormat_P010) { + if (!gfx::DeviceManagerDx::Get()->CanUseP010()) { + return "p010->argb32"; + } + return "p010"; + } + if (format == MFVideoFormat_P016) { + if (!gfx::DeviceManagerDx::Get()->CanUseP016()) { + return "p016->argb32"; + } + return "p016"; + } + if (format == MFVideoFormat_YV12) { + return "yv12"; + } + return "unknown"; + }(); + + const char* dxvaName = [&]() { + if (!mDXVA2Manager) { + return "no DXVA"; + } + return "D3D11"; + }(); + + return nsPrintfCString("wmf %s codec %s video decoder - %s, %s", + StreamTypeToString(mStreamType), + hw ? 
"hardware" : "software", dxvaName, formatName); +} +nsCString WMFVideoMFTManager::GetCodecName() const { + switch (mStreamType) { + case WMFStreamType::H264: + return "h264"_ns; + case WMFStreamType::VP8: + return "vp8"_ns; + case WMFStreamType::VP9: + return "vp9"_ns; + case WMFStreamType::AV1: + return "av1"_ns; + case WMFStreamType::HEVC: + return "hevc"_ns; + default: + return "unknown"_ns; + }; +} + +} // namespace mozilla diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.h b/dom/media/platforms/wmf/WMFVideoMFTManager.h new file mode 100644 index 0000000000..9a4367d72d --- /dev/null +++ b/dom/media/platforms/wmf/WMFVideoMFTManager.h @@ -0,0 +1,133 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#if !defined(WMFVideoMFTManager_h_) +# define WMFVideoMFTManager_h_ + +# include "MFTDecoder.h" +# include "MediaResult.h" +# include "PerformanceRecorder.h" +# include "WMF.h" +# include "WMFDecoderModule.h" +# include "WMFMediaDataDecoder.h" +# include "mozilla/Atomics.h" +# include "mozilla/RefPtr.h" +# include "mozilla/gfx/Rect.h" + +namespace mozilla { + +class DXVA2Manager; + +class WMFVideoMFTManager : public MFTManager { + public: + WMFVideoMFTManager(const VideoInfo& aConfig, + layers::KnowsCompositor* aKnowsCompositor, + layers::ImageContainer* aImageContainer, float aFramerate, + const CreateDecoderParams::OptionSet& aOptions, + bool aDXVAEnabled, Maybe aTrackingId); + ~WMFVideoMFTManager(); + + MediaResult Init(); + + HRESULT Input(MediaRawData* aSample) override; + + HRESULT Output(int64_t aStreamOffset, RefPtr& aOutput) override; + + void Flush() override; + + void Shutdown() override; + + bool IsHardwareAccelerated(nsACString& aFailureReason) const override; + + TrackInfo::TrackType GetType() override { return TrackInfo::kVideoTrack; } + + nsCString GetDescriptionName() const override; + + nsCString GetCodecName() const override; + + MediaDataDecoder::ConversionRequired NeedsConversion() const override { + return mStreamType == WMFStreamType::H264 || + mStreamType == WMFStreamType::HEVC + ? MediaDataDecoder::ConversionRequired::kNeedAnnexB + : MediaDataDecoder::ConversionRequired::kNeedNone; + } + + private: + MediaResult ValidateVideoInfo(); + + bool InitializeDXVA(); + + MediaResult InitInternal(); + + HRESULT CreateBasicVideoFrame(IMFSample* aSample, int64_t aStreamOffset, + VideoData** aOutVideoData); + + HRESULT CreateD3DVideoFrame(IMFSample* aSample, int64_t aStreamOffset, + VideoData** aOutVideoData); + + HRESULT SetDecoderMediaTypes(); + + bool CanUseDXVA(IMFMediaType* aInputType, IMFMediaType* aOutputType); + + // Gets the duration from aSample, and if an unknown or invalid duration is + // returned from WMF, this instead returns the last known input duration. + // The sample duration is unknown per `IMFSample::GetSampleDuration` docs + // 'If the retrieved duration is zero, or if the method returns + // MF_E_NO_SAMPLE_DURATION, the duration is unknown'. The same API also + // suggests it may return other unspecified error codes, so we handle those + // too. It also returns a signed int, but since a negative duration doesn't + // make sense, we also handle that case. + media::TimeUnit GetSampleDurationOrLastKnownDuration( + IMFSample* aSample) const; + + // Video frame geometry. 
+  const VideoInfo mVideoInfo;
+  const gfx::IntSize mImageSize;
+  const WMFStreamType mStreamType;
+
+  // The size we update from the IMFMediaType which might include paddings when
+  // the stream format changes. This is only used for software decoding.
+  gfx::IntSize mSoftwareImageSize;
+
+  // The picture size we update from the IMFMediaType when the stream format
+  // changes. We assume it's equal to the image size by default (no cropping).
+  // This is only used for software decoding.
+  gfx::IntSize mSoftwarePictureSize;
+
+  uint32_t mVideoStride;
+  Maybe<gfx::YUVColorSpace> mColorSpace;
+  gfx::ColorRange mColorRange;
+
+  RefPtr<layers::ImageContainer> mImageContainer;
+  RefPtr<layers::KnowsCompositor> mKnowsCompositor;
+  UniquePtr<DXVA2Manager> mDXVA2Manager;
+
+  media::TimeUnit mLastDuration;
+
+  bool mDXVAEnabled;
+  bool mUseHwAccel;
+
+  bool mZeroCopyNV12Texture;
+
+  nsCString mDXVAFailureReason;
+
+  const GUID& GetMediaSubtypeGUID();
+
+  uint32_t mNullOutputCount = 0;
+  bool mGotValidOutputAfterNullOutput = false;
+  bool mGotExcessiveNullOutput = false;
+  bool mIsValid = true;
+  bool mIMFUsable = false;
+  const float mFramerate;
+  const bool mLowLatency;
+
+  PerformanceRecorderMulti<PlaybackStage> mPerformanceRecorder;
+  const Maybe<TrackingId> mTrackingId;
+};
+
+}  // namespace mozilla
+
+#endif  // WMFVideoMFTManager_h_
diff --git a/dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp b/dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp
new file mode 100644
index 0000000000..1746d6d4cc
--- /dev/null
+++ b/dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp
@@ -0,0 +1,15 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <gtest/gtest.h>
+
+#include "WMFDecoderModule.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/Preferences.h"
+
+using namespace mozilla;
+
+TEST(CanCreateMFTDecoder, NoIPC)
+{ EXPECT_TRUE(WMFDecoderModule::CanCreateMFTDecoder(WMFStreamType::H264)); }
diff --git a/dom/media/platforms/wmf/gtest/moz.build b/dom/media/platforms/wmf/gtest/moz.build
new file mode 100644
index 0000000000..ccd056ecf1
--- /dev/null
+++ b/dom/media/platforms/wmf/gtest/moz.build
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+UNIFIED_SOURCES += [
+    "TestCanCreateMFTDecoder.cpp",
+]
+
+LOCAL_INCLUDES += [
+    "/dom/media/platforms/wmf/gtest",
+]
+
+FINAL_LIBRARY = "xul-gtest"
diff --git a/dom/media/platforms/wmf/metrics.yaml b/dom/media/platforms/wmf/metrics.yaml
new file mode 100644
index 0000000000..3803016767
--- /dev/null
+++ b/dom/media/platforms/wmf/metrics.yaml
@@ -0,0 +1,88 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Adding a new metric? We have docs for that!
+# https://firefox-source-docs.mozilla.org/toolkit/components/glean/user/new_definitions_file.html
+
+---
+$schema: moz://mozilla.org/schemas/glean/metrics/2-0-0
+$tags:
+  - 'Core :: Audio/Video'
+
+mfcdm:
+  eme_playback:
+    type: event
+    description:
+      Record the information about the EME playback when using the media engine.
+      The value of this event is the key system name.
+    metadata:
+      tags:
+        - 'Core :: Audio/Video: Playback'
+    bugs:
+      - https://bugzilla.mozilla.org/show_bug.cgi?id=1873394
+    data_reviews:
+      - https://bugzilla.mozilla.org/show_bug.cgi?id=1873394#c7
+    data_sensitivity:
+      - technical
+    notification_emails:
+      - media-alerts@mozilla.com
+    extra_keys:
+      key_system:
+        description: The key system used for the EME playback
+        type: string
+      video_codec:
+        description: The video codec used for EME playback
+        type: string
+      resolution:
+        description: The video resolution used for EME playback
+        type: string
+      played_time:
+        description: The amount of time (in seconds) the EME content has been played
+        type: quantity
+      rendered_frames:
+        description: The number of video frames that have been rendered
+        type: quantity
+      dropped_frames:
+        description: The number of video frames that were dropped instead of rendered
+        type: quantity
+    expires: 130
+    telemetry_mirror: Mfcdm_EmePlayback_Gecko
+  error:
+    type: event
+    description:
+      Record the error or crash that happened while using media engine playback.
+      The value of this event is the name of the error. This probe covers both EME
+      and non-EME playback.
+    metadata:
+      tags:
+        - 'Core :: Audio/Video: Playback'
+    bugs:
+      - https://bugzilla.mozilla.org/show_bug.cgi?id=1873394
+    data_reviews:
+      - https://bugzilla.mozilla.org/show_bug.cgi?id=1873394#c7
+    data_sensitivity:
+      - technical
+    notification_emails:
+      - media-alerts@mozilla.com
+    extra_keys:
+      error_name:
+        description: The name of the error
+        type: string
+      current_state:
+        description: The state of the external state machine when the error or crash happened
+        type: string
+      audio_codec:
+        description: The audio codec in use when the error or crash happened
+        type: string
+      video_codec:
+        description: The video codec in use when the error or crash happened
+        type: string
+      resolution:
+        description: The video resolution in use when the error or crash happened
+        type: string
+      key_system:
+        description: The key system in use when the error or crash happened
+        type: string
+    expires: 130
+    telemetry_mirror: Mfcdm_Error_Gecko
diff --git a/dom/media/platforms/wmf/moz.build b/dom/media/platforms/wmf/moz.build
new file mode 100644
index 0000000000..9e0f3aa94a
--- /dev/null
+++ b/dom/media/platforms/wmf/moz.build
@@ -0,0 +1,85 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+EXPORTS += [
+    "DXVA2Manager.h",
+    "MFTDecoder.h",
+    "WMF.h",
+    "WMFAudioMFTManager.h",
+    "WMFDataEncoderUtils.h",
+    "WMFDecoderModule.h",
+    "WMFEncoderModule.h",
+    "WMFMediaDataDecoder.h",
+    "WMFMediaDataEncoder.h",
+    "WMFUtils.h",
+    "WMFVideoMFTManager.h",
+]
+
+if CONFIG["MOZ_WMF_MEDIA_ENGINE"]:
+    EXPORTS += [
+        "MFMediaEngineAudioStream.h",
+        "MFMediaEngineDecoderModule.h",
+        "MFMediaEngineExtra.h",
+        "MFMediaEngineStream.h",
+        "MFMediaEngineVideoStream.h",
+        "MFMediaSource.h",
+    ]
+    UNIFIED_SOURCES += [
+        "MFMediaEngineAudioStream.cpp",
+        "MFMediaEngineDecoderModule.cpp",
+        "MFMediaEngineExtension.cpp",
+        "MFMediaEngineNotify.cpp",
+        "MFMediaEngineStream.cpp",
+        "MFMediaEngineVideoStream.cpp",
+        "MFMediaSource.cpp",
+    ]
+
+if CONFIG["MOZ_WMF_CDM"]:
+    EXPORTS += [
+        "MFCDMExtra.h",
+        "MFCDMProxy.h",
+        "MFCDMSession.h",
+        "MFContentProtectionManager.h",
+        "MFPMPHostWrapper.h",
+    ]
+    UNIFIED_SOURCES += [
+        "MFCDMProxy.cpp",
+        "MFCDMSession.cpp",
+        "MFContentProtectionManager.cpp",
+        "MFPMPHostWrapper.cpp",
+    ]
+
+UNIFIED_SOURCES += [
+    "DXVA2Manager.cpp",
+    "MFTDecoder.cpp",
+    "MFTEncoder.cpp",
+    "WMFAudioMFTManager.cpp",
+    "WMFDecoderModule.cpp",
+    "WMFEncoderModule.cpp",
+    "WMFMediaDataDecoder.cpp",
+    "WMFVideoMFTManager.cpp",
+]
+
+SOURCES += [
+    "WMFUtils.cpp",
+]
+
+LOCAL_INCLUDES += [
+    "../../ipc/",
+    "/gfx/cairo/cairo/src",
+    "/media/libyuv/libyuv/include",
+]
+
+TEST_DIRS += [
+    "gtest",
+]
+
+include("/ipc/chromium/chromium-config.mozbuild")
+
+FINAL_LIBRARY = "xul"
+
+# Add libFuzzer configuration directives
+include("/tools/fuzzing/libfuzzer-config.mozbuild")
-- 
cgit v1.2.3
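
Note on the duration fallback declared in WMFVideoMFTManager.h: the header comment for GetSampleDurationOrLastKnownDuration says that a zero, negative, or error result from IMFSample::GetSampleDuration is treated as "unknown" and replaced by the last known input duration. A minimal standalone sketch of that behaviour is shown below. It is only an illustration under stated assumptions, not the shipped implementation; the helper name GetDurationOrFallback and the aLastKnownDuration parameter are hypothetical stand-ins for the manager's cached mLastDuration.

// Illustrative sketch only; not the Firefox implementation.
#include <mfidl.h>
#include <mferror.h>

// Returns the duration of aSample in 100-nanosecond units, falling back to
// aLastKnownDuration when WMF reports the duration as unknown (a failure
// HRESULT such as MF_E_NO_SAMPLE_DURATION, a zero duration, or a negative
// value, which makes no sense for a duration).
static LONGLONG GetDurationOrFallback(IMFSample* aSample,
                                      LONGLONG aLastKnownDuration) {
  LONGLONG duration = 0;
  HRESULT hr = aSample->GetSampleDuration(&duration);
  if (FAILED(hr) || duration <= 0) {
    return aLastKnownDuration;
  }
  return duration;
}

A caller in a decode loop would pass the previous sample's duration as the fallback and cache the returned value for the next sample, which is the pattern the header comment describes.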