summaryrefslogtreecommitdiffstats
path: root/dom/media/platforms/wmf
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 17:32:43 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 17:32:43 +0000
commit6bf0a5cb5034a7e684dcc3500e841785237ce2dd (patch)
treea68f146d7fa01f0134297619fbe7e33db084e0aa /dom/media/platforms/wmf
parentInitial commit. (diff)
downloadthunderbird-6bf0a5cb5034a7e684dcc3500e841785237ce2dd.tar.xz
thunderbird-6bf0a5cb5034a7e684dcc3500e841785237ce2dd.zip
Adding upstream version 1:115.7.0.upstream/1%115.7.0upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rw-r--r--dom/media/platforms/wmf/DXVA2Manager.cpp1512
-rw-r--r--dom/media/platforms/wmf/DXVA2Manager.h94
-rw-r--r--dom/media/platforms/wmf/MFCDMExtra.h307
-rw-r--r--dom/media/platforms/wmf/MFCDMProxy.cpp74
-rw-r--r--dom/media/platforms/wmf/MFCDMProxy.h71
-rw-r--r--dom/media/platforms/wmf/MFCDMSession.cpp314
-rw-r--r--dom/media/platforms/wmf/MFCDMSession.h93
-rw-r--r--dom/media/platforms/wmf/MFContentProtectionManager.cpp164
-rw-r--r--dom/media/platforms/wmf/MFContentProtectionManager.h79
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp137
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineAudioStream.h51
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp174
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineDecoderModule.h45
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineExtension.cpp88
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineExtension.h49
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineExtra.h715
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineNotify.cpp32
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineNotify.h55
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineStream.cpp596
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineStream.h228
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp372
-rw-r--r--dom/media/platforms/wmf/MFMediaEngineVideoStream.h107
-rw-r--r--dom/media/platforms/wmf/MFMediaSource.cpp605
-rw-r--r--dom/media/platforms/wmf/MFMediaSource.h188
-rw-r--r--dom/media/platforms/wmf/MFPMPHostWrapper.cpp66
-rw-r--r--dom/media/platforms/wmf/MFPMPHostWrapper.h42
-rw-r--r--dom/media/platforms/wmf/MFTDecoder.cpp430
-rw-r--r--dom/media/platforms/wmf/MFTDecoder.h132
-rw-r--r--dom/media/platforms/wmf/MFTEncoder.cpp754
-rw-r--r--dom/media/platforms/wmf/MFTEncoder.h144
-rw-r--r--dom/media/platforms/wmf/WMF.h198
-rw-r--r--dom/media/platforms/wmf/WMFAudioMFTManager.cpp315
-rw-r--r--dom/media/platforms/wmf/WMFAudioMFTManager.h69
-rw-r--r--dom/media/platforms/wmf/WMFDataEncoderUtils.h165
-rw-r--r--dom/media/platforms/wmf/WMFDecoderModule.cpp454
-rw-r--r--dom/media/platforms/wmf/WMFDecoderModule.h58
-rw-r--r--dom/media/platforms/wmf/WMFEncoderModule.cpp43
-rw-r--r--dom/media/platforms/wmf/WMFEncoderModule.h24
-rw-r--r--dom/media/platforms/wmf/WMFMediaDataDecoder.cpp279
-rw-r--r--dom/media/platforms/wmf/WMFMediaDataDecoder.h182
-rw-r--r--dom/media/platforms/wmf/WMFMediaDataEncoder.h337
-rw-r--r--dom/media/platforms/wmf/WMFUtils.cpp632
-rw-r--r--dom/media/platforms/wmf/WMFUtils.h104
-rw-r--r--dom/media/platforms/wmf/WMFVideoMFTManager.cpp1096
-rw-r--r--dom/media/platforms/wmf/WMFVideoMFTManager.h132
-rw-r--r--dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp21
-rw-r--r--dom/media/platforms/wmf/gtest/moz.build15
-rw-r--r--dom/media/platforms/wmf/moz.build85
48 files changed, 11927 insertions, 0 deletions
diff --git a/dom/media/platforms/wmf/DXVA2Manager.cpp b/dom/media/platforms/wmf/DXVA2Manager.cpp
new file mode 100644
index 0000000000..f080a16779
--- /dev/null
+++ b/dom/media/platforms/wmf/DXVA2Manager.cpp
@@ -0,0 +1,1512 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifdef MOZ_AV1
+# include "AOMDecoder.h"
+#endif
+#include "DXVA2Manager.h"
+#include <d3d11.h>
+#include "D3D9SurfaceImage.h"
+#include "DriverCrashGuard.h"
+#include "GfxDriverInfo.h"
+#include "ImageContainer.h"
+#include "MFTDecoder.h"
+#include "MediaTelemetryConstants.h"
+#include "PerformanceRecorder.h"
+#include "VideoUtils.h"
+#include "VPXDecoder.h"
+#include "WMFUtils.h"
+#include "gfxCrashReporterUtils.h"
+#include "gfxWindowsPlatform.h"
+#include "mfapi.h"
+#include "mozilla/StaticMutex.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/Telemetry.h"
+#include "mozilla/gfx/DeviceManagerDx.h"
+#include "mozilla/layers/D3D11ShareHandleImage.h"
+#include "mozilla/layers/D3D11TextureIMFSampleImage.h"
+#include "mozilla/layers/ImageBridgeChild.h"
+#include "mozilla/layers/TextureD3D11.h"
+#include "mozilla/layers/TextureForwarder.h"
+#include "mozilla/mscom/EnsureMTA.h"
+#include "nsPrintfCString.h"
+#include "nsThreadUtils.h"
+
+// Attribute GUID for the XVP (video processor) MFT; presumably selects its
+// "playback" mode. Defined locally — TODO(review): confirm the value and
+// semantics against the Media Foundation headers.
+const GUID MF_XVP_PLAYBACK_MODE = {
+ 0x3c5d293f,
+ 0xad67,
+ 0x4e29,
+ {0xaf, 0x12, 0xcf, 0x3e, 0x23, 0x8a, 0xcc, 0xe9}};
+
+// MF_LOW_LATENCY attribute, defined locally (presumably so building against
+// an older SDK that lacks it still works).
+DEFINE_GUID(MF_LOW_LATENCY, 0x9c27891a, 0xed7a, 0x40e1, 0x88, 0xe8, 0xb2, 0x27,
+ 0x27, 0xa0, 0x24, 0xee);
+
+// R600, R700, Evergreen and Cayman AMD cards. These support DXVA via UVD3 or
+// earlier, and don't handle 1080p60 well.
+// NOTE(review): entries are PCI device IDs; they are compared against the
+// adapter's DeviceId (see D3D9DXVA2Manager::Init) to set mIsAMDPreUVD4.
+static const DWORD sAMDPreUVD4[] = {
+ // clang-format off
+ 0x9400, 0x9401, 0x9402, 0x9403, 0x9405, 0x940a, 0x940b, 0x940f, 0x94c0, 0x94c1, 0x94c3, 0x94c4, 0x94c5,
+ 0x94c6, 0x94c7, 0x94c8, 0x94c9, 0x94cb, 0x94cc, 0x94cd, 0x9580, 0x9581, 0x9583, 0x9586, 0x9587, 0x9588,
+ 0x9589, 0x958a, 0x958b, 0x958c, 0x958d, 0x958e, 0x958f, 0x9500, 0x9501, 0x9504, 0x9505, 0x9506, 0x9507,
+ 0x9508, 0x9509, 0x950f, 0x9511, 0x9515, 0x9517, 0x9519, 0x95c0, 0x95c2, 0x95c4, 0x95c5, 0x95c6, 0x95c7,
+ 0x95c9, 0x95cc, 0x95cd, 0x95ce, 0x95cf, 0x9590, 0x9591, 0x9593, 0x9595, 0x9596, 0x9597, 0x9598, 0x9599,
+ 0x959b, 0x9610, 0x9611, 0x9612, 0x9613, 0x9614, 0x9615, 0x9616, 0x9710, 0x9711, 0x9712, 0x9713, 0x9714,
+ 0x9715, 0x9440, 0x9441, 0x9442, 0x9443, 0x9444, 0x9446, 0x944a, 0x944b, 0x944c, 0x944e, 0x9450, 0x9452,
+ 0x9456, 0x945a, 0x945b, 0x945e, 0x9460, 0x9462, 0x946a, 0x946b, 0x947a, 0x947b, 0x9480, 0x9487, 0x9488,
+ 0x9489, 0x948a, 0x948f, 0x9490, 0x9491, 0x9495, 0x9498, 0x949c, 0x949e, 0x949f, 0x9540, 0x9541, 0x9542,
+ 0x954e, 0x954f, 0x9552, 0x9553, 0x9555, 0x9557, 0x955f, 0x94a0, 0x94a1, 0x94a3, 0x94b1, 0x94b3, 0x94b4,
+ 0x94b5, 0x94b9, 0x68e0, 0x68e1, 0x68e4, 0x68e5, 0x68e8, 0x68e9, 0x68f1, 0x68f2, 0x68f8, 0x68f9, 0x68fa,
+ 0x68fe, 0x68c0, 0x68c1, 0x68c7, 0x68c8, 0x68c9, 0x68d8, 0x68d9, 0x68da, 0x68de, 0x68a0, 0x68a1, 0x68a8,
+ 0x68a9, 0x68b0, 0x68b8, 0x68b9, 0x68ba, 0x68be, 0x68bf, 0x6880, 0x6888, 0x6889, 0x688a, 0x688c, 0x688d,
+ 0x6898, 0x6899, 0x689b, 0x689e, 0x689c, 0x689d, 0x9802, 0x9803, 0x9804, 0x9805, 0x9806, 0x9807, 0x9808,
+ 0x9809, 0x980a, 0x9640, 0x9641, 0x9647, 0x9648, 0x964a, 0x964b, 0x964c, 0x964e, 0x964f, 0x9642, 0x9643,
+ 0x9644, 0x9645, 0x9649, 0x6720, 0x6721, 0x6722, 0x6723, 0x6724, 0x6725, 0x6726, 0x6727, 0x6728, 0x6729,
+ 0x6738, 0x6739, 0x673e, 0x6740, 0x6741, 0x6742, 0x6743, 0x6744, 0x6745, 0x6746, 0x6747, 0x6748, 0x6749,
+ 0x674a, 0x6750, 0x6751, 0x6758, 0x6759, 0x675b, 0x675d, 0x675f, 0x6840, 0x6841, 0x6842, 0x6843, 0x6849,
+ 0x6850, 0x6858, 0x6859, 0x6760, 0x6761, 0x6762, 0x6763, 0x6764, 0x6765, 0x6766, 0x6767, 0x6768, 0x6770,
+ 0x6771, 0x6772, 0x6778, 0x6779, 0x677b, 0x6700, 0x6701, 0x6702, 0x6703, 0x6704, 0x6705, 0x6706, 0x6707,
+ 0x6708, 0x6709, 0x6718, 0x6719, 0x671c, 0x671d, 0x671f, 0x9900, 0x9901, 0x9903, 0x9904, 0x9905, 0x9906,
+ 0x9907, 0x9908, 0x9909, 0x990a, 0x990b, 0x990c, 0x990d, 0x990e, 0x990f, 0x9910, 0x9913, 0x9917, 0x9918,
+ 0x9919, 0x9990, 0x9991, 0x9992, 0x9993, 0x9994, 0x9995, 0x9996, 0x9997, 0x9998, 0x9999, 0x999a, 0x999b,
+ 0x999c, 0x999d, 0x99a0, 0x99a2, 0x99a4
+ // clang-format on
+};
+
+// List of NVidia Tesla GPUs known to have broken NV12 rendering.
+// NOTE(review): 0x0ca2 appears twice in the GT215 row below — one of the two
+// entries was presumably meant to be a different device ID; confirm against
+// upstream before relying on this table.
+static const DWORD sNVIDIABrokenNV12[] = {
+ // clang-format off
+ 0x0191, 0x0193, 0x0194, 0x0197, 0x019d, 0x019e, // G80
+ 0x0400, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407, 0x0408, 0x0409, // G84
+ 0x040a, 0x040b, 0x040c, 0x040d, 0x040e, 0x040f,
+ 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, // G86
+ 0x042a, 0x042b, 0x042c, 0x042d, 0x042e, 0x042f,
+ 0x0410, 0x0600, 0x0601, 0x0602, 0x0603, 0x0604, 0x0605, 0x0606, 0x0607, 0x0608, // G92
+ 0x0609, 0x060a, 0x060b, 0x060c, 0x060f, 0x0610, 0x0611, 0x0612, 0x0613, 0x0614,
+ 0x0615, 0x0617, 0x0618, 0x0619, 0x061a, 0x061b, 0x061c, 0x061d, 0x061e, 0x061f, // G94
+ 0x0621, 0x0622, 0x0623, 0x0625, 0x0626, 0x0627, 0x0628, 0x062a, 0x062b, 0x062c,
+ 0x062d, 0x062e, 0x0631, 0x0635, 0x0637, 0x0638, 0x063a,
+ 0x0640, 0x0641, 0x0643, 0x0644, 0x0645, 0x0646, 0x0647, 0x0648, 0x0649, 0x064a, // G96
+ 0x064b, 0x064c, 0x0651, 0x0652, 0x0653, 0x0654, 0x0655, 0x0656, 0x0658, 0x0659,
+ 0x065a, 0x065b, 0x065c, 0x065f,
+ 0x06e0, 0x06e1, 0x06e2, 0x06e3, 0x06e4, 0x06e6, 0x06e7, 0x06e8, 0x06e9, 0x06ea, // G98
+ 0x06eb, 0x06ec, 0x06ef, 0x06f1, 0x06f8, 0x06f9, 0x06fa, 0x06fb, 0x06fd, 0x06ff,
+ 0x05e0, 0x05e1, 0x05e2, 0x05e3, 0x05e6, 0x05e7, 0x05e9, 0x05ea, 0x05eb, 0x05ed, // G200
+ 0x05ee, 0x05ef,
+ 0x0840, 0x0844, 0x0845, 0x0846, 0x0847, 0x0848, 0x0849, 0x084a, 0x084b, 0x084c, // MCP77
+ 0x084d, 0x084f,
+ 0x0860, 0x0861, 0x0862, 0x0863, 0x0864, 0x0865, 0x0866, 0x0867, 0x0868, 0x0869, // MCP79
+ 0x086a, 0x086c, 0x086d, 0x086e, 0x086f, 0x0870, 0x0871, 0x0872, 0x0873, 0x0874,
+ 0x0876, 0x087a, 0x087d, 0x087e, 0x087f,
+ 0x0ca0, 0x0ca2, 0x0ca3, 0x0ca2, 0x0ca4, 0x0ca5, 0x0ca7, 0x0ca9, 0x0cac, 0x0caf, // GT215
+ 0x0cb0, 0x0cb1, 0x0cbc,
+ 0x0a20, 0x0a22, 0x0a23, 0x0a26, 0x0a27, 0x0a28, 0x0a29, 0x0a2a, 0x0a2b, 0x0a2c, // GT216
+ 0x0a2d, 0x0a32, 0x0a34, 0x0a35, 0x0a38, 0x0a3c,
+ 0x0a60, 0x0a62, 0x0a63, 0x0a64, 0x0a65, 0x0a66, 0x0a67, 0x0a68, 0x0a69, 0x0a6a, // GT218
+ 0x0a6c, 0x0a6e, 0x0a6f, 0x0a70, 0x0a71, 0x0a72, 0x0a73, 0x0a74, 0x0a75, 0x0a76,
+ 0x0a78, 0x0a7a, 0x0a7c, 0x10c0, 0x10c3, 0x10c5, 0x10d8
+ // clang-format on
+};
+
+// The size we use for our synchronization surface.
+// 16x16 is the size recommended by Microsoft (in the D3D9ExDXGISharedSurf
+// sample) that works best to avoid driver bugs.
+static const uint32_t kSyncSurfaceSize = 16;
+
+namespace mozilla {
+
+using layers::D3D11RecycleAllocator;
+using layers::D3D11ShareHandleImage;
+using layers::D3D9RecycleAllocator;
+using layers::D3D9SurfaceImage;
+using layers::Image;
+using layers::ImageContainer;
+using namespace layers;
+using namespace gfx;
+
+// DXVA2Manager implementation backed by a D3D9Ex device. Only H.264 input is
+// supported on this path (see SupportsConfig below).
+class D3D9DXVA2Manager : public DXVA2Manager {
+ public:
+ D3D9DXVA2Manager();
+ virtual ~D3D9DXVA2Manager();
+
+ // Creates the D3D9Ex device, device manager, decoder service and sync
+ // surface. Fills aFailureReason and returns a failing HRESULT on error.
+ HRESULT Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason);
+
+ IUnknown* GetDXVADeviceManager() override;
+
+ // Copies a region (aRegion) of the video frame stored in aVideoSample
+ // into an image which is returned by aOutImage.
+ HRESULT CopyToImage(IMFSample* aVideoSample, const gfx::IntRect& aRegion,
+ Image** aOutImage) override;
+
+ bool SupportsConfig(const VideoInfo& aInfo, IMFMediaType* aInputType,
+ IMFMediaType* aOutputType) override;
+
+ private:
+ // True if a hardware decoder can be created for aDesc; also applies the
+ // resolution/framerate restrictions of IsUnsupportedResolution.
+ bool CanCreateDecoder(const DXVA2_VideoDesc& aDesc) const;
+
+ // Attempts to create an IDirectXVideoDecoder for aDesc via the decoder
+ // service; returns null on failure.
+ already_AddRefed<IDirectXVideoDecoder> CreateDecoder(
+ const DXVA2_VideoDesc& aDesc) const;
+
+ RefPtr<IDirect3D9Ex> mD3D9;
+ RefPtr<IDirect3DDevice9Ex> mDevice;
+ RefPtr<IDirect3DDeviceManager9> mDeviceManager;
+ RefPtr<D3D9RecycleAllocator> mTextureClientAllocator;
+ RefPtr<IDirectXVideoDecoderService> mDecoderService;
+ // Small (kSyncSurfaceSize^2) render target used as a GPU fence in
+ // CopyToImage.
+ RefPtr<IDirect3DSurface9> mSyncSurface;
+ RefPtr<IDirectXVideoDecoder> mDecoder;
+ // H.264 decoder device GUID chosen in Init (standard VLD or the Intel
+ // ClearVideo variant).
+ GUID mDecoderGUID;
+ // Reset token issued by DXVA2CreateDirect3DDeviceManager9.
+ UINT32 mResetToken = 0;
+};
+
+// Fills *pFormat from the extended-format attributes of pType: interlace
+// mode, chroma siting, nominal range, YUV matrix, lighting, primaries and
+// transfer function. Each attribute falls back to its "unknown" value when
+// absent from the media type.
+void GetDXVA2ExtendedFormatFromMFMediaType(IMFMediaType* pType,
+ DXVA2_ExtendedFormat* pFormat) {
+ // Get the interlace mode.
+ MFVideoInterlaceMode interlace = MFVideoInterlaceMode(MFGetAttributeUINT32(
+ pType, MF_MT_INTERLACE_MODE, MFVideoInterlace_Unknown));
+
+ // Mixed interlaced/progressive content is reported as field-interleaved
+ // (even field first); the DXVA2 sample-format enum has no "mixed" value.
+ if (interlace == MFVideoInterlace_MixedInterlaceOrProgressive) {
+ pFormat->SampleFormat = DXVA2_SampleFieldInterleavedEvenFirst;
+ } else {
+ pFormat->SampleFormat = UINT(interlace);
+ }
+
+ pFormat->VideoChromaSubsampling = MFGetAttributeUINT32(
+ pType, MF_MT_VIDEO_CHROMA_SITING, MFVideoChromaSubsampling_Unknown);
+ pFormat->NominalRange = MFGetAttributeUINT32(pType, MF_MT_VIDEO_NOMINAL_RANGE,
+ MFNominalRange_Unknown);
+ pFormat->VideoTransferMatrix = MFGetAttributeUINT32(
+ pType, MF_MT_YUV_MATRIX, MFVideoTransferMatrix_Unknown);
+ pFormat->VideoLighting = MFGetAttributeUINT32(pType, MF_MT_VIDEO_LIGHTING,
+ MFVideoLighting_Unknown);
+ pFormat->VideoPrimaries = MFGetAttributeUINT32(pType, MF_MT_VIDEO_PRIMARIES,
+ MFVideoPrimaries_Unknown);
+ pFormat->VideoTransferFunction = MFGetAttributeUINT32(
+ pType, MF_MT_TRANSFER_FUNCTION, MFVideoTransFunc_Unknown);
+}
+
+// Translates an IMFMediaType into a DXVA2_VideoDesc: D3D format (taken from
+// the subtype GUID), validated frame size, frame rate and extended format.
+// For field-interleaved (interlaced) content the output frame rate is
+// doubled, since each input frame produces two output fields. If the type
+// has no MF_MT_FRAME_RATE attribute, the frequency fields stay zeroed from
+// the ZeroMemory below.
+HRESULT ConvertMFTypeToDXVAType(IMFMediaType* pType, DXVA2_VideoDesc* pDesc) {
+ ZeroMemory(pDesc, sizeof(*pDesc));
+
+ // The D3D format is the first DWORD of the subtype GUID.
+ GUID subtype = GUID_NULL;
+ HRESULT hr = pType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ pDesc->Format = (D3DFORMAT)subtype.Data1;
+
+ // Reject absurd dimensions up front.
+ UINT32 width = 0;
+ UINT32 height = 0;
+ hr = MFGetAttributeSize(pType, MF_MT_FRAME_SIZE, &width, &height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
+ NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
+ pDesc->SampleWidth = width;
+ pDesc->SampleHeight = height;
+
+ UINT32 fpsNumerator = 0;
+ UINT32 fpsDenominator = 0;
+ if (SUCCEEDED(MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, &fpsNumerator,
+ &fpsDenominator))) {
+ pDesc->InputSampleFreq.Numerator = fpsNumerator;
+ pDesc->InputSampleFreq.Denominator = fpsDenominator;
+
+ GetDXVA2ExtendedFormatFromMFMediaType(pType, &pDesc->SampleFormat);
+ pDesc->OutputFrameFreq = pDesc->InputSampleFreq;
+ if ((pDesc->SampleFormat.SampleFormat ==
+ DXVA2_SampleFieldInterleavedEvenFirst) ||
+ (pDesc->SampleFormat.SampleFormat ==
+ DXVA2_SampleFieldInterleavedOddFirst)) {
+ pDesc->OutputFrameFreq.Numerator *= 2;
+ }
+ }
+
+ return S_OK;
+}
+
+// All GUIDs other than Intel ClearVideo can be found here:
+// https://docs.microsoft.com/en-us/windows/win32/medfound/direct3d-12-video-guids
+// VLD = Variable-length decoder, FGT = Film grain technology
+// These decoder-profile GUIDs are defined locally (rather than pulled from
+// SDK headers) so the values documented at the link above are available
+// regardless of the SDK version used to build.
+static const GUID DXVA2_ModeH264_VLD_NoFGT = {
+ 0x1b81be68,
+ 0xa0c7,
+ 0x11d3,
+ {0xb9, 0x84, 0x00, 0xc0, 0x4f, 0x2e, 0x73, 0xc5}};
+
+// Also known as DXVADDI_Intel_ModeH264_E here:
+// https://www.intel.com/content/dam/develop/external/us/en/documents/h264-avc-x4500-acceration-esardell-157713.pdf
+// Named based on the fact that this is only supported on older ClearVideo
+// Intel decoding hardware.
+static const GUID DXVA2_Intel_ClearVideo_ModeH264_VLD_NoFGT = {
+ 0x604F8E68,
+ 0x4951,
+ 0x4c54,
+ {0x88, 0xFE, 0xAB, 0xD2, 0x5C, 0x15, 0xB3, 0xD6}};
+
+// VP8 profiles
+static const GUID DXVA2_ModeVP8_VLD = {
+ 0x90b899ea,
+ 0x3a62,
+ 0x4705,
+ {0x88, 0xb3, 0x8d, 0xf0, 0x4b, 0x27, 0x44, 0xe7}};
+
+// VP9 profiles
+static const GUID DXVA2_ModeVP9_VLD_Profile0 = {
+ 0x463707f8,
+ 0xa1d0,
+ 0x4585,
+ {0x87, 0x6d, 0x83, 0xaa, 0x6d, 0x60, 0xb8, 0x9e}};
+
+static const GUID DXVA2_ModeVP9_VLD_10bit_Profile2 = {
+ 0xa4c749ef,
+ 0x6ecf,
+ 0x48aa,
+ {0x84, 0x48, 0x50, 0xa7, 0xa1, 0x16, 0x5f, 0xf7}};
+
+// AV1 profiles
+static const GUID DXVA2_ModeAV1_VLD_Profile0 = {
+ 0xb8be4ccb,
+ 0xcf53,
+ 0x46ba,
+ {0x8d, 0x59, 0xd6, 0xb8, 0xa6, 0xda, 0x5d, 0x2a}};
+
+static const GUID DXVA2_ModeAV1_VLD_Profile1 = {
+ 0x6936ff0f,
+ 0x45b1,
+ 0x4163,
+ {0x9c, 0xc1, 0x64, 0x6e, 0xf6, 0x94, 0x61, 0x08}};
+
+static const GUID DXVA2_ModeAV1_VLD_Profile2 = {
+ 0x0c5f2aa1,
+ 0xe541,
+ 0x4089,
+ {0xbb, 0x7b, 0x98, 0x11, 0x0a, 0x19, 0xd7, 0xc8}};
+
+static const GUID DXVA2_ModeAV1_VLD_12bit_Profile2 = {
+ 0x17127009,
+ 0xa00f,
+ 0x4ce1,
+ {0x99, 0x4e, 0xbf, 0x40, 0x81, 0xf6, 0xf3, 0xf0}};
+
+static const GUID DXVA2_ModeAV1_VLD_12bit_Profile2_420 = {
+ 0x2d80bed6,
+ 0x9cac,
+ 0x4835,
+ {0x9e, 0x91, 0x32, 0x7b, 0xbc, 0x4f, 0x9e, 0xe8}};
+
+// This tests if a DXVA video decoder can be created for the given media
+// type/resolution. It uses the same decoder device (DXVA2_ModeH264_E -
+// DXVA2_ModeH264_VLD_NoFGT) as the H264 decoder MFT provided by windows
+// (CLSID_CMSH264DecoderMFT) uses, so we can use it to determine if the MFT will
+// use software fallback or not.
+bool D3D9DXVA2Manager::SupportsConfig(const VideoInfo& aInfo,
+ IMFMediaType* aInputType,
+ IMFMediaType* aOutputType) {
+ // Only H.264 input is handled on the D3D9 path. Note that aInfo and
+ // aOutputType are accepted for interface compatibility but not used here.
+ GUID inputSubtype;
+ HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &inputSubtype);
+ if (FAILED(hr) || inputSubtype != MFVideoFormat_H264) {
+ return false;
+ }
+
+ // Build a DXVA description from the media type, then verify a real
+ // hardware decoder can actually be created for it.
+ DXVA2_VideoDesc desc;
+ hr = ConvertMFTypeToDXVAType(aInputType, &desc);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+ return CanCreateDecoder(desc);
+}
+
+// Ctor/dtor only maintain Mozilla's leak-checking counters; all real setup
+// happens in Init().
+D3D9DXVA2Manager::D3D9DXVA2Manager() { MOZ_COUNT_CTOR(D3D9DXVA2Manager); }
+
+D3D9DXVA2Manager::~D3D9DXVA2Manager() { MOZ_COUNT_DTOR(D3D9DXVA2Manager); }
+
+// Returns the IDirect3DDeviceManager9 as a raw IUnknown. No AddRef is taken
+// on the way out; mLock is presumably declared in the DXVA2Manager base
+// class (not visible in this view) — confirm lifetime expectations of
+// callers.
+IUnknown* D3D9DXVA2Manager::GetDXVADeviceManager() {
+ MutexAutoLock lock(mLock);
+ return mDeviceManager;
+}
+
+// Initializes the D3D9 DXVA pipeline: loads d3d9.dll and creates a D3D9Ex
+// device, wraps it in an IDirect3DDeviceManager9, obtains the video decoder
+// service, picks a usable H.264 decoder GUID, applies the AMD pre-UVD4
+// blocklist, and allocates the small sync surface used by CopyToImage.
+// On any failure, aFailureReason describes the failing step and a failing
+// HRESULT (or E_FAIL) is returned.
+HRESULT
+D3D9DXVA2Manager::Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason) {
+ ScopedGfxFeatureReporter reporter("DXVA2D3D9");
+
+ // Create D3D9Ex.
+ HMODULE d3d9lib = LoadLibraryW(L"d3d9.dll");
+ NS_ENSURE_TRUE(d3d9lib, E_FAIL);
+ decltype(Direct3DCreate9Ex)* d3d9Create =
+ (decltype(Direct3DCreate9Ex)*)GetProcAddress(d3d9lib,
+ "Direct3DCreate9Ex");
+ if (!d3d9Create) {
+ NS_WARNING("Couldn't find Direct3DCreate9Ex symbol in d3d9.dll");
+ aFailureReason.AssignLiteral(
+ "Couldn't find Direct3DCreate9Ex symbol in d3d9.dll");
+ return E_FAIL;
+ }
+ RefPtr<IDirect3D9Ex> d3d9Ex;
+ HRESULT hr = d3d9Create(D3D_SDK_VERSION, getter_AddRefs(d3d9Ex));
+ // NOTE(review): only the out-pointer is checked here, not hr — a failing
+ // HRESULT paired with a non-null object would be ignored; confirm intent.
+ if (!d3d9Ex) {
+ NS_WARNING("Direct3DCreate9 failed");
+ aFailureReason.AssignLiteral("Direct3DCreate9 failed");
+ return E_FAIL;
+ }
+
+ // Ensure we can do the YCbCr->RGB conversion in StretchRect.
+ // Fail if we can't.
+ hr = d3d9Ex->CheckDeviceFormatConversion(
+ D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
+ (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2'), D3DFMT_X8R8G8B8);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "CheckDeviceFormatConversion failed with error %lX", hr);
+ return hr;
+ }
+
+ // Create D3D9DeviceEx. We pass null HWNDs here even though the documentation
+ // suggests that one of them should not be. At this point in time Chromium
+ // does the same thing for video acceleration.
+ D3DPRESENT_PARAMETERS params = {0};
+ params.BackBufferWidth = 1;
+ params.BackBufferHeight = 1;
+ params.BackBufferFormat = D3DFMT_A8R8G8B8;
+ params.BackBufferCount = 1;
+ params.SwapEffect = D3DSWAPEFFECT_DISCARD;
+ params.hDeviceWindow = nullptr;
+ params.Windowed = TRUE;
+ params.Flags = D3DPRESENTFLAG_VIDEO;
+
+ RefPtr<IDirect3DDevice9Ex> device;
+ hr = d3d9Ex->CreateDeviceEx(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, nullptr,
+ D3DCREATE_FPU_PRESERVE | D3DCREATE_MULTITHREADED |
+ D3DCREATE_MIXED_VERTEXPROCESSING,
+ &params, nullptr, getter_AddRefs(device));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason =
+ nsPrintfCString("CreateDeviceEx failed with error %lX", hr);
+ return hr;
+ }
+
+ // Ensure we can create queries to synchronize operations between devices.
+ // Without this, when we make a copy of the frame in order to share it with
+ // another device, we can't be sure that the copy has finished before the
+ // other device starts using it.
+ RefPtr<IDirect3DQuery9> query;
+
+ hr = device->CreateQuery(D3DQUERYTYPE_EVENT, getter_AddRefs(query));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString("CreateQuery failed with error %lX", hr);
+ return hr;
+ }
+
+ // Create and initialize IDirect3DDeviceManager9.
+ UINT resetToken = 0;
+ RefPtr<IDirect3DDeviceManager9> deviceManager;
+
+ hr = wmf::DXVA2CreateDirect3DDeviceManager9(&resetToken,
+ getter_AddRefs(deviceManager));
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "DXVA2CreateDirect3DDeviceManager9 failed with error %lX", hr);
+ return hr;
+ }
+ hr = deviceManager->ResetDevice(device, resetToken);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "IDirect3DDeviceManager9::ResetDevice failed with error %lX", hr);
+ return hr;
+ }
+
+ // Obtain the decoder service; the device handle is only needed for the
+ // duration of the GetVideoService call and is closed immediately after.
+ HANDLE deviceHandle;
+ RefPtr<IDirectXVideoDecoderService> decoderService;
+ hr = deviceManager->OpenDeviceHandle(&deviceHandle);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "IDirect3DDeviceManager9::OpenDeviceHandle failed with error %lX", hr);
+ return hr;
+ }
+
+ hr = deviceManager->GetVideoService(
+ deviceHandle, IID_PPV_ARGS(decoderService.StartAssignment()));
+ deviceManager->CloseDeviceHandle(deviceHandle);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "IDirectXVideoDecoderServer::GetVideoService failed with error %lX",
+ hr);
+ return hr;
+ }
+
+ // Pick the first supported H.264 decoder GUID (standard VLD, or Intel's
+ // ClearVideo variant on older Intel hardware).
+ UINT deviceCount;
+ GUID* decoderDevices = nullptr;
+ hr = decoderService->GetDecoderDeviceGuids(&deviceCount, &decoderDevices);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "IDirectXVideoDecoderServer::GetDecoderDeviceGuids failed with error "
+ "%lX",
+ hr);
+ return hr;
+ }
+
+ bool found = false;
+ for (UINT i = 0; i < deviceCount; i++) {
+ if (decoderDevices[i] == DXVA2_ModeH264_VLD_NoFGT ||
+ decoderDevices[i] == DXVA2_Intel_ClearVideo_ModeH264_VLD_NoFGT) {
+ mDecoderGUID = decoderDevices[i];
+ found = true;
+ break;
+ }
+ }
+ CoTaskMemFree(decoderDevices);
+
+ if (!found) {
+ aFailureReason.AssignLiteral("Failed to find an appropriate decoder GUID");
+ return E_FAIL;
+ }
+
+ // AMD blocklisting: 0x1002/0x1022 are AMD/ATI vendor IDs; pre-UVD4 parts
+ // are flagged and optionally rejected outright via pref.
+ D3DADAPTER_IDENTIFIER9 adapter;
+ hr = d3d9Ex->GetAdapterIdentifier(D3DADAPTER_DEFAULT, 0, &adapter);
+ if (!SUCCEEDED(hr)) {
+ aFailureReason = nsPrintfCString(
+ "IDirect3D9Ex::GetAdapterIdentifier failed with error %lX", hr);
+ return hr;
+ }
+
+ if ((adapter.VendorId == 0x1022 || adapter.VendorId == 0x1002) &&
+ !StaticPrefs::media_wmf_skip_blacklist()) {
+ for (const auto& model : sAMDPreUVD4) {
+ if (adapter.DeviceId == model) {
+ mIsAMDPreUVD4 = true;
+ break;
+ }
+ }
+ if (StaticPrefs::media_wmf_dxva_d3d9_amd_pre_uvd4_disabled() &&
+ mIsAMDPreUVD4) {
+ aFailureReason.AssignLiteral(
+ "D3D9DXVA2Manager is disabled on AMDPreUVD4");
+ return E_FAIL;
+ }
+ }
+
+ // Tiny lockable render target used as a GPU fence (see CopyToImage).
+ RefPtr<IDirect3DSurface9> syncSurf;
+ hr = device->CreateRenderTarget(kSyncSurfaceSize, kSyncSurfaceSize,
+ D3DFMT_X8R8G8B8, D3DMULTISAMPLE_NONE, 0, TRUE,
+ getter_AddRefs(syncSurf), NULL);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ // Commit everything to members only after all steps have succeeded.
+ mDecoderService = decoderService;
+
+ mResetToken = resetToken;
+ mD3D9 = d3d9Ex;
+ mDevice = device;
+ mDeviceManager = deviceManager;
+ mSyncSurface = syncSurf;
+
+ if (layers::ImageBridgeChild::GetSingleton()) {
+ // There's no proper KnowsCompositor for ImageBridge currently (and it
+ // implements the interface), so just use that if it's available.
+ mTextureClientAllocator = new D3D9RecycleAllocator(
+ layers::ImageBridgeChild::GetSingleton().get(), mDevice);
+ } else {
+ mTextureClientAllocator =
+ new D3D9RecycleAllocator(aKnowsCompositor, mDevice);
+ }
+ mTextureClientAllocator->SetMaxPoolSize(5);
+
+ Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
+ uint32_t(media::MediaDecoderBackend::WMFDXVA2D3D9));
+
+ reporter.SetSuccessful();
+
+ return S_OK;
+}
+
+// Copies the decoded frame in aSample's first buffer into a new
+// D3D9SurfaceImage (restricted to aRegion), then uses a StretchRect into the
+// small sync surface plus a LockRect/UnlockRect pair as a GPU fence: the
+// lock blocks until the copy — and therefore the decode/color conversion
+// feeding it — has completed. The locked pixels themselves are never read.
+HRESULT
+D3D9DXVA2Manager::CopyToImage(IMFSample* aSample, const gfx::IntRect& aRegion,
+ Image** aOutImage) {
+ RefPtr<IMFMediaBuffer> buffer;
+ HRESULT hr = aSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ // Extract the underlying D3D9 surface from the MF buffer.
+ RefPtr<IDirect3DSurface9> surface;
+ hr = wmf::MFGetService(buffer, MR_BUFFER_SERVICE, IID_IDirect3DSurface9,
+ getter_AddRefs(surface));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<D3D9SurfaceImage> image = new D3D9SurfaceImage();
+ hr = image->AllocateAndCopy(mTextureClientAllocator, surface, aRegion);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IDirect3DSurface9> sourceSurf = image->GetD3D9Surface();
+
+ // Copy a small rect into our sync surface, and then map it
+ // to block until decoding/color conversion completes.
+ RECT copyRect = {0, 0, kSyncSurfaceSize, kSyncSurfaceSize};
+ hr = mDevice->StretchRect(sourceSurf, &copyRect, mSyncSurface, &copyRect,
+ D3DTEXF_NONE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ D3DLOCKED_RECT lockedRect;
+ hr = mSyncSurface->LockRect(&lockedRect, NULL, D3DLOCK_READONLY);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = mSyncSurface->UnlockRect();
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ // Transfer ownership of the image to the caller.
+ image.forget(aOutImage);
+ return S_OK;
+}
+
+// Count of the number of DXVAManager's we've created. This is also the
+// number of videos we're decoding with DXVA. Use on main thread only.
+// NOTE(review): the counter is Atomic<> despite the "main thread only"
+// comment, and no increment/decrement is visible in this part of the file —
+// presumably handled in the DXVA2Manager base ctor/dtor; confirm.
+static Atomic<uint32_t> sDXVAVideosCount(0);
+
+// Factory for the D3D9 fallback path: enforces the max-concurrent-DXVA
+// videos pref, then creates and initializes a D3D9DXVA2Manager. Returns
+// null (with aFailureReason set) on failure; the caller owns the returned
+// manager.
+/* static */
+DXVA2Manager* DXVA2Manager::CreateD3D9DXVA(
+ layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason) {
+ HRESULT hr;
+
+ // DXVA processing takes up a lot of GPU resources, so limit the number of
+ // videos we use DXVA with at any one time.
+ uint32_t dxvaLimit = StaticPrefs::media_wmf_dxva_max_videos();
+
+ // NOTE(review): equality rather than >= — if the pref is lowered at
+ // runtime below the current count, this check will not trip; confirm that
+ // is acceptable.
+ if (sDXVAVideosCount == dxvaLimit) {
+ aFailureReason.AssignLiteral("Too many DXVA videos playing");
+ return nullptr;
+ }
+
+ UniquePtr<D3D9DXVA2Manager> d3d9Manager(new D3D9DXVA2Manager());
+ hr = d3d9Manager->Init(aKnowsCompositor, aFailureReason);
+ if (SUCCEEDED(hr)) {
+ return d3d9Manager.release();
+ }
+
+ // No hardware accelerated video decoding. :(
+ return nullptr;
+}
+
+// Rejects descriptions whose resolution/framerate combination is known-bad
+// (IsUnsupportedResolution), then verifies a real decoder can be created.
+bool D3D9DXVA2Manager::CanCreateDecoder(const DXVA2_VideoDesc& aDesc) const {
+ // NOTE(review): assumes OutputFrameFreq.Denominator != 0. A media type
+ // with no MF_MT_FRAME_RATE leaves both fields zeroed by
+ // ConvertMFTypeToDXVAType, making this division yield NaN — confirm
+ // callers always provide a frame rate.
+ float framerate = static_cast<float>(aDesc.OutputFrameFreq.Numerator) /
+ aDesc.OutputFrameFreq.Denominator;
+ if (IsUnsupportedResolution(aDesc.SampleWidth, aDesc.SampleHeight,
+ framerate)) {
+ return false;
+ }
+ RefPtr<IDirectXVideoDecoder> decoder = CreateDecoder(aDesc);
+ return decoder.get() != nullptr;
+}
+
+// Enumerates the decoder configurations for mDecoderGUID/aDesc, allocates a
+// single NV12 decoder render-target surface, and returns the first
+// configuration for which CreateVideoDecoder succeeds; null if none works.
+// The CoTaskMem-allocated configuration array is freed on every path.
+already_AddRefed<IDirectXVideoDecoder> D3D9DXVA2Manager::CreateDecoder(
+ const DXVA2_VideoDesc& aDesc) const {
+ UINT configCount;
+ DXVA2_ConfigPictureDecode* configs = nullptr;
+ HRESULT hr = mDecoderService->GetDecoderConfigurations(
+ mDecoderGUID, &aDesc, nullptr, &configCount, &configs);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+ RefPtr<IDirect3DSurface9> surface;
+ hr = mDecoderService->CreateSurface(
+ aDesc.SampleWidth, aDesc.SampleHeight, 0,
+ (D3DFORMAT)MAKEFOURCC('N', 'V', '1', '2'), D3DPOOL_DEFAULT, 0,
+ DXVA2_VideoDecoderRenderTarget, surface.StartAssignment(), NULL);
+ if (!SUCCEEDED(hr)) {
+ CoTaskMemFree(configs);
+ return nullptr;
+ }
+
+ // Try each advertised configuration in order until one yields a decoder.
+ for (UINT i = 0; i < configCount; i++) {
+ RefPtr<IDirectXVideoDecoder> decoder;
+ IDirect3DSurface9* surfaces = surface;
+ hr = mDecoderService->CreateVideoDecoder(mDecoderGUID, &aDesc, &configs[i],
+ &surfaces, 1,
+ decoder.StartAssignment());
+ if (FAILED(hr)) {
+ continue;
+ }
+
+ CoTaskMemFree(configs);
+ return decoder.forget();
+ }
+
+ CoTaskMemFree(configs);
+ return nullptr;
+}
+
+// DXVA2Manager implementation backed by a D3D11 device; the preferred path.
+// Supports H.264, VP8, VP9 and AV1 (see SupportsConfig below).
+class D3D11DXVA2Manager : public DXVA2Manager {
+ public:
+ D3D11DXVA2Manager();
+ virtual ~D3D11DXVA2Manager();
+
+ // Two-step initialization; the Init/InitInternal split's purpose is not
+ // visible in this view — presumably Init wraps InitInternal with extra
+ // handling. Both fill aFailureReason on error.
+ HRESULT Init(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason, ID3D11Device* aDevice);
+ HRESULT InitInternal(layers::KnowsCompositor* aKnowsCompositor,
+ nsACString& aFailureReason, ID3D11Device* aDevice);
+
+ IUnknown* GetDXVADeviceManager() override;
+
+ // Copies a region (aRegion) of the video frame stored in aVideoSample
+ // into an image which is returned by aOutImage.
+ HRESULT CopyToImage(IMFSample* aVideoSample, const gfx::IntRect& aRegion,
+ Image** aOutImage) override;
+
+ // Wraps the sample's texture in an Image without copying (zero-copy path).
+ HRESULT WrapTextureWithImage(IMFSample* aVideoSample,
+ const gfx::IntRect& aRegion,
+ layers::Image** aOutImage) override;
+
+ HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture, uint32_t aArrayIndex,
+ ID3D11Texture2D** aOutTexture) override;
+
+ HRESULT ConfigureForSize(IMFMediaType* aInputType,
+ gfx::YUVColorSpace aColorSpace,
+ gfx::ColorRange aColorRange, uint32_t aWidth,
+ uint32_t aHeight) override;
+
+ bool IsD3D11() override { return true; }
+
+ bool SupportsConfig(const VideoInfo& aInfo, IMFMediaType* aInputType,
+ IMFMediaType* aOutputType) override;
+
+ void BeforeShutdownVideoMFTDecoder() override;
+
+ // Zero-copy NV12 is only usable while decoding happens on the compositor
+ // device; once mDevice diverges from it, the capability is permanently
+ // disabled on the shared usage-info object.
+ bool SupportsZeroCopyNV12Texture() override {
+ if (mIMFSampleUsageInfo->SupportsZeroCopyNV12Texture() &&
+ (mDevice != DeviceManagerDx::Get()->GetCompositorDevice())) {
+ mIMFSampleUsageInfo->DisableZeroCopyNV12Texture();
+ }
+ return mIMFSampleUsageInfo->SupportsZeroCopyNV12Texture();
+ }
+
+ private:
+ HRESULT CreateOutputSample(RefPtr<IMFSample>& aSample,
+ ID3D11Texture2D* aTexture);
+
+ bool CanCreateDecoder(const D3D11_VIDEO_DECODER_DESC& aDesc) const;
+
+ already_AddRefed<ID3D11VideoDecoder> CreateDecoder(
+ const D3D11_VIDEO_DECODER_DESC& aDesc) const;
+ void RefreshIMFSampleWrappers();
+ void ReleaseAllIMFSamples();
+
+ RefPtr<ID3D11Device> mDevice;
+ RefPtr<ID3D11DeviceContext> mContext;
+ RefPtr<IMFDXGIDeviceManager> mDXGIDeviceManager;
+ // Video processor MFT used for format conversion.
+ RefPtr<MFTDecoder> mTransform;
+ RefPtr<D3D11RecycleAllocator> mTextureClientAllocator;
+ RefPtr<layers::KnowsCompositor> mKnowsCompositor;
+ RefPtr<ID3D11VideoDecoder> mDecoder;
+ RefPtr<layers::SyncObjectClient> mSyncObject;
+ // Dimensions configured via ConfigureForSize.
+ uint32_t mWidth = 0;
+ uint32_t mHeight = 0;
+ UINT mDeviceManagerToken = 0;
+ RefPtr<IMFMediaType> mInputType;
+ GUID mInputSubType;
+ gfx::YUVColorSpace mYUVColorSpace;
+ gfx::ColorRange mColorRange = gfx::ColorRange::LIMITED;
+ // Weak references to outstanding zero-copy sample wrappers so they can be
+ // refreshed/released (see RefreshIMFSampleWrappers/ReleaseAllIMFSamples).
+ std::list<ThreadSafeWeakPtr<layers::IMFSampleWrapper>> mIMFSampleWrappers;
+ RefPtr<layers::IMFSampleUsageInfo> mIMFSampleUsageInfo;
+};
+
+// Determines whether a D3D11 hardware decoder can be created for the given
+// input/output media types: validates the frame size, maps the input
+// subtype (H.264 / VP8 / VP9 / AV1) plus codec profile and bit depth to a
+// DXVA decoder-profile GUID, maps the output subtype to a DXGI format, and
+// finally attempts to create a decoder for the resulting description.
+bool D3D11DXVA2Manager::SupportsConfig(const VideoInfo& aInfo,
+ IMFMediaType* aInputType,
+ IMFMediaType* aOutputType) {
+ D3D11_VIDEO_DECODER_DESC desc = {GUID_NULL, 0, 0, DXGI_FORMAT_UNKNOWN};
+
+ HRESULT hr = MFGetAttributeSize(aInputType, MF_MT_FRAME_SIZE,
+ &desc.SampleWidth, &desc.SampleHeight);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+ NS_ENSURE_TRUE(desc.SampleWidth <= MAX_VIDEO_WIDTH, false);
+ NS_ENSURE_TRUE(desc.SampleHeight <= MAX_VIDEO_HEIGHT, false);
+
+ GUID subtype;
+ hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ if (subtype == MFVideoFormat_H264) {
+ // IsUnsupportedResolution is only used to work around an AMD H264 issue.
+ // Default to 30fps when the media type carries no frame rate.
+ const float framerate = [&]() {
+ UINT32 numerator;
+ UINT32 denominator;
+ if (SUCCEEDED(MFGetAttributeRatio(aInputType, MF_MT_FRAME_RATE,
+ &numerator, &denominator))) {
+ return static_cast<float>(numerator) / denominator;
+ }
+ return 30.0f;
+ }();
+ NS_ENSURE_FALSE(
+ IsUnsupportedResolution(desc.SampleWidth, desc.SampleHeight, framerate),
+ false);
+ NS_ENSURE_TRUE(aInfo.mColorDepth == ColorDepth::COLOR_8, false);
+
+ RefPtr<ID3D11VideoDevice> videoDevice;
+ hr = mDevice->QueryInterface(
+ static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
+
+ // NOTE(review): hr is not checked here; if QueryInterface failed,
+ // videoDevice would be null when dereferenced below — confirm this
+ // cannot fail for devices created by this manager.
+ GUID guids[] = {DXVA2_ModeH264_VLD_NoFGT,
+ DXVA2_Intel_ClearVideo_ModeH264_VLD_NoFGT};
+ for (const GUID& guid : guids) {
+ BOOL supported = false;
+ hr = videoDevice->CheckVideoDecoderFormat(&guid, DXGI_FORMAT_NV12,
+ &supported);
+ if (SUCCEEDED(hr) && supported) {
+ desc.Guid = guid;
+ break;
+ }
+ }
+ } else if (subtype == MFVideoFormat_VP80) {
+ NS_ENSURE_TRUE(aInfo.mColorDepth == ColorDepth::COLOR_8, false);
+ desc.Guid = DXVA2_ModeVP8_VLD;
+ } else if (subtype == MFVideoFormat_VP90) {
+ NS_ENSURE_TRUE(aInfo.mColorDepth == ColorDepth::COLOR_8 ||
+ aInfo.mColorDepth == ColorDepth::COLOR_10,
+ false);
+ uint8_t profile;
+
+ if (aInfo.mExtraData && !aInfo.mExtraData->IsEmpty()) {
+ VPXDecoder::VPXStreamInfo vp9Info;
+ VPXDecoder::ReadVPCCBox(vp9Info, aInfo.mExtraData);
+ profile = vp9Info.mProfile;
+ } else {
+ // If no vpcC is present, we can't know the profile, which limits the
+ // subsampling mode, but 4:2:0 is most supported so default to profiles 0
+ // and 2:
+ // Profile 0 = 8bit, 4:2:0
+ // Profile 2 = 10/12bit, 4:2:0
+ profile = aInfo.mColorDepth == ColorDepth::COLOR_8 ? 0 : 2;
+ }
+
+ // Profiles 1 and 3 fall through to default, leaving desc.Guid GUID_NULL,
+ // which is rejected below.
+ switch (profile) {
+ case 0:
+ desc.Guid = DXVA2_ModeVP9_VLD_Profile0;
+ break;
+ case 2:
+ desc.Guid = DXVA2_ModeVP9_VLD_10bit_Profile2;
+ break;
+ default:
+ break;
+ }
+ } else if (subtype == MFVideoFormat_AV1) {
+ uint8_t profile;
+ bool yuv420;
+
+ if (aInfo.mExtraData && !aInfo.mExtraData->IsEmpty()) {
+ AOMDecoder::AV1SequenceInfo av1Info;
+ bool hadSeqHdr;
+ AOMDecoder::ReadAV1CBox(aInfo.mExtraData, av1Info, hadSeqHdr);
+ profile = av1Info.mProfile;
+ yuv420 = av1Info.mSubsamplingX && av1Info.mSubsamplingY;
+ } else {
+ // If no av1C is present, we can't get profile or subsampling mode. 4:2:0
+ // subsampling is most likely to be supported in hardware, so set av1Info
+ // accordingly.
+ // 8bit/10bit = Main profile, 4:2:0
+ // 12bit = Professional, 4:2:0
+ profile = aInfo.mColorDepth == ColorDepth::COLOR_12 ? 2 : 0;
+ yuv420 = true;
+ }
+
+ switch (profile) {
+ case 0:
+ desc.Guid = DXVA2_ModeAV1_VLD_Profile0;
+ break;
+ case 1:
+ desc.Guid = DXVA2_ModeAV1_VLD_Profile1;
+ break;
+ case 2:
+ MOZ_ASSERT(aInfo.mColorDepth < ColorDepth::COLOR_16);
+ if (aInfo.mColorDepth == ColorDepth::COLOR_12) {
+ if (yuv420) {
+ desc.Guid = DXVA2_ModeAV1_VLD_12bit_Profile2_420;
+ } else {
+ desc.Guid = DXVA2_ModeAV1_VLD_12bit_Profile2;
+ }
+ } else {
+ desc.Guid = DXVA2_ModeAV1_VLD_Profile2;
+ }
+ break;
+ default:
+ break;
+ }
+ }
+
+ // Map the output subtype to its DXGI format; anything else leaves
+ // DXGI_FORMAT_UNKNOWN and is rejected below.
+ hr = aOutputType->GetGUID(MF_MT_SUBTYPE, &subtype);
+ if (SUCCEEDED(hr)) {
+ if (subtype == MFVideoFormat_NV12) {
+ desc.OutputFormat = DXGI_FORMAT_NV12;
+ } else if (subtype == MFVideoFormat_P010) {
+ desc.OutputFormat = DXGI_FORMAT_P010;
+ } else if (subtype == MFVideoFormat_P016) {
+ desc.OutputFormat = DXGI_FORMAT_P016;
+ }
+ }
+
+ // Unknown/unsupported codec profile or output format => not supported.
+ if (desc.Guid == GUID_NULL || desc.OutputFormat == DXGI_FORMAT_UNKNOWN) {
+ return false;
+ }
+
+ return CanCreateDecoder(desc);
+}
+
+// D3D11-backed manager. The IMFSampleUsageInfo is created eagerly so that
+// zero-copy NV12 support can be disabled during InitInternal() when the
+// decoder device is not the GPU-process compositor device.
+D3D11DXVA2Manager::D3D11DXVA2Manager()
+    : mIMFSampleUsageInfo(new layers::IMFSampleUsageInfo) {}
+
+D3D11DXVA2Manager::~D3D11DXVA2Manager() {}
+
+// Returns the IMFDXGIDeviceManager shared with the decoder MFT. Reads the
+// member under mLock, matching the "safe to call on any thread" contract
+// documented on DXVA2Manager::GetDXVADeviceManager.
+IUnknown* D3D11DXVA2Manager::GetDXVADeviceManager() {
+  MutexAutoLock lock(mLock);
+  return mDXGIDeviceManager;
+}
+// Initializes the manager. When an explicit device is supplied, only the
+// internal setup (InitInternal) runs; otherwise we additionally create the
+// recycling texture allocator and, when the decoder device differs from the
+// compositor device, a sync object used to synchronize texture access.
+HRESULT
+D3D11DXVA2Manager::Init(layers::KnowsCompositor* aKnowsCompositor,
+                        nsACString& aFailureReason, ID3D11Device* aDevice) {
+  if (aDevice) {
+    return InitInternal(aKnowsCompositor, aFailureReason, aDevice);
+  }
+
+  HRESULT hr;
+  ScopedGfxFeatureReporter reporter("DXVA2D3D11");
+
+  hr = InitInternal(aKnowsCompositor, aFailureReason, aDevice);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (layers::ImageBridgeChild::GetSingleton() || !aKnowsCompositor) {
+    // There's no proper KnowsCompositor for ImageBridge currently (and it
+    // implements the interface), so just use that if it's available.
+    mTextureClientAllocator = new D3D11RecycleAllocator(
+        layers::ImageBridgeChild::GetSingleton().get(), mDevice,
+        gfx::SurfaceFormat::NV12);
+
+    if (ImageBridgeChild::GetSingleton() &&
+        StaticPrefs::media_wmf_use_sync_texture_AtStartup() &&
+        mDevice != DeviceManagerDx::Get()->GetCompositorDevice()) {
+      // We use a syncobject to avoid the cost of the mutex lock when
+      // compositing, and because it allows color conversion to occur
+      // directly from this texture; DXVA does not seem to accept
+      // IDXGIKeyedMutex textures as input.
+      mSyncObject = layers::SyncObjectClient::CreateSyncObjectClient(
+          layers::ImageBridgeChild::GetSingleton()
+              ->GetTextureFactoryIdentifier()
+              .mSyncHandle,
+          mDevice);
+    }
+  } else {
+    mTextureClientAllocator = new D3D11RecycleAllocator(
+        aKnowsCompositor, mDevice, gfx::SurfaceFormat::NV12);
+    mKnowsCompositor = aKnowsCompositor;
+    if (StaticPrefs::media_wmf_use_sync_texture_AtStartup()) {
+      // We use a syncobject to avoid the cost of the mutex lock when
+      // compositing, and because it allows color conversion to occur
+      // directly from this texture; DXVA does not seem to accept
+      // IDXGIKeyedMutex textures as input.
+      mSyncObject = layers::SyncObjectClient::CreateSyncObjectClient(
+          aKnowsCompositor->GetTextureFactoryIdentifier().mSyncHandle, mDevice);
+    }
+  }
+  // Cap the recycled-texture pool to bound GPU memory usage.
+  mTextureClientAllocator->SetMaxPoolSize(5);
+
+  Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
+                        uint32_t(media::MediaDecoderBackend::WMFDXVA2D3D11));
+
+  reporter.SetSuccessful();
+
+  return S_OK;
+}
+
+// Creates (or adopts) the D3D11 decoder device, wires it to a new
+// IMFDXGIDeviceManager, and sets up the video processor MFT used for color
+// conversion. On failure, aFailureReason describes the failing step.
+HRESULT
+D3D11DXVA2Manager::InitInternal(layers::KnowsCompositor* aKnowsCompositor,
+                                nsACString& aFailureReason,
+                                ID3D11Device* aDevice) {
+  HRESULT hr;
+
+  mDevice = aDevice;
+
+  if (!mDevice) {
+    bool useHardwareWebRender =
+        aKnowsCompositor && aKnowsCompositor->UsingHardwareWebRender();
+    mDevice =
+        gfx::DeviceManagerDx::Get()->CreateDecoderDevice(useHardwareWebRender);
+    if (!mDevice) {
+      aFailureReason.AssignLiteral("Failed to create D3D11 device for decoder");
+      return E_FAIL;
+    }
+  }
+
+  // The device is touched from more than one thread, so enable D3D's
+  // internal locking.
+  RefPtr<ID3D10Multithread> mt;
+  hr = mDevice->QueryInterface((ID3D10Multithread**)getter_AddRefs(mt));
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && mt, hr);
+  mt->SetMultithreadProtected(TRUE);
+
+  mDevice->GetImmediateContext(getter_AddRefs(mContext));
+
+  hr = wmf::MFCreateDXGIDeviceManager(&mDeviceManagerToken,
+                                      getter_AddRefs(mDXGIDeviceManager));
+  if (!SUCCEEDED(hr)) {
+    aFailureReason =
+        nsPrintfCString("MFCreateDXGIDeviceManager failed with code %lX", hr);
+    return hr;
+  }
+
+  hr = mDXGIDeviceManager->ResetDevice(mDevice, mDeviceManagerToken);
+  if (!SUCCEEDED(hr)) {
+    aFailureReason = nsPrintfCString(
+        "IMFDXGIDeviceManager::ResetDevice failed with code %lX", hr);
+    return hr;
+  }
+
+  // The IMFTransform interface used by MFTDecoder is documented to require to
+  // run on an MTA thread.
+  // https://msdn.microsoft.com/en-us/library/windows/desktop/ee892371(v=vs.85).aspx#components
+  // The main thread (where this function is called) is STA, not MTA.
+  RefPtr<MFTDecoder> mft;
+  mozilla::mscom::EnsureMTA([&]() -> void {
+    mft = new MFTDecoder();
+    hr = mft->Create(MFT_CATEGORY_VIDEO_PROCESSOR, MFVideoFormat_NV12,
+                     MFVideoFormat_ARGB32);
+
+    if (!SUCCEEDED(hr)) {
+      aFailureReason = nsPrintfCString(
+          "MFTDecoder::Create of Video Processor MFT for color conversion "
+          "failed with code %lX",
+          hr);
+      return;
+    }
+
+    hr = mft->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER,
+                             ULONG_PTR(mDXGIDeviceManager.get()));
+    if (!SUCCEEDED(hr)) {
+      aFailureReason = nsPrintfCString(
+          "MFTDecoder::SendMFTMessage(MFT_MESSAGE_"
+          "SET_D3D_MANAGER) failed with code %lX",
+          hr);
+      return;
+    }
+  });
+
+  if (!SUCCEEDED(hr)) {
+    return hr;
+  }
+  mTransform = mft;
+
+  // Walk DXGI device -> adapter -> description to identify the GPU.
+  RefPtr<IDXGIDevice> dxgiDevice;
+  hr = mDevice->QueryInterface(
+      static_cast<IDXGIDevice**>(getter_AddRefs(dxgiDevice)));
+  if (!SUCCEEDED(hr)) {
+    aFailureReason =
+        nsPrintfCString("QI to IDXGIDevice failed with code %lX", hr);
+    return hr;
+  }
+
+  RefPtr<IDXGIAdapter> adapter;
+  hr = dxgiDevice->GetAdapter(adapter.StartAssignment());
+  if (!SUCCEEDED(hr)) {
+    aFailureReason =
+        nsPrintfCString("IDXGIDevice::GetAdapter failed with code %lX", hr);
+    return hr;
+  }
+
+  DXGI_ADAPTER_DESC adapterDesc;
+  hr = adapter->GetDesc(&adapterDesc);
+  if (!SUCCEEDED(hr)) {
+    aFailureReason =
+        nsPrintfCString("IDXGIAdapter::GetDesc failed with code %lX", hr);
+    return hr;
+  }
+
+  // 0x1022/0x1002 are AMD vendor ids; flag the pre-UVD4 models that need the
+  // high-resolution H264 workaround (see IsUnsupportedResolution).
+  if ((adapterDesc.VendorId == 0x1022 || adapterDesc.VendorId == 0x1002) &&
+      !StaticPrefs::media_wmf_skip_blacklist()) {
+    for (const auto& model : sAMDPreUVD4) {
+      if (adapterDesc.DeviceId == model) {
+        mIsAMDPreUVD4 = true;
+        break;
+      }
+    }
+  }
+
+  // Zero-copy NV12 is only kept enabled when running D3D11 in the GPU process
+  // on the compositor device itself.
+  if (!IsD3D11() || !XRE_IsGPUProcess() ||
+      (mDevice != DeviceManagerDx::Get()->GetCompositorDevice())) {
+    mIMFSampleUsageInfo->DisableZeroCopyNV12Texture();
+  }
+
+  return S_OK;
+}
+
+// Wraps aTexture in a new IMFSample (via a DXGI surface buffer) so the
+// texture can serve as the output of the color-conversion MFT. On success the
+// sample is returned through aSample.
+HRESULT
+D3D11DXVA2Manager::CreateOutputSample(RefPtr<IMFSample>& aSample,
+                                      ID3D11Texture2D* aTexture) {
+  RefPtr<IMFSample> sample;
+  HRESULT hr = wmf::MFCreateSample(getter_AddRefs(sample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFMediaBuffer> buffer;
+  hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), aTexture, 0,
+                                      FALSE, getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = sample->AddBuffer(buffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Only publish the sample once fully assembled.
+  aSample = sample;
+  return S_OK;
+}
+
+// Copies the decoded frame in aVideoSample into a freshly-allocated,
+// shareable D3D11ShareHandleImage. When the source and destination texture
+// formats match, a direct GPU subresource copy is used; otherwise the video
+// processor MFT performs the conversion. The image is returned via aOutImage.
+HRESULT
+D3D11DXVA2Manager::CopyToImage(IMFSample* aVideoSample,
+                               const gfx::IntRect& aRegion, Image** aOutImage) {
+  NS_ENSURE_TRUE(aVideoSample, E_POINTER);
+  NS_ENSURE_TRUE(aOutImage, E_POINTER);
+  MOZ_ASSERT(mTextureClientAllocator);
+
+  RefPtr<D3D11ShareHandleImage> image =
+      new D3D11ShareHandleImage(gfx::IntSize(mWidth, mHeight), aRegion,
+                                ToColorSpace2(mYUVColorSpace), mColorRange);
+
+  // Retrieve the DXGI_FORMAT for the current video sample.
+  RefPtr<IMFMediaBuffer> buffer;
+  HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFDXGIBuffer> dxgiBuf;
+  hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<ID3D11Texture2D> tex;
+  hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(tex));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  D3D11_TEXTURE2D_DESC inDesc;
+  tex->GetDesc(&inDesc);
+
+  bool ok = image->AllocateTexture(mTextureClientAllocator, mDevice);
+  NS_ENSURE_TRUE(ok, E_FAIL);
+
+  RefPtr<TextureClient> client =
+      image->GetTextureClient(ImageBridgeChild::GetSingleton().get());
+  NS_ENSURE_TRUE(client, E_FAIL);
+
+  RefPtr<ID3D11Texture2D> texture = image->GetTexture();
+  D3D11_TEXTURE2D_DESC outDesc;
+  texture->GetDesc(&outDesc);
+
+  RefPtr<IDXGIKeyedMutex> mutex;
+  texture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
+
+  {
+    // BUG FIX: the lock must be a named local. The previous code constructed
+    // an unnamed AutoTextureLock temporary, which released the keyed mutex at
+    // the end of that statement, so the copy/conversion below ran without the
+    // lock held.
+    AutoTextureLock lock(mutex, hr, 2000);
+    if (mutex && (FAILED(hr) || hr == WAIT_TIMEOUT || hr == WAIT_ABANDONED)) {
+      return hr;
+    }
+
+    if (!mutex && mDevice != DeviceManagerDx::Get()->GetCompositorDevice()) {
+      NS_ENSURE_TRUE(mSyncObject, E_FAIL);
+    }
+
+    UINT height = std::min(inDesc.Height, outDesc.Height);
+    PerformanceRecorder<PlaybackStage> perfRecorder(
+        MediaStage::CopyDecodedVideo, height);
+    // The D3D11TextureClientAllocator may return a different texture format
+    // than preferred. In which case the destination texture will be BGRA32.
+    if (outDesc.Format == inDesc.Format) {
+      // Our video frame is stored in a non-sharable ID3D11Texture2D. We need
+      // to create a copy of that frame as a sharable resource, save its share
+      // handle, and put that handle into the rendering pipeline.
+      UINT width = std::min(inDesc.Width, outDesc.Width);
+      D3D11_BOX srcBox = {0, 0, 0, width, height, 1};
+
+      UINT index;
+      dxgiBuf->GetSubresourceIndex(&index);
+      mContext->CopySubresourceRegion(texture, 0, 0, 0, 0, tex, index, &srcBox);
+    } else {
+      // Use MFT to do color conversion.
+      hr = E_FAIL;
+      mozilla::mscom::EnsureMTA(
+          [&]() -> void { hr = mTransform->Input(aVideoSample); });
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+      RefPtr<IMFSample> sample;
+      hr = CreateOutputSample(sample, texture);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+      hr = E_FAIL;
+      mozilla::mscom::EnsureMTA(
+          [&]() -> void { hr = mTransform->Output(&sample); });
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    }
+    perfRecorder.Record();
+  }
+
+  if (!mutex && mDevice != DeviceManagerDx::Get()->GetCompositorDevice() &&
+      mSyncObject) {
+    static StaticMutex sMutex MOZ_UNANNOTATED;
+    // Ensure that we only ever attempt to synchronise via the sync object
+    // serially as when using the same D3D11 device for multiple video decoders
+    // it can lead to deadlocks.
+    StaticMutexAutoLock lock(sMutex);
+    // It appears some race-condition may allow us to arrive here even when
+    // mSyncObject is null. It's better to avoid that crash.
+    client->SyncWithObject(mSyncObject);
+    if (!mSyncObject->Synchronize(true)) {
+      return DXGI_ERROR_DEVICE_RESET;
+    }
+  }
+
+  image.forget(aOutImage);
+
+  return S_OK;
+}
+
+// Zero-copy path: wraps the decoder-owned texture inside aVideoSample
+// directly in a D3D11TextureIMFSampleImage (no GPU copy). The image keeps the
+// sample alive; a weak reference to the wrapper is also recorded so the
+// sample can be detached before decoder shutdown (see ReleaseAllIMFSamples).
+HRESULT D3D11DXVA2Manager::WrapTextureWithImage(IMFSample* aVideoSample,
+                                                const gfx::IntRect& aRegion,
+                                                layers::Image** aOutImage) {
+  NS_ENSURE_TRUE(aVideoSample, E_POINTER);
+  NS_ENSURE_TRUE(aOutImage, E_POINTER);
+
+  RefPtr<IMFMediaBuffer> buffer;
+  HRESULT hr = aVideoSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFDXGIBuffer> dxgiBuf;
+  hr = buffer->QueryInterface((IMFDXGIBuffer**)getter_AddRefs(dxgiBuf));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<ID3D11Texture2D> texture;
+  hr = dxgiBuf->GetResource(__uuidof(ID3D11Texture2D), getter_AddRefs(texture));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  D3D11_TEXTURE2D_DESC desc;
+  texture->GetDesc(&desc);
+
+  UINT arrayIndex;
+  dxgiBuf->GetSubresourceIndex(&arrayIndex);
+
+  // Drop tracking entries for wrappers that were already destroyed.
+  RefreshIMFSampleWrappers();
+
+  RefPtr<D3D11TextureIMFSampleImage> image = new D3D11TextureIMFSampleImage(
+      aVideoSample, texture, arrayIndex, gfx::IntSize(mWidth, mHeight), aRegion,
+      ToColorSpace2(mYUVColorSpace), mColorRange);
+  image->AllocateTextureClient(mKnowsCompositor, mIMFSampleUsageInfo);
+
+  // Track the wrapper weakly so ReleaseAllIMFSamples can clear it later
+  // without extending its lifetime.
+  RefPtr<IMFSampleWrapper> wrapper = image->GetIMFSampleWrapper();
+  ThreadSafeWeakPtr<IMFSampleWrapper> weak(wrapper);
+  mIMFSampleWrappers.push_back(weak);
+
+  image.forget(aOutImage);
+
+  return S_OK;
+}
+
+// Prunes tracking entries whose underlying IMFSampleWrapper has already been
+// destroyed (the weak pointer can no longer be promoted).
+void D3D11DXVA2Manager::RefreshIMFSampleWrappers() {
+  auto it = mIMFSampleWrappers.begin();
+  while (it != mIMFSampleWrappers.end()) {
+    if (RefPtr<IMFSampleWrapper> alive = RefPtr<IMFSampleWrapper>(*it)) {
+      // Still alive; keep tracking it.
+      ++it;
+    } else {
+      it = mIMFSampleWrappers.erase(it);
+    }
+  }
+}
+
+// Detaches the video sample from every wrapper that is still alive, so the
+// decoder's samples are not kept alive by outstanding images.
+void D3D11DXVA2Manager::ReleaseAllIMFSamples() {
+  for (const auto& weak : mIMFSampleWrappers) {
+    if (RefPtr<IMFSampleWrapper> wrapper = RefPtr<IMFSampleWrapper>(weak)) {
+      wrapper->ClearVideoSample();
+    }
+  }
+}
+
+// Called right before the video MFT decoder shuts down: detach every video
+// sample we handed out so nothing outlives the decoder's sample pool.
+void D3D11DXVA2Manager::BeforeShutdownVideoMFTDecoder() {
+  ReleaseAllIMFSamples();
+}
+
+// Converts the NV12/P010/P016 input texture into a newly-created BGRA texture
+// using the video processor MFT. aArrayIndex selects the slice when the input
+// is a texture array. The converted texture is returned via aOutTexture.
+HRESULT
+D3D11DXVA2Manager::CopyToBGRATexture(ID3D11Texture2D* aInTexture,
+                                     uint32_t aArrayIndex,
+                                     ID3D11Texture2D** aOutTexture) {
+  NS_ENSURE_TRUE(aInTexture, E_POINTER);
+  NS_ENSURE_TRUE(aOutTexture, E_POINTER);
+
+  HRESULT hr;
+  RefPtr<ID3D11Texture2D> texture, inTexture;
+
+  inTexture = aInTexture;
+
+  CD3D11_TEXTURE2D_DESC desc;
+  aInTexture->GetDesc(&desc);
+
+  // (Re)configure the conversion MFT whenever the input size/type changed.
+  if (!mInputType || desc.Width != mWidth || desc.Height != mHeight) {
+    RefPtr<IMFMediaType> inputType;
+    hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    const GUID subType = [&]() {
+      switch (desc.Format) {
+        case DXGI_FORMAT_NV12:
+          return MFVideoFormat_NV12;
+        case DXGI_FORMAT_P010:
+          return MFVideoFormat_P010;
+        case DXGI_FORMAT_P016:
+          return MFVideoFormat_P016;
+        default:
+          MOZ_ASSERT_UNREACHABLE("Unexpected texture type");
+          return MFVideoFormat_NV12;
+      }
+    }();
+
+    hr = inputType->SetGUID(MF_MT_SUBTYPE, subType);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE,
+                              MFVideoInterlace_Progressive);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = inputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = ConfigureForSize(inputType, mYUVColorSpace, mColorRange, desc.Width,
+                          desc.Height);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  }
+
+  RefPtr<IDXGIKeyedMutex> mutex;
+  inTexture->QueryInterface((IDXGIKeyedMutex**)getter_AddRefs(mutex));
+  // The rest of this function will not work if inTexture implements
+  // IDXGIKeyedMutex! In that case we must first copy to a non-mutex-using
+  // texture.
+
+  if (mutex) {
+    RefPtr<ID3D11Texture2D> newTexture;
+
+    desc.MiscFlags = 0;
+    hr = mDevice->CreateTexture2D(&desc, nullptr, getter_AddRefs(newTexture));
+    NS_ENSURE_TRUE(SUCCEEDED(hr) && newTexture, E_FAIL);
+
+    hr = mutex->AcquireSync(0, 2000);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    mContext->CopyResource(newTexture, inTexture);
+
+    mutex->ReleaseSync(0);
+    inTexture = newTexture;
+  }
+
+  desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
+  desc.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_SHADER_RESOURCE;
+
+  hr = mDevice->CreateTexture2D(&desc, nullptr, getter_AddRefs(texture));
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && texture, E_FAIL);
+
+  // BUG FIX: the results of MFCreateSample/AddBuffer were previously ignored;
+  // a failure would have dereferenced a null sample or fed an incomplete
+  // sample to the MFT.
+  RefPtr<IMFSample> inputSample;
+  hr = wmf::MFCreateSample(getter_AddRefs(inputSample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && inputSample, E_FAIL);
+
+  // If these aren't set the decoder fails.
+  inputSample->SetSampleTime(10);
+  inputSample->SetSampleDuration(10000);
+
+  RefPtr<IMFMediaBuffer> inputBuffer;
+  hr = wmf::MFCreateDXGISurfaceBuffer(__uuidof(ID3D11Texture2D), inTexture,
+                                      aArrayIndex, FALSE,
+                                      getter_AddRefs(inputBuffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = inputSample->AddBuffer(inputBuffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = E_FAIL;
+  mozilla::mscom::EnsureMTA(
+      [&]() -> void { hr = mTransform->Input(inputSample); });
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFSample> outputSample;
+  hr = CreateOutputSample(outputSample, texture);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = E_FAIL;
+  mozilla::mscom::EnsureMTA(
+      [&]() -> void { hr = mTransform->Output(&outputSample); });
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  texture.forget(aOutTexture);
+
+  return S_OK;
+}
+
+// (Re)configures the video processor MFT and texture allocator for a new
+// input subtype, frame size, color space, and color range. No-ops when
+// nothing changed since the last call.
+HRESULT
+D3D11DXVA2Manager::ConfigureForSize(IMFMediaType* aInputType,
+                                    gfx::YUVColorSpace aColorSpace,
+                                    gfx::ColorRange aColorRange,
+                                    uint32_t aWidth, uint32_t aHeight) {
+  GUID subType = {0};
+  HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &subType);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (subType == mInputSubType && aWidth == mWidth && aHeight == mHeight &&
+      mYUVColorSpace == aColorSpace && mColorRange == aColorRange) {
+    // If the media type hasn't changed, don't reconfigure.
+    return S_OK;
+  }
+
+  // Create a copy of our input type.
+  RefPtr<IMFMediaType> inputType;
+  hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  hr = aInputType->CopyAllItems(inputType);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE, aWidth, aHeight);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // MFT attributes must be touched on an MTA thread (see InitInternal).
+  RefPtr<IMFAttributes> attr;
+  mozilla::mscom::EnsureMTA(
+      [&]() -> void { attr = mTransform->GetAttributes(); });
+  NS_ENSURE_TRUE(attr != nullptr, E_FAIL);
+
+  // Put the XVP in playback mode and disable low-latency mode.
+  hr = attr->SetUINT32(MF_XVP_PLAYBACK_MODE, TRUE);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = attr->SetUINT32(MF_LOW_LATENCY, FALSE);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // The MFT always converts to ARGB32 (see the MFT creation in InitInternal).
+  RefPtr<IMFMediaType> outputType;
+  hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = outputType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = E_FAIL;
+  mozilla::mscom::EnsureMTA([&]() -> void {
+    hr = mTransform->SetMediaTypes(
+        inputType, outputType, [aWidth, aHeight](IMFMediaType* aOutput) {
+          HRESULT hr = aOutput->SetUINT32(MF_MT_INTERLACE_MODE,
+                                          MFVideoInterlace_Progressive);
+          NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+          hr = aOutput->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE);
+          NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+          hr = MFSetAttributeSize(aOutput, MF_MT_FRAME_SIZE, aWidth, aHeight);
+          NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+          return S_OK;
+        });
+  });
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Only commit the cached configuration once everything succeeded.
+  mWidth = aWidth;
+  mHeight = aHeight;
+  mInputType = inputType;
+  mInputSubType = subType;
+  mYUVColorSpace = aColorSpace;
+  mColorRange = aColorRange;
+  if (mTextureClientAllocator) {
+    gfx::SurfaceFormat format = [&]() {
+      if (subType == MFVideoFormat_NV12) {
+        return gfx::SurfaceFormat::NV12;
+      } else if (subType == MFVideoFormat_P010) {
+        return gfx::SurfaceFormat::P010;
+      } else if (subType == MFVideoFormat_P016) {
+        return gfx::SurfaceFormat::P016;
+      } else {
+        MOZ_ASSERT_UNREACHABLE("Unexpected texture type");
+        return gfx::SurfaceFormat::NV12;
+      }
+    }();
+    mTextureClientAllocator->SetPreferredSurfaceFormat(format);
+  }
+  return S_OK;
+}
+
+// A decoder description is supported iff the device can actually build a
+// decoder instance for it.
+bool D3D11DXVA2Manager::CanCreateDecoder(
+    const D3D11_VIDEO_DECODER_DESC& aDesc) const {
+  RefPtr<ID3D11VideoDecoder> decoder(CreateDecoder(aDesc));
+  return !!decoder;
+}
+
+// Attempts to create an ID3D11VideoDecoder for aDesc, trying each supported
+// decoder configuration in turn. Returns nullptr when none works.
+already_AddRefed<ID3D11VideoDecoder> D3D11DXVA2Manager::CreateDecoder(
+    const D3D11_VIDEO_DECODER_DESC& aDesc) const {
+  RefPtr<ID3D11VideoDevice> videoDevice;
+  HRESULT hr = mDevice->QueryInterface(
+      static_cast<ID3D11VideoDevice**>(getter_AddRefs(videoDevice)));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+  UINT configCount = 0;
+  hr = videoDevice->GetVideoDecoderConfigCount(&aDesc, &configCount);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+  for (UINT i = 0; i < configCount; i++) {
+    D3D11_VIDEO_DECODER_CONFIG config;
+    hr = videoDevice->GetVideoDecoderConfig(&aDesc, i, &config);
+    if (SUCCEEDED(hr)) {
+      RefPtr<ID3D11VideoDecoder> decoder;
+      hr = videoDevice->CreateVideoDecoder(&aDesc, &config,
+                                           decoder.StartAssignment());
+      // BUG FIX: previously the CreateVideoDecoder result was not checked and
+      // we returned after the first config, so a failure with config 0 made
+      // the whole description appear unsupported even if a later config
+      // worked. Keep trying the remaining configurations on failure.
+      if (SUCCEEDED(hr) && decoder) {
+        return decoder.forget();
+      }
+    }
+  }
+  return nullptr;
+}
+
+/* static */
+// Factory for the D3D11 manager. Enforces the global cap on concurrent DXVA
+// videos before constructing and initializing the manager; returns nullptr
+// (with aFailureReason set) on failure.
+DXVA2Manager* DXVA2Manager::CreateD3D11DXVA(
+    layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason,
+    ID3D11Device* aDevice) {
+  // DXVA processing takes up a lot of GPU resources, so limit the number of
+  // videos we use DXVA with at any one time.
+  uint32_t dxvaLimit = StaticPrefs::media_wmf_dxva_max_videos();
+
+  // BUG FIX: use >= rather than ==. If the limit pref is lowered while videos
+  // are already playing, the count can exceed the new limit and an equality
+  // test would never fire again, allowing unbounded DXVA instances.
+  if (sDXVAVideosCount >= dxvaLimit) {
+    aFailureReason.AssignLiteral("Too many DXVA videos playing");
+    return nullptr;
+  }
+
+  UniquePtr<D3D11DXVA2Manager> manager(new D3D11DXVA2Manager());
+  HRESULT hr = manager->Init(aKnowsCompositor, aFailureReason, aDevice);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+  return manager.release();
+}
+
+// Every live manager counts against the concurrent-DXVA-video limit checked
+// in CreateD3D11DXVA; the counter is maintained by the base ctor/dtor.
+DXVA2Manager::DXVA2Manager() : mLock("DXVA2Manager") { ++sDXVAVideosCount; }
+
+DXVA2Manager::~DXVA2Manager() { --sDXVAVideosCount; }
+
+// AMD cards with UVD3 or earlier perform poorly trying to decode 1080p60 in
+// hardware, so use software instead. Pick 45 as an arbitrary upper bound for
+// the framerate we can handle.
+bool DXVA2Manager::IsUnsupportedResolution(const uint32_t& aWidth,
+                                           const uint32_t& aHeight,
+                                           const float& aFramerate) const {
+  if (StaticPrefs::media_wmf_amd_highres_enabled() || !mIsAMDPreUVD4) {
+    // Workaround disabled by pref, or not an affected GPU.
+    return false;
+  }
+  const bool isHighRes = aWidth >= 1920 || aHeight >= 1088;
+  return isHighRes && aFramerate > 45;
+}
+
+/* static */
+// Reports whether NV12 output can be trusted on the given GPU: known-broken
+// AMD and NVIDIA models are blocked, as are AMD drivers older than
+// 21.19.411.0 (which mishandle NV12 surfaces).
+bool DXVA2Manager::IsNV12Supported(uint32_t aVendorID, uint32_t aDeviceID,
+                                   const nsAString& aDriverVersionString) {
+  const bool isAMD = aVendorID == 0x1022 || aVendorID == 0x1002;
+  if (isAMD) {
+    // Block old cards regardless of driver version.
+    for (const auto& model : sAMDPreUVD4) {
+      if (aDeviceID == model) {
+        return false;
+      }
+    }
+    // Require a parseable driver version of at least 21.19.411.0.
+    uint64_t driverVersion = 0;
+    const bool parsed =
+        widget::ParseDriverVersion(aDriverVersionString, &driverVersion);
+    return parsed && driverVersion >= widget::V(21, 19, 411, 0);
+  }
+
+  if (aVendorID == 0x10DE) {
+    // NVIDIA: a fixed list of models with broken NV12 support.
+    for (const auto& model : sNVIDIABrokenNV12) {
+      if (aDeviceID == model) {
+        return false;
+      }
+    }
+  }
+
+  return true;
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/DXVA2Manager.h b/dom/media/platforms/wmf/DXVA2Manager.h
new file mode 100644
index 0000000000..1fb501b406
--- /dev/null
+++ b/dom/media/platforms/wmf/DXVA2Manager.h
@@ -0,0 +1,94 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#if !defined(DXVA2Manager_h_)
+# define DXVA2Manager_h_
+
+# include "MediaInfo.h"
+# include "WMF.h"
+# include "mozilla/Mutex.h"
+# include "mozilla/gfx/Rect.h"
+# include "d3d11.h"
+
+namespace mozilla {
+
+namespace layers {
+class Image;
+class ImageContainer;
+class KnowsCompositor;
+} // namespace layers
+
+// Abstracts hardware-accelerated (DXVA2) handling of decoded video frames
+// over either D3D9Ex or D3D11. Instances are created via the CreateD3D9DXVA /
+// CreateD3D11DXVA factories and are counted against a global limit on
+// concurrent DXVA videos.
+class DXVA2Manager {
+ public:
+  // Creates and initializes a DXVA2Manager. We can use DXVA2 via either
+  // D3D9Ex or D3D11.
+  static DXVA2Manager* CreateD3D9DXVA(layers::KnowsCompositor* aKnowsCompositor,
+                                      nsACString& aFailureReason);
+  static DXVA2Manager* CreateD3D11DXVA(
+      layers::KnowsCompositor* aKnowsCompositor, nsACString& aFailureReason,
+      ID3D11Device* aDevice = nullptr);
+
+  // Returns a pointer to the D3D device manager responsible for managing the
+  // device we're using for hardware accelerated video decoding. If we're using
+  // D3D9Ex, this is an IDirect3DDeviceManager9. For D3D11 this is an
+  // IMFDXGIDeviceManager. It is safe to call this on any thread.
+  virtual IUnknown* GetDXVADeviceManager() = 0;
+
+  // Creates an Image for the video frame stored in aVideoSample.
+  virtual HRESULT CopyToImage(IMFSample* aVideoSample,
+                              const gfx::IntRect& aRegion,
+                              layers::Image** aOutImage) = 0;
+
+  // Zero-copy wrap of a decoder texture; only the D3D11 subclass provides an
+  // implementation.
+  virtual HRESULT WrapTextureWithImage(IMFSample* aVideoSample,
+                                       const gfx::IntRect& aRegion,
+                                       layers::Image** aOutImage) {
+    // Not implemented!
+    MOZ_CRASH("WrapTextureWithImage not implemented on this manager.");
+    return E_FAIL;
+  }
+
+  virtual HRESULT CopyToBGRATexture(ID3D11Texture2D* aInTexture,
+                                    uint32_t aArrayIndex,
+                                    ID3D11Texture2D** aOutTexture) {
+    // Not implemented!
+    MOZ_CRASH("CopyToBGRATexture not implemented on this manager.");
+    return E_FAIL;
+  }
+
+  // Reconfigures the conversion pipeline for a new input type/size; a no-op
+  // in the base class.
+  virtual HRESULT ConfigureForSize(IMFMediaType* aInputType,
+                                   gfx::YUVColorSpace aColorSpace,
+                                   gfx::ColorRange aColorRange, uint32_t aWidth,
+                                   uint32_t aHeight) {
+    return S_OK;
+  }
+
+  virtual bool IsD3D11() { return false; }
+
+  virtual ~DXVA2Manager();
+
+  virtual bool SupportsConfig(const VideoInfo& aInfo, IMFMediaType* aInputType,
+                              IMFMediaType* aOutputType) = 0;
+
+  // Called before shutdown video MFTDecoder.
+  virtual void BeforeShutdownVideoMFTDecoder() {}
+
+  virtual bool SupportsZeroCopyNV12Texture() { return false; }
+
+  static bool IsNV12Supported(uint32_t aVendorID, uint32_t aDeviceID,
+                              const nsAString& aDriverVersionString);
+
+ protected:
+  // Guards state that may be read from any thread (see GetDXVADeviceManager).
+  Mutex mLock MOZ_UNANNOTATED;
+  DXVA2Manager();
+
+  bool IsUnsupportedResolution(const uint32_t& aWidth, const uint32_t& aHeight,
+                               const float& aFramerate) const;
+
+  // True on AMD GPUs from the sAMDPreUVD4 model list; enables the
+  // high-resolution H264 software-fallback workaround.
+  bool mIsAMDPreUVD4 = false;
+};
+
+} // namespace mozilla
+
+#endif // DXVA2Manager_h_
diff --git a/dom/media/platforms/wmf/MFCDMExtra.h b/dom/media/platforms/wmf/MFCDMExtra.h
new file mode 100644
index 0000000000..04e625d854
--- /dev/null
+++ b/dom/media/platforms/wmf/MFCDMExtra.h
@@ -0,0 +1,307 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFCDMEXTRA_H
+#define DOM_MEDIA_PLATFORM_WMF_MFCDMEXTRA_H
+
+// Currently, we build with WINVER=0x601 (Win7), which means the declarations in
+// mfcontentdecryptionmodule.h (only available on Win10, 0x0A00) will not be
+// visible. Also, we don't yet have the Fall Creators Update SDK
+// available on build machines, so even with updated WINVER, some of the
+// interfaces we need would not be present. To work around this, until the build
+// environment is updated, we include copies of the relevant classes/interfaces
+// we need.
+#if defined(WINVER) && WINVER >= 0x0A00
+# include <mfcontentdecryptionmodule.h>
+#else
+// For `IMFCdmSuspendNotify`
+# include "MFMediaEngineExtra.h"
+
+typedef enum MF_MEDIAKEYS_REQUIREMENT {
+ MF_MEDIAKEYS_REQUIREMENT_REQUIRED = 1,
+ MF_MEDIAKEYS_REQUIREMENT_OPTIONAL = 2,
+ MF_MEDIAKEYS_REQUIREMENT_NOT_ALLOWED = 3
+} MF_MEDIAKEYS_REQUIREMENT;
+
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY
+ MF_CONTENTDECRYPTIONMODULE_STOREPATH = {
+ {0x77d993b9,
+ 0xba61,
+ 0x4bb7,
+ {0x92, 0xc6, 0x18, 0xc8, 0x6a, 0x18, 0x9c, 0x06}},
+ 0x02};
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY MF_EME_INITDATATYPES = {
+ {0x497d231b,
+ 0x4eb9,
+ 0x4df0,
+ {0xb4, 0x74, 0xb9, 0xaf, 0xeb, 0x0a, 0xdf, 0x38}},
+ PID_FIRST_USABLE + 0x00000001};
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY MF_EME_DISTINCTIVEID = {
+ {0x7dc9c4a5,
+ 0x12be,
+ 0x497e,
+ {0x8b, 0xff, 0x9b, 0x60, 0xb2, 0xdc, 0x58, 0x45}},
+ PID_FIRST_USABLE + 0x00000002};
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY MF_EME_PERSISTEDSTATE = {
+ {0x5d4df6ae,
+ 0x9af1,
+ 0x4e3d,
+ {0x95, 0x5b, 0x0e, 0x4b, 0xd2, 0x2f, 0xed, 0xf0}},
+ PID_FIRST_USABLE + 0x00000003};
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY MF_EME_AUDIOCAPABILITIES = {
+ {0x980fbb84,
+ 0x297d,
+ 0x4ea7,
+ {0x89, 0x5f, 0xbc, 0xf2, 0x8a, 0x46, 0x28, 0x81}},
+ PID_FIRST_USABLE + 0x00000004};
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY MF_EME_VIDEOCAPABILITIES = {
+ {0xb172f83d,
+ 0x30dd,
+ 0x4c10,
+ {0x80, 0x06, 0xed, 0x53, 0xda, 0x4d, 0x3b, 0xdb}},
+ PID_FIRST_USABLE + 0x00000005};
+EXTERN_C const DECLSPEC_SELECTANY PROPERTYKEY MF_EME_ROBUSTNESS = {
+ {0x9d3d2b9e,
+ 0x7023,
+ 0x4944,
+ {0xa8, 0xf5, 0xec, 0xca, 0x52, 0xa4, 0x69, 0x90}},
+ PID_FIRST_USABLE + 0x00000001};
+
+typedef enum MF_MEDIAKEYSESSION_TYPE {
+ MF_MEDIAKEYSESSION_TYPE_TEMPORARY = 0,
+ MF_MEDIAKEYSESSION_TYPE_PERSISTENT_LICENSE =
+ (MF_MEDIAKEYSESSION_TYPE_TEMPORARY + 1),
+ MF_MEDIAKEYSESSION_TYPE_PERSISTENT_RELEASE_MESSAGE =
+ (MF_MEDIAKEYSESSION_TYPE_PERSISTENT_LICENSE + 1),
+ MF_MEDIAKEYSESSION_TYPE_PERSISTENT_USAGE_RECORD =
+ (MF_MEDIAKEYSESSION_TYPE_PERSISTENT_RELEASE_MESSAGE + 1)
+} MF_MEDIAKEYSESSION_TYPE;
+
+typedef enum MF_MEDIAKEYSESSION_MESSAGETYPE {
+ MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_REQUEST = 0,
+ MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_RENEWAL = 1,
+ MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_RELEASE = 2,
+ MF_MEDIAKEYSESSION_MESSAGETYPE_INDIVIDUALIZATION_REQUEST = 3
+} MF_MEDIAKEYSESSION_MESSAGETYPE;
+
+typedef enum MF_MEDIAKEY_STATUS {
+ MF_MEDIAKEY_STATUS_USABLE = 0,
+ MF_MEDIAKEY_STATUS_EXPIRED = (MF_MEDIAKEY_STATUS_USABLE + 1),
+ MF_MEDIAKEY_STATUS_OUTPUT_DOWNSCALED = (MF_MEDIAKEY_STATUS_EXPIRED + 1),
+ MF_MEDIAKEY_STATUS_OUTPUT_NOT_ALLOWED =
+ (MF_MEDIAKEY_STATUS_OUTPUT_DOWNSCALED + 1),
+ MF_MEDIAKEY_STATUS_STATUS_PENDING =
+ (MF_MEDIAKEY_STATUS_OUTPUT_NOT_ALLOWED + 1),
+ MF_MEDIAKEY_STATUS_INTERNAL_ERROR = (MF_MEDIAKEY_STATUS_STATUS_PENDING + 1),
+ MF_MEDIAKEY_STATUS_RELEASED = (MF_MEDIAKEY_STATUS_INTERNAL_ERROR + 1),
+ MF_MEDIAKEY_STATUS_OUTPUT_RESTRICTED = (MF_MEDIAKEY_STATUS_RELEASED + 1)
+} MF_MEDIAKEY_STATUS;
+
+typedef struct MFMediaKeyStatus {
+ BYTE* pbKeyId;
+ UINT cbKeyId;
+ MF_MEDIAKEY_STATUS eMediaKeyStatus;
+} MFMediaKeyStatus;
+
+EXTERN_GUID(MF_CONTENTDECRYPTIONMODULE_SERVICE, 0x15320c45, 0xff80, 0x484a,
+ 0x9d, 0xcb, 0xd, 0xf8, 0x94, 0xe6, 0x9a, 0x1);
+EXTERN_GUID(GUID_ObjectStream, 0x3e73735c, 0xe6c0, 0x481d, 0x82, 0x60, 0xee,
+ 0x5d, 0xb1, 0x34, 0x3b, 0x5f);
+EXTERN_GUID(GUID_ClassName, 0x77631a31, 0xe5e7, 0x4785, 0xbf, 0x17, 0x20, 0xf5,
+ 0x7b, 0x22, 0x48, 0x02);
+EXTERN_GUID(CLSID_EMEStoreActivate, 0x2df7b51e, 0x797b, 0x4d06, 0xbe, 0x71,
+ 0xd1, 0x4a, 0x52, 0xcf, 0x84, 0x21);
+
+# ifndef __IMFContentDecryptionModuleSessionCallbacks_INTERFACE_DEFINED__
+# define __IMFContentDecryptionModuleSessionCallbacks_INTERFACE_DEFINED__
+
+/* interface IMFContentDecryptionModuleSessionCallbacks */
+/* [unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFContentDecryptionModuleSessionCallbacks;
+
+MIDL_INTERFACE("3f96ee40-ad81-4096-8470-59a4b770f89a")
+IMFContentDecryptionModuleSessionCallbacks : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE KeyMessage(
+ /* [in] */ MF_MEDIAKEYSESSION_MESSAGETYPE messageType,
+ /* [size_is][in] */
+ __RPC__in_ecount_full(messageSize) const BYTE* message,
+ /* [in] */ DWORD messageSize,
+ /* [optional][in] */ __RPC__in LPCWSTR destinationURL) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE KeyStatusChanged(void) = 0;
+};
+
+# endif /* __IMFContentDecryptionModuleSessionCallbacks_INTERFACE_DEFINED__ \
+ */
+
+# ifndef __IMFContentDecryptionModuleSession_INTERFACE_DEFINED__
+# define __IMFContentDecryptionModuleSession_INTERFACE_DEFINED__
+
+/* interface IMFContentDecryptionModuleSession */
+/* [unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFContentDecryptionModuleSession;
+
+MIDL_INTERFACE("4e233efd-1dd2-49e8-b577-d63eee4c0d33")
+IMFContentDecryptionModuleSession : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE GetSessionId(
+ /* [out] */ __RPC__deref_out_opt LPWSTR * sessionId) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetExpiration(
+ /* [out] */ __RPC__out double* expiration) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetKeyStatuses(
+ /* [size_is][size_is][out] */ __RPC__deref_out_ecount_full_opt(
+ *numKeyStatuses) MFMediaKeyStatus *
+ *keyStatuses,
+ /* [out] */ __RPC__out UINT * numKeyStatuses) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Load(
+ /* [in] */ __RPC__in LPCWSTR sessionId,
+ /* [out] */ __RPC__out BOOL * loaded) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GenerateRequest(
+ /* [in] */ __RPC__in LPCWSTR initDataType,
+ /* [size_is][in] */
+ __RPC__in_ecount_full(initDataSize) const BYTE* initData,
+ /* [in] */ DWORD initDataSize) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Update(
+ /* [size_is][in] */ __RPC__in_ecount_full(responseSize)
+ const BYTE* response,
+ /* [in] */ DWORD responseSize) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Close(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Remove(void) = 0;
+};
+
+# endif /* __IMFContentDecryptionModuleSession_INTERFACE_DEFINED__ */
+
+# ifndef __IMFPMPHostApp_INTERFACE_DEFINED__
+# define __IMFPMPHostApp_INTERFACE_DEFINED__
+
+/* interface IMFPMPHostApp */
+/* [uuid][object] */
+
+EXTERN_C const IID IID_IMFPMPHostApp;
+
+MIDL_INTERFACE("84d2054a-3aa1-4728-a3b0-440a418cf49c")
+IMFPMPHostApp : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE LockProcess(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE UnlockProcess(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE ActivateClassById(
+ /* [in] */ __RPC__in LPCWSTR id,
+ /* [unique][in] */ __RPC__in_opt IStream * pStream,
+ /* [in] */ __RPC__in REFIID riid,
+ /* [iid_is][out] */ __RPC__deref_out_opt void** ppv) = 0;
+};
+
+# endif /* __IMFPMPHostApp_INTERFACE_DEFINED__ */
+
+# ifndef __IMFContentDecryptionModule_INTERFACE_DEFINED__
+# define __IMFContentDecryptionModule_INTERFACE_DEFINED__
+
+/* interface IMFContentDecryptionModule */
+/* [unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFContentDecryptionModule;
+
+MIDL_INTERFACE("87be986c-10be-4943-bf48-4b54ce1983a2")
+IMFContentDecryptionModule : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE SetContentEnabler(
+ /* [in] */ __RPC__in_opt IMFContentEnabler * contentEnabler,
+ /* [in] */ __RPC__in_opt IMFAsyncResult * result) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetSuspendNotify(
+ /* [out] */ __RPC__deref_out_opt IMFCdmSuspendNotify * *notify) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetPMPHostApp(
+ /* [in] */ __RPC__in_opt IMFPMPHostApp * pmpHostApp) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CreateSession(
+ /* [in] */ MF_MEDIAKEYSESSION_TYPE sessionType,
+ /* [in] */ __RPC__in_opt IMFContentDecryptionModuleSessionCallbacks *
+ callbacks,
+ /* [out] */ __RPC__deref_out_opt IMFContentDecryptionModuleSession *
+ *session) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetServerCertificate(
+ /* [size_is][in] */ __RPC__in_ecount_full(certificateSize)
+ const BYTE* certificate,
+ /* [in] */ DWORD certificateSize) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CreateTrustedInput(
+ /* [size_is][in] */ __RPC__in_ecount_full(contentInitDataSize)
+ const BYTE* contentInitData,
+ /* [in] */ DWORD contentInitDataSize,
+ /* [out] */ __RPC__deref_out_opt IMFTrustedInput** trustedInput) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetProtectionSystemIds(
+ /* [size_is][size_is][out] */ __RPC__deref_out_ecount_full_opt(*count)
+ GUID *
+ *systemIds,
+ /* [out] */ __RPC__out DWORD * count) = 0;
+};
+
+# endif /* __IMFContentDecryptionModule_INTERFACE_DEFINED__ */
+
+# ifndef __IMFContentDecryptionModuleAccess_INTERFACE_DEFINED__
+# define __IMFContentDecryptionModuleAccess_INTERFACE_DEFINED__
+
+/* interface IMFContentDecryptionModuleAccess */
+/* [unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFContentDecryptionModuleAccess;
+
+MIDL_INTERFACE("a853d1f4-e2a0-4303-9edc-f1a68ee43136")
+IMFContentDecryptionModuleAccess : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE CreateContentDecryptionModule(
+ /* [in] */ __RPC__in_opt IPropertyStore *
+ contentDecryptionModuleProperties,
+ /* [out] */ __RPC__deref_out_opt IMFContentDecryptionModule *
+ *contentDecryptionModule) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetConfiguration(
+ /* [out] */ __RPC__deref_out_opt IPropertyStore * *configuration) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetKeySystem(
+ /* [out] */ __RPC__deref_out_opt LPWSTR * keySystem) = 0;
+};
+# endif /* __IMFContentDecryptionModuleAccess_INTERFACE_DEFINED__ */
+
+# ifndef __IMFContentDecryptionModuleFactory_INTERFACE_DEFINED__
+# define __IMFContentDecryptionModuleFactory_INTERFACE_DEFINED__
+
+/* interface IMFContentDecryptionModuleFactory */
+/* [local][uuid][object] */
+
+EXTERN_C const IID IID_IMFContentDecryptionModuleFactory;
+MIDL_INTERFACE("7d5abf16-4cbb-4e08-b977-9ba59049943e")
+IMFContentDecryptionModuleFactory : public IUnknown {
+ public:
+ virtual BOOL STDMETHODCALLTYPE IsTypeSupported(
+ /* [in] */ LPCWSTR keySystem,
+ /* [optional][in] */ LPCWSTR contentType) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CreateContentDecryptionModuleAccess(
+ /* [in] */ LPCWSTR keySystem,
+ /* [size_is][size_is][in] */ IPropertyStore * *configurations,
+ /* [in] */ DWORD numConfigurations,
+ /* [out] */ IMFContentDecryptionModuleAccess *
+ *contentDecryptionModuleAccess) = 0;
+};
+# endif /* __IMFContentDecryptionModuleFactory_INTERFACE_DEFINED__ */
+
+#endif // defined(WINVER) && WINVER >= 0x0A00
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFCDMEXTRA_H
diff --git a/dom/media/platforms/wmf/MFCDMProxy.cpp b/dom/media/platforms/wmf/MFCDMProxy.cpp
new file mode 100644
index 0000000000..f460f5fb02
--- /dev/null
+++ b/dom/media/platforms/wmf/MFCDMProxy.cpp
@@ -0,0 +1,74 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFCDMProxy.h"
+
+#include "MFMediaEngineUtils.h"
+
+namespace mozilla {
+
+using Microsoft::WRL::ComPtr;
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFCDMProxy=%p, " msg, this, ##__VA_ARGS__))
+
+HRESULT MFCDMProxy::GetPMPServer(REFIID aRiid, LPVOID* aPMPServerOut) {
+ ComPtr<IMFGetService> cdmServices;
+ RETURN_IF_FAILED(mCDM.As(&cdmServices));
+ RETURN_IF_FAILED(cdmServices->GetService(MF_CONTENTDECRYPTIONMODULE_SERVICE,
+ aRiid, aPMPServerOut));
+ return S_OK;
+}
+
+HRESULT MFCDMProxy::GetInputTrustAuthority(uint32_t aStreamId,
+                                           const uint8_t* aContentInitData,
+                                           uint32_t aContentInitDataSize,
+                                           REFIID aRiid,
+                                           IUnknown** aInputTrustAuthorityOut) {
+  if (mInputTrustAuthorities.count(aStreamId)) {
+    RETURN_IF_FAILED(
+        mInputTrustAuthorities[aStreamId].CopyTo(aInputTrustAuthorityOut));
+    return S_OK;
+  }
+
+  if (!mTrustedInput) {
+    RETURN_IF_FAILED(mCDM->CreateTrustedInput(
+        aContentInitData, aContentInitDataSize, &mTrustedInput));
+    LOG("Created a trusted input for stream %u", aStreamId);
+  }
+
+  // GetInputTrustAuthority only takes IUnknown* as its output; requesting any
+  // other COM interface directly would hit a v-table mismatch issue.
+  ComPtr<IUnknown> unknown;
+  RETURN_IF_FAILED(
+      mTrustedInput->GetInputTrustAuthority(aStreamId, aRiid, &unknown));
+
+  ComPtr<IMFInputTrustAuthority> inputTrustAuthority;
+  RETURN_IF_FAILED(unknown.As(&inputTrustAuthority));
+  RETURN_IF_FAILED(unknown.CopyTo(aInputTrustAuthorityOut));
+
+  mInputTrustAuthorities[aStreamId] = inputTrustAuthority;
+  return S_OK;
+}
+
+HRESULT MFCDMProxy::SetContentEnabler(IUnknown* aRequest,
+ IMFAsyncResult* aResult) {
+ LOG("SetContentEnabler");
+ ComPtr<IMFContentEnabler> contentEnabler;
+ RETURN_IF_FAILED(aRequest->QueryInterface(IID_PPV_ARGS(&contentEnabler)));
+ return mCDM->SetContentEnabler(contentEnabler.Get(), aResult);
+}
+
+void MFCDMProxy::OnHardwareContextReset() {
+ LOG("OnHardwareContextReset");
+ // Hardware context reset happens, all the crypto sessions are in invalid
+ // states. So drop everything here.
+ mTrustedInput.Reset();
+ mInputTrustAuthorities.clear();
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFCDMProxy.h b/dom/media/platforms/wmf/MFCDMProxy.h
new file mode 100644
index 0000000000..605755705c
--- /dev/null
+++ b/dom/media/platforms/wmf/MFCDMProxy.h
@@ -0,0 +1,71 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFCDMPROXY_H
+#define DOM_MEDIA_PLATFORM_WMF_MFCDMPROXY_H
+
+#include <map>
+#include <mfobjects.h>
+#include <unknwn.h>
+#include <windef.h>
+#include <wrl.h>
+
+#include "MFCDMExtra.h"
+#include "nsISupportsImpl.h"
+
+namespace mozilla {
+
+/**
+ * MFCDMProxy wraps a IMFContentDecryptionModule and provides some high level
+ * helper methods in order to allow caller to interact with the wrapped CDM.
+ */
+class MFCDMProxy {
+  NS_INLINE_DECL_REFCOUNTING(MFCDMProxy);
+
+  explicit MFCDMProxy(IMFContentDecryptionModule* aCDM) : mCDM(aCDM) {}
+
+ public:
+  // Return an IMediaProtectionPMPServer from the existing CDM.
+  HRESULT GetPMPServer(REFIID aRiid, LPVOID* aPMPServerOut);
+
+  // Return an IMFInputTrustAuthority for the given stream id; the same stream
+  // ID always maps to the same IMFInputTrustAuthority. In addition,
+  // `aContentInitData` is optional initialization data as in
+  // https://www.w3.org/TR/encrypted-media/#initialization-data
+  HRESULT GetInputTrustAuthority(uint32_t aStreamId,
+                                 const uint8_t* aContentInitData,
+                                 uint32_t aContentInitDataSize, REFIID aRiid,
+                                 IUnknown** aInputTrustAuthorityOut);
+
+  // Set an IMFContentEnabler on the existing CDM; `aRequest` must be an
+  // object implementing the `IMFContentEnabler` interface.
+  HRESULT SetContentEnabler(IUnknown* aRequest, IMFAsyncResult* aResult);
+
+  // Notify the CDM on DRM_E_TEE_INVALID_HWDRM_STATE (0x8004cd12), which happens
+  // in cases like OS Sleep. In this case, the CDM should close all sessions
+  // because they are in bad state.
+  void OnHardwareContextReset();
+
+  // TODO : set last key id in order to let CDM use the key IDs information to
+  // perform some optimization.
+
+ private:
+  ~MFCDMProxy() = default;
+
+  // Each stream ID always maps to the same ITA (input trust authority).
+  std::map<uint32_t /* stream Id */,
+           Microsoft::WRL::ComPtr<IMFInputTrustAuthority>>
+      mInputTrustAuthorities;
+
+  Microsoft::WRL::ComPtr<IMFTrustedInput> mTrustedInput;
+
+  // TODO : need some events? (Eg. significant playback, error, hardware context
+  // reset)
+};
+
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFCDMPROXY_H
diff --git a/dom/media/platforms/wmf/MFCDMSession.cpp b/dom/media/platforms/wmf/MFCDMSession.cpp
new file mode 100644
index 0000000000..d693c5a731
--- /dev/null
+++ b/dom/media/platforms/wmf/MFCDMSession.cpp
@@ -0,0 +1,314 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFCDMSession.h"
+
+#include <limits>
+#include <vcruntime.h>
+#include <winerror.h>
+
+#include "MFMediaEngineUtils.h"
+#include "GMPUtils.h" // ToHexString
+#include "mozilla/EMEUtils.h"
+#include "mozilla/dom/MediaKeyMessageEventBinding.h"
+#include "mozilla/dom/MediaKeyStatusMapBinding.h"
+#include "nsThreadUtils.h"
+
+namespace mozilla {
+
+using Microsoft::WRL::ComPtr;
+using Microsoft::WRL::MakeAndInitialize;
+
+#define LOG(msg, ...) EME_LOG("MFCDMSession=%p, " msg, this, ##__VA_ARGS__)
+
+static inline MF_MEDIAKEYSESSION_TYPE ConvertSessionType(
+ KeySystemConfig::SessionType aType) {
+ switch (aType) {
+ case KeySystemConfig::SessionType::Temporary:
+ return MF_MEDIAKEYSESSION_TYPE_TEMPORARY;
+ case KeySystemConfig::SessionType::PersistentLicense:
+ return MF_MEDIAKEYSESSION_TYPE_PERSISTENT_LICENSE;
+ }
+}
+
+static inline LPCWSTR InitDataTypeToString(const nsAString& aInitDataType) {
+ // The strings are defined in https://www.w3.org/TR/eme-initdata-registry/
+ if (aInitDataType.EqualsLiteral("webm")) {
+ return L"webm";
+ } else if (aInitDataType.EqualsLiteral("cenc")) {
+ return L"cenc";
+ } else if (aInitDataType.EqualsLiteral("keyids")) {
+ return L"keyids";
+ } else {
+ return L"unknown";
+ }
+}
+
+// The callback interface which IMFContentDecryptionModuleSession uses for
+// communicating back with MFCDMSession.
+class MFCDMSession::SessionCallbacks final
+ : public Microsoft::WRL::RuntimeClass<
+ Microsoft::WRL::RuntimeClassFlags<Microsoft::WRL::ClassicCom>,
+ IMFContentDecryptionModuleSessionCallbacks> {
+ public:
+ SessionCallbacks() = default;
+ SessionCallbacks(const SessionCallbacks&) = delete;
+ SessionCallbacks& operator=(const SessionCallbacks&) = delete;
+ ~SessionCallbacks() = default;
+
+ HRESULT RuntimeClassInitialize() { return S_OK; }
+
+ // IMFContentDecryptionModuleSessionCallbacks
+ STDMETHODIMP KeyMessage(MF_MEDIAKEYSESSION_MESSAGETYPE aType,
+ const BYTE* aMessage, DWORD aMessageSize,
+ LPCWSTR aUrl) final {
+ CopyableTArray<uint8_t> msg{static_cast<const uint8_t*>(aMessage),
+ aMessageSize};
+ mKeyMessageEvent.Notify(aType, std::move(msg));
+ return S_OK;
+ }
+
+ STDMETHODIMP KeyStatusChanged() final {
+ mKeyChangeEvent.Notify();
+ return S_OK;
+ }
+
+ MediaEventSource<MF_MEDIAKEYSESSION_MESSAGETYPE, nsTArray<uint8_t>>&
+ KeyMessageEvent() {
+ return mKeyMessageEvent;
+ }
+ MediaEventSource<void>& KeyChangeEvent() { return mKeyChangeEvent; }
+
+ private:
+ MediaEventProducer<MF_MEDIAKEYSESSION_MESSAGETYPE, nsTArray<uint8_t>>
+ mKeyMessageEvent;
+ MediaEventProducer<void> mKeyChangeEvent;
+};
+
+/* static*/
+MFCDMSession* MFCDMSession::Create(KeySystemConfig::SessionType aSessionType,
+ IMFContentDecryptionModule* aCdm,
+ nsISerialEventTarget* aManagerThread) {
+ MOZ_ASSERT(aCdm);
+ MOZ_ASSERT(aManagerThread);
+ ComPtr<SessionCallbacks> callbacks;
+ RETURN_PARAM_IF_FAILED(MakeAndInitialize<SessionCallbacks>(&callbacks),
+ nullptr);
+
+ ComPtr<IMFContentDecryptionModuleSession> session;
+ RETURN_PARAM_IF_FAILED(aCdm->CreateSession(ConvertSessionType(aSessionType),
+ callbacks.Get(), &session),
+ nullptr);
+ return new MFCDMSession(session.Get(), callbacks.Get(), aManagerThread);
+}
+
+MFCDMSession::MFCDMSession(IMFContentDecryptionModuleSession* aSession,
+ SessionCallbacks* aCallback,
+ nsISerialEventTarget* aManagerThread)
+ : mSession(aSession),
+ mManagerThread(aManagerThread),
+ mExpiredTimeMilliSecondsSinceEpoch(
+ std::numeric_limits<double>::quiet_NaN()) {
+ MOZ_ASSERT(aSession);
+ MOZ_ASSERT(aCallback);
+ MOZ_ASSERT(aManagerThread);
+ mKeyMessageListener = aCallback->KeyMessageEvent().Connect(
+ mManagerThread, this, &MFCDMSession::OnSessionKeyMessage);
+ mKeyChangeListener = aCallback->KeyChangeEvent().Connect(
+ mManagerThread, this, &MFCDMSession::OnSessionKeysChange);
+}
+
+MFCDMSession::~MFCDMSession() {
+ // TODO : maybe disconnect them in `Close()`?
+ mKeyChangeListener.DisconnectIfExists();
+ mKeyMessageListener.DisconnectIfExists();
+}
+
+HRESULT MFCDMSession::GenerateRequest(const nsAString& aInitDataType,
+ const uint8_t* aInitData,
+ uint32_t aInitDataSize) {
+ AssertOnManagerThread();
+ LOG("GenerateRequest for %s (init sz=%u)",
+ NS_ConvertUTF16toUTF8(aInitDataType).get(), aInitDataSize);
+ RETURN_IF_FAILED(mSession->GenerateRequest(
+ InitDataTypeToString(aInitDataType), aInitData, aInitDataSize));
+ Unused << RetrieveSessionId();
+ return S_OK;
+}
+
+HRESULT MFCDMSession::Load(const nsAString& aSessionId) {
+  AssertOnManagerThread();
+  // TODO : do we need to implement this? Chromium doesn't implement this one.
+  // Also, how do we know whether the given session ID matches the session ID
+  // reported by the CDM session? NOTE(review): Load()'s HRESULT is ignored.
+  BOOL rv = FALSE;
+  mSession->Load(char16ptr_t(aSessionId.BeginReading()), &rv);
+  LOG("Load, id=%s, rv=%s", NS_ConvertUTF16toUTF8(aSessionId).get(),
+      rv ? "success" : "fail");
+  return rv ? S_OK : S_FALSE;
+}
+
+HRESULT MFCDMSession::Update(const nsTArray<uint8_t>& aMessage) {
+ AssertOnManagerThread();
+ LOG("Update");
+ RETURN_IF_FAILED(mSession->Update(
+ static_cast<const BYTE*>(aMessage.Elements()), aMessage.Length()));
+ RETURN_IF_FAILED(UpdateExpirationIfNeeded());
+ return S_OK;
+}
+
+HRESULT MFCDMSession::Close() {
+ AssertOnManagerThread();
+ LOG("Close");
+ RETURN_IF_FAILED(mSession->Close());
+ return S_OK;
+}
+
+HRESULT MFCDMSession::Remove() {
+ AssertOnManagerThread();
+ LOG("Remove");
+ RETURN_IF_FAILED(mSession->Remove());
+ RETURN_IF_FAILED(UpdateExpirationIfNeeded());
+ return S_OK;
+}
+
+bool MFCDMSession::RetrieveSessionId() {
+ AssertOnManagerThread();
+ if (mSessionId) {
+ return true;
+ }
+ ScopedCoMem<wchar_t> sessionId;
+ if (FAILED(mSession->GetSessionId(&sessionId)) || !sessionId) {
+ LOG("Can't get session id or empty session ID!");
+ return false;
+ }
+ LOG("Set session Id %ls", sessionId.Get());
+ mSessionId = Some(sessionId.Get());
+ return true;
+}
+
+void MFCDMSession::OnSessionKeysChange() {
+ AssertOnManagerThread();
+ LOG("OnSessionKeysChange");
+
+ if (!mSessionId) {
+ LOG("Unexpected session keys change ignored");
+ return;
+ }
+
+ ScopedCoMem<MFMediaKeyStatus> keyStatuses;
+ UINT count = 0;
+ RETURN_VOID_IF_FAILED(mSession->GetKeyStatuses(&keyStatuses, &count));
+
+ static auto ToMediaKeyStatus = [](MF_MEDIAKEY_STATUS aStatus) {
+ // https://learn.microsoft.com/en-us/windows/win32/api/mfidl/ne-mfidl-mf_mediakey_status
+ switch (aStatus) {
+ case MF_MEDIAKEY_STATUS_USABLE:
+ return dom::MediaKeyStatus::Usable;
+ case MF_MEDIAKEY_STATUS_EXPIRED:
+ return dom::MediaKeyStatus::Expired;
+ case MF_MEDIAKEY_STATUS_OUTPUT_DOWNSCALED:
+ return dom::MediaKeyStatus::Output_downscaled;
+ // This is for legacy use and should not happen in normal cases. Map it to
+ // internal error in case it happens.
+ case MF_MEDIAKEY_STATUS_OUTPUT_NOT_ALLOWED:
+ return dom::MediaKeyStatus::Internal_error;
+ case MF_MEDIAKEY_STATUS_STATUS_PENDING:
+ return dom::MediaKeyStatus::Status_pending;
+ case MF_MEDIAKEY_STATUS_INTERNAL_ERROR:
+ return dom::MediaKeyStatus::Internal_error;
+ case MF_MEDIAKEY_STATUS_RELEASED:
+ return dom::MediaKeyStatus::Released;
+ case MF_MEDIAKEY_STATUS_OUTPUT_RESTRICTED:
+ return dom::MediaKeyStatus::Output_restricted;
+ }
+ MOZ_ASSERT_UNREACHABLE("Invalid MF_MEDIAKEY_STATUS enum value");
+ return dom::MediaKeyStatus::Internal_error;
+ };
+
+ CopyableTArray<MFCDMKeyInformation> keyInfos;
+ for (uint32_t idx = 0; idx < count; idx++) {
+ const MFMediaKeyStatus& keyStatus = keyStatuses[idx];
+ if (keyStatus.cbKeyId != sizeof(GUID)) {
+ LOG("Key ID with unsupported size ignored");
+ continue;
+ }
+ CopyableTArray<uint8_t> keyId;
+ ByteArrayFromGUID(reinterpret_cast<REFGUID>(keyStatus.pbKeyId), keyId);
+
+ nsAutoCString keyIdString(ToHexString(keyId));
+ LOG("Append keyid-sz=%u, keyid=%s, status=%s", keyStatus.cbKeyId,
+ keyIdString.get(),
+ ToMediaKeyStatusStr(ToMediaKeyStatus(keyStatus.eMediaKeyStatus)));
+ keyInfos.AppendElement(MFCDMKeyInformation{
+ std::move(keyId), ToMediaKeyStatus(keyStatus.eMediaKeyStatus)});
+ }
+ LOG("Notify 'keychange' for %s", NS_ConvertUTF16toUTF8(*mSessionId).get());
+ mKeyChangeEvent.Notify(
+ MFCDMKeyStatusChange{*mSessionId, std::move(keyInfos)});
+
+ // ScopedCoMem<MFMediaKeyStatus> only releases memory for |keyStatuses|. We
+ // need to manually release memory for |pbKeyId| here.
+ for (size_t idx = 0; idx < count; idx++) {
+ if (const auto& keyStatus = keyStatuses[idx]; keyStatus.pbKeyId) {
+ CoTaskMemFree(keyStatus.pbKeyId);
+ }
+ }
+}
+
+HRESULT MFCDMSession::UpdateExpirationIfNeeded() {
+  AssertOnManagerThread();
+  MOZ_ASSERT(mSessionId);
+
+  // The MSDN documentation doesn't mention the unit of the expiration time;
+  // follow Chromium's implementation and treat it as milliseconds since epoch.
+  double newExpiredEpochTimeMs = 0.0;
+  RETURN_IF_FAILED(mSession->GetExpiration(&newExpiredEpochTimeMs));
+
+  if (newExpiredEpochTimeMs == mExpiredTimeMilliSecondsSinceEpoch ||
+      (std::isnan(newExpiredEpochTimeMs) &&
+       std::isnan(mExpiredTimeMilliSecondsSinceEpoch))) {
+    return S_OK;
+  }
+
+  LOG("Session expiration change from %f to %f, notify 'expiration' for %s",
+      mExpiredTimeMilliSecondsSinceEpoch, newExpiredEpochTimeMs,
+      NS_ConvertUTF16toUTF8(*mSessionId).get());
+  mExpiredTimeMilliSecondsSinceEpoch = newExpiredEpochTimeMs;
+  mExpirationEvent.Notify(
+      MFCDMKeyExpiration{*mSessionId, mExpiredTimeMilliSecondsSinceEpoch});
+  return S_OK;
+}
+
+void MFCDMSession::OnSessionKeyMessage(
+ const MF_MEDIAKEYSESSION_MESSAGETYPE& aType,
+ const nsTArray<uint8_t>& aMessage) {
+ AssertOnManagerThread();
+ // Only send key message after the session Id is ready.
+ if (!RetrieveSessionId()) {
+ return;
+ }
+ static auto ToMediaKeyMessageType = [](MF_MEDIAKEYSESSION_MESSAGETYPE aType) {
+ switch (aType) {
+ case MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_REQUEST:
+ return dom::MediaKeyMessageType::License_request;
+ case MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_RENEWAL:
+ return dom::MediaKeyMessageType::License_renewal;
+ case MF_MEDIAKEYSESSION_MESSAGETYPE_LICENSE_RELEASE:
+ return dom::MediaKeyMessageType::License_release;
+ case MF_MEDIAKEYSESSION_MESSAGETYPE_INDIVIDUALIZATION_REQUEST:
+ return dom::MediaKeyMessageType::Individualization_request;
+ default:
+ MOZ_ASSERT_UNREACHABLE("Unknown session message type");
+ return dom::MediaKeyMessageType::EndGuard_;
+ }
+ };
+ LOG("Notify 'keymessage' for %s", NS_ConvertUTF16toUTF8(*mSessionId).get());
+ mKeyMessageEvent.Notify(MFCDMKeyMessage{
+ *mSessionId, ToMediaKeyMessageType(aType), std::move(aMessage)});
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFCDMSession.h b/dom/media/platforms/wmf/MFCDMSession.h
new file mode 100644
index 0000000000..44b7c3b239
--- /dev/null
+++ b/dom/media/platforms/wmf/MFCDMSession.h
@@ -0,0 +1,93 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFCDMSESSION_H
+#define DOM_MEDIA_PLATFORM_WMF_MFCDMSESSION_H
+
+#include <vector>
+#include <wrl.h>
+#include <wrl/client.h>
+
+#include "MFCDMExtra.h"
+#include "MediaEventSource.h"
+#include "mozilla/PMFCDM.h"
+#include "mozilla/KeySystemConfig.h"
+#include "nsAString.h"
+
+namespace mozilla {
+
+// MFCDMSession represents a key session defined by the EME spec, it operates
+// the IMFContentDecryptionModuleSession directly and forwards events from
+// IMFContentDecryptionModuleSession to its caller. It's not thread-safe and
+// can only be used on the manager thread for now.
+class MFCDMSession final {
+ public:
+ ~MFCDMSession();
+
+ static MFCDMSession* Create(KeySystemConfig::SessionType aSessionType,
+ IMFContentDecryptionModule* aCdm,
+ nsISerialEventTarget* aManagerThread);
+
+ // APIs corresponding to EME APIs (MediaKeySession)
+ HRESULT GenerateRequest(const nsAString& aInitDataType,
+ const uint8_t* aInitData, uint32_t aInitDataSize);
+ HRESULT Load(const nsAString& aSessionId);
+ HRESULT Update(const nsTArray<uint8_t>& aMessage);
+ HRESULT Close();
+ HRESULT Remove();
+
+ // Session status related events
+ MediaEventSource<MFCDMKeyMessage>& KeyMessageEvent() {
+ return mKeyMessageEvent;
+ }
+ MediaEventSource<MFCDMKeyStatusChange>& KeyChangeEvent() {
+ return mKeyChangeEvent;
+ }
+ MediaEventSource<MFCDMKeyExpiration>& ExpirationEvent() {
+ return mExpirationEvent;
+ }
+
+ const Maybe<nsString>& SessionID() const { return mSessionId; }
+
+ private:
+ class SessionCallbacks;
+
+ MFCDMSession(IMFContentDecryptionModuleSession* aSession,
+ SessionCallbacks* aCallback,
+ nsISerialEventTarget* aManagerThread);
+ MFCDMSession(const MFCDMSession&) = delete;
+ MFCDMSession& operator=(const MFCDMSession&) = delete;
+
+ bool RetrieveSessionId();
+ void OnSessionKeysChange();
+ void OnSessionKeyMessage(const MF_MEDIAKEYSESSION_MESSAGETYPE& aType,
+ const nsTArray<uint8_t>& aMessage);
+
+ HRESULT UpdateExpirationIfNeeded();
+
+ void AssertOnManagerThread() const {
+ MOZ_ASSERT(mManagerThread->IsOnCurrentThread());
+ }
+
+ const Microsoft::WRL::ComPtr<IMFContentDecryptionModuleSession> mSession;
+ const nsCOMPtr<nsISerialEventTarget> mManagerThread;
+
+ MediaEventProducer<MFCDMKeyMessage> mKeyMessageEvent;
+ MediaEventProducer<MFCDMKeyStatusChange> mKeyChangeEvent;
+ MediaEventProducer<MFCDMKeyExpiration> mExpirationEvent;
+ MediaEventListener mKeyMessageListener;
+ MediaEventListener mKeyChangeListener;
+
+ // IMFContentDecryptionModuleSession's id might not be ready immediately after
+ // the session gets created.
+ Maybe<nsString> mSessionId;
+
+ // NaN when the CDM doesn't explicitly define the time or the time never
+ // expires.
+ double mExpiredTimeMilliSecondsSinceEpoch;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFCDMSESSION_H
diff --git a/dom/media/platforms/wmf/MFContentProtectionManager.cpp b/dom/media/platforms/wmf/MFContentProtectionManager.cpp
new file mode 100644
index 0000000000..79189d59b3
--- /dev/null
+++ b/dom/media/platforms/wmf/MFContentProtectionManager.cpp
@@ -0,0 +1,164 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFContentProtectionManager.h"
+
+#include <hstring.h>
+#include <winnt.h>
+
+#include "MFMediaEngineUtils.h"
+#include "WMF.h"
+
+namespace mozilla {
+
+using Microsoft::WRL::ComPtr;
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFContentProtectionManager=%p, " msg, this, ##__VA_ARGS__))
+
+// Small RAII wrapper around an HSTRING created from a wide-character array.
+// Not copyable/movable by design (single ownership of the HSTRING).
+class ScopedHString final {
+ public:
+  explicit ScopedHString(const WCHAR aCharArray[]) {
+    // The return value is deliberately ignored; on failure mString stays
+    // nullptr, and WindowsDeleteString(nullptr) is documented as a no-op,
+    // so the destructor remains safe.
+    WindowsCreateString(aCharArray, wcslen(aCharArray), &mString);
+  }
+  ~ScopedHString() { WindowsDeleteString(mString); }
+  const HSTRING& Get() { return mString; }
+
+ private:
+  // Default-initialize so a failed WindowsCreateString can never leave this
+  // member holding an indeterminate value.
+  HSTRING mString = nullptr;
+};
+
+// Creates the Windows.Foundation.Collections.PropertySet that will later hold
+// the PMP server (see SetPMPServer) and is exposed to the media engine via
+// get_Properties(). Must succeed before this manager is used.
+HRESULT MFContentProtectionManager::RuntimeClassInitialize() {
+  ScopedHString propertyId(
+      RuntimeClass_Windows_Foundation_Collections_PropertySet);
+  RETURN_IF_FAILED(RoActivateInstance(propertyId.Get(), &mPMPServerSet));
+  return S_OK;
+}
+
+// IMFContentProtectionManager. Invoked by the media engine when it needs a
+// content enabler activated (e.g. for license acquisition). We activate the
+// enabler, determine its type, and hand the request to the CDM via MFCDMProxy,
+// which completes |asyncResult| when done.
+HRESULT MFContentProtectionManager::BeginEnableContent(
+    IMFActivate* aEnablerActivate, IMFTopology* aTopology,
+    IMFAsyncCallback* aCallback, IUnknown* aState) {
+  LOG("BeginEnableContent");
+  ComPtr<IUnknown> unknownObject;
+  ComPtr<IMFAsyncResult> asyncResult;
+  RETURN_IF_FAILED(
+      wmf::MFCreateAsyncResult(nullptr, aCallback, aState, &asyncResult));
+  RETURN_IF_FAILED(
+      aEnablerActivate->ActivateObject(IID_PPV_ARGS(&unknownObject)));
+
+  // The activated object is either a classic IMFContentEnabler or a WinRT
+  // IMediaProtectionServiceRequest; query both to discover the enabler type.
+  GUID enablerType = GUID_NULL;
+  ComPtr<IMFContentEnabler> contentEnabler;
+  if (SUCCEEDED(unknownObject.As(&contentEnabler))) {
+    RETURN_IF_FAILED(contentEnabler->GetEnableType(&enablerType));
+  } else {
+    ComPtr<ABI::Windows::Media::Protection::IMediaProtectionServiceRequest>
+        serviceRequest;
+    RETURN_IF_FAILED(unknownObject.As(&serviceRequest));
+    RETURN_IF_FAILED(serviceRequest->get_Type(&enablerType));
+  }
+
+  // These enabler types cannot be satisfied here; report them as errors.
+  if (enablerType == MFENABLETYPE_MF_RebootRequired) {
+    LOG("Error - MFENABLETYPE_MF_RebootRequired");
+    return MF_E_REBOOT_REQUIRED;
+  } else if (enablerType == MFENABLETYPE_MF_UpdateRevocationInformation) {
+    LOG("Error - MFENABLETYPE_MF_UpdateRevocationInformation");
+    return MF_E_GRL_VERSION_TOO_LOW;
+  } else if (enablerType == MFENABLETYPE_MF_UpdateUntrustedComponent) {
+    LOG("Error - MFENABLETYPE_MF_UpdateUntrustedComponent");
+    return HRESULT_FROM_WIN32(ERROR_INVALID_IMAGE_HASH);
+  }
+
+  MOZ_ASSERT(mCDMProxy);
+  RETURN_IF_FAILED(
+      mCDMProxy->SetContentEnabler(unknownObject.Get(), asyncResult.Get()));
+
+  // TODO : maybe need to notify waiting for key status?
+  LOG("Finished BeginEnableContent");
+  return S_OK;
+}
+
+// Completion handler for BeginEnableContent. Failures are deliberately
+// swallowed (matching Chromium's behavior) so that no additional recovery
+// work is required here; we only log the outcome.
+HRESULT MFContentProtectionManager::EndEnableContent(
+    IMFAsyncResult* aAsyncResult) {
+  const HRESULT status = aAsyncResult->GetStatus();
+  if (SUCCEEDED(status)) {
+    LOG("Content enabling succeeded");
+  } else {
+    LOG("Content enabling failed. hr=%lx", status);
+  }
+  return S_OK;
+}
+
+// IMediaProtectionManager event registrations. All of these return E_NOTIMPL:
+// the media engine queries this interface only to call get_Properties(), so
+// event notification support is intentionally not provided.
+HRESULT MFContentProtectionManager::add_ServiceRequested(
+    ABI::Windows::Media::Protection::IServiceRequestedEventHandler* aHandler,
+    EventRegistrationToken* aCookie) {
+  return E_NOTIMPL;
+}
+
+HRESULT MFContentProtectionManager::remove_ServiceRequested(
+    EventRegistrationToken aCookie) {
+  return E_NOTIMPL;
+}
+
+HRESULT MFContentProtectionManager::add_RebootNeeded(
+    ABI::Windows::Media::Protection::IRebootNeededEventHandler* aHandler,
+    EventRegistrationToken* aCookie) {
+  return E_NOTIMPL;
+}
+
+HRESULT MFContentProtectionManager::remove_RebootNeeded(
+    EventRegistrationToken aCookie) {
+  return E_NOTIMPL;
+}
+
+HRESULT MFContentProtectionManager::add_ComponentLoadFailed(
+    ABI::Windows::Media::Protection::IComponentLoadFailedEventHandler* aHandler,
+    EventRegistrationToken* aCookie) {
+  return E_NOTIMPL;
+}
+
+HRESULT MFContentProtectionManager::remove_ComponentLoadFailed(
+    EventRegistrationToken aCookie) {
+  return E_NOTIMPL;
+}
+
+// Hands the PMP server property set to the media engine. Fails with
+// E_POINTER when RuntimeClassInitialize() never populated the set.
+HRESULT MFContentProtectionManager::get_Properties(
+    ABI::Windows::Foundation::Collections::IPropertySet** properties) {
+  return mPMPServerSet ? mPMPServerSet.CopyTo(properties) : E_POINTER;
+}
+
+// Stores the CDM proxy and plumbs its Protected Media Path (PMP) server into
+// the property set exposed through get_Properties().
+HRESULT MFContentProtectionManager::SetCDMProxy(MFCDMProxy* aCDMProxy) {
+  MOZ_ASSERT(aCDMProxy);
+  mCDMProxy = aCDMProxy;
+  ComPtr<ABI::Windows::Media::Protection::IMediaProtectionPMPServer> pmpServer;
+  RETURN_IF_FAILED(mCDMProxy->GetPMPServer(IID_PPV_ARGS(&pmpServer)));
+  RETURN_IF_FAILED(SetPMPServer(pmpServer.Get()));
+  return S_OK;
+}
+
+// Inserts |aPMPServer| into mPMPServerSet under the well-known key the media
+// engine looks up. Called from SetCDMProxy.
+HRESULT MFContentProtectionManager::SetPMPServer(
+    ABI::Windows::Media::Protection::IMediaProtectionPMPServer* aPMPServer) {
+  MOZ_ASSERT(aPMPServer);
+
+  ComPtr<ABI::Windows::Foundation::Collections::IMap<HSTRING, IInspectable*>>
+      serverMap;
+  RETURN_IF_FAILED(mPMPServerSet.As(&serverMap));
+
+  // MFMediaEngine uses |serverKey| to get the Protected Media Path (PMP)
+  // server used for playing protected content. This is not currently documented
+  // in MSDN.
+  boolean replaced = false;
+  ScopedHString serverKey{L"Windows.Media.Protection.MediaProtectionPMPServer"};
+  RETURN_IF_FAILED(serverMap->Insert(serverKey.Get(), aPMPServer, &replaced));
+  return S_OK;
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFContentProtectionManager.h b/dom/media/platforms/wmf/MFContentProtectionManager.h
new file mode 100644
index 0000000000..964c965c32
--- /dev/null
+++ b/dom/media/platforms/wmf/MFContentProtectionManager.h
@@ -0,0 +1,79 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFCONTENTPROTECTIONMANAGER_H
+#define DOM_MEDIA_PLATFORM_WMF_MFCONTENTPROTECTIONMANAGER_H
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <windows.media.protection.h>
+#include <wrl.h>
+
+#include "MFCDMProxy.h"
+
+namespace mozilla {
+
+/**
+ * MFContentProtectionManager is used to enable the encrypted playback for the
+ * media engine.
+ * https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nn-mfidl-imfcontentprotectionmanager
+ * https://docs.microsoft.com/en-us/uwp/api/windows.media.protection.mediaprotectionmanager
+ */
+class MFContentProtectionManager
+    : public Microsoft::WRL::RuntimeClass<
+          Microsoft::WRL::RuntimeClassFlags<
+              Microsoft::WRL::RuntimeClassType::WinRtClassicComMix |
+              Microsoft::WRL::RuntimeClassType::InhibitRoOriginateError>,
+          IMFContentProtectionManager,
+          ABI::Windows::Media::Protection::IMediaProtectionManager> {
+ public:
+  MFContentProtectionManager() = default;
+  ~MFContentProtectionManager() = default;
+
+  // Creates the property set that will carry the PMP server; called by
+  // Microsoft::WRL::MakeAndInitialize.
+  HRESULT RuntimeClassInitialize();
+
+  // IMFContentProtectionManager.
+  IFACEMETHODIMP BeginEnableContent(IMFActivate* aEnablerActivate,
+                                    IMFTopology* aTopology,
+                                    IMFAsyncCallback* aCallback,
+                                    IUnknown* aState) override;
+  IFACEMETHODIMP EndEnableContent(IMFAsyncResult* aAsyncResult) override;
+
+  // IMediaProtectionManager.
+  // MFMediaEngine can query this interface to invoke get_Properties().
+  // The add_/remove_ event registrations are not implemented.
+  IFACEMETHODIMP add_ServiceRequested(
+      ABI::Windows::Media::Protection::IServiceRequestedEventHandler* aHandler,
+      EventRegistrationToken* aCookie) override;
+  IFACEMETHODIMP remove_ServiceRequested(
+      EventRegistrationToken aCookie) override;
+  IFACEMETHODIMP add_RebootNeeded(
+      ABI::Windows::Media::Protection::IRebootNeededEventHandler* aHandler,
+      EventRegistrationToken* aCookie) override;
+  IFACEMETHODIMP remove_RebootNeeded(EventRegistrationToken aCookie) override;
+  IFACEMETHODIMP add_ComponentLoadFailed(
+      ABI::Windows::Media::Protection::IComponentLoadFailedEventHandler*
+          aHandler,
+      EventRegistrationToken* aCookie) override;
+  IFACEMETHODIMP remove_ComponentLoadFailed(
+      EventRegistrationToken aCookie) override;
+  IFACEMETHODIMP get_Properties(
+      ABI::Windows::Foundation::Collections::IPropertySet** aValue) override;
+
+  // Stores the proxy and installs its PMP server into mPMPServerSet.
+  HRESULT SetCDMProxy(MFCDMProxy* aCDMProxy);
+
+  MFCDMProxy* GetCDMProxy() const { return mCDMProxy; }
+
+ private:
+  HRESULT SetPMPServer(
+      ABI::Windows::Media::Protection::IMediaProtectionPMPServer* aPMPServer);
+
+  RefPtr<MFCDMProxy> mCDMProxy;
+
+  // PropertySet handed to the media engine via get_Properties(); holds the
+  // PMP server under a well-known key (see SetPMPServer).
+  Microsoft::WRL::ComPtr<ABI::Windows::Foundation::Collections::IPropertySet>
+      mPMPServerSet;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFCONTENTPROTECTIONMANAGER_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp b/dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp
new file mode 100644
index 0000000000..4acf26e041
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineAudioStream.cpp
@@ -0,0 +1,137 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaEngineAudioStream.h"
+
+#include <mferror.h>
+#include <mfapi.h>
+
+#include "MFMediaEngineUtils.h"
+#include "WMFUtils.h"
+#include "mozilla/StaticPrefs_media.h"
+
+namespace mozilla {
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFMediaStream=%p (%s), " msg, this, \
+ this->GetDescriptionName().get(), ##__VA_ARGS__))
+
+using Microsoft::WRL::ComPtr;
+using Microsoft::WRL::MakeAndInitialize;
+
+/* static */
+// Factory for an engine-backed audio stream. Returns nullptr when
+// MakeAndInitialize (and thus RuntimeClassInitialize) fails.
+MFMediaEngineAudioStream* MFMediaEngineAudioStream::Create(
+    uint64_t aStreamId, const TrackInfo& aInfo, MFMediaSource* aParentSource) {
+  MOZ_ASSERT(aInfo.IsAudio());
+  // Initialize so a failure path can never observe an indeterminate pointer.
+  MFMediaEngineAudioStream* stream = nullptr;
+  if (FAILED(MakeAndInitialize<MFMediaEngineAudioStream>(
+          &stream, aStreamId, aInfo, aParentSource))) {
+    return nullptr;
+  }
+  return stream;
+}
+
+// Builds the IMFMediaType describing this audio track for the media engine.
+// Also caches the AudioInfo (used later by OutputDataInternal / GetCodecName)
+// and, for encrypted tracks, wraps the type in MFMediaType_Protected.
+HRESULT MFMediaEngineAudioStream::CreateMediaType(const TrackInfo& aInfo,
+                                                  IMFMediaType** aMediaType) {
+  const AudioInfo& info = *aInfo.GetAsAudioInfo();
+  mAudioInfo = info;
+  GUID subType = AudioMimeTypeToMediaFoundationSubtype(info.mMimeType);
+  NS_ENSURE_TRUE(subType != GUID_NULL, MF_E_TOPO_CODEC_NOT_FOUND);
+
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/media-type-attributes
+  ComPtr<IMFMediaType> mediaType;
+  RETURN_IF_FAILED(wmf::MFCreateMediaType(&mediaType));
+  RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_SUBTYPE, subType));
+  RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio));
+  RETURN_IF_FAILED(
+      mediaType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, info.mChannels));
+  RETURN_IF_FAILED(
+      mediaType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, info.mRate));
+  // Fall back to 16 bits per sample when the container did not specify one.
+  uint64_t bitDepth = info.mBitDepth != 0 ? info.mBitDepth : 16;
+  RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, bitDepth));
+  if (subType == MFAudioFormat_AAC) {
+    // AAC needs MF_MT_USER_DATA derived from the codec-specific config;
+    // generate it once and reuse on subsequent calls.
+    if (mAACUserData.IsEmpty()) {
+      MOZ_ASSERT(info.mCodecSpecificConfig.is<AacCodecSpecificData>() ||
+                 info.mCodecSpecificConfig.is<AudioCodecSpecificBinaryBlob>());
+      RefPtr<MediaByteBuffer> blob;
+      if (info.mCodecSpecificConfig.is<AacCodecSpecificData>()) {
+        blob = info.mCodecSpecificConfig.as<AacCodecSpecificData>()
+                   .mDecoderConfigDescriptorBinaryBlob;
+      } else {
+        blob = info.mCodecSpecificConfig.as<AudioCodecSpecificBinaryBlob>()
+                   .mBinaryBlob;
+      }
+      AACAudioSpecificConfigToUserData(info.mExtendedProfile, blob->Elements(),
+                                       blob->Length(), mAACUserData);
+      LOG("Generated AAC user data");
+    }
+    RETURN_IF_FAILED(
+        mediaType->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x0));  // Raw AAC packet
+    RETURN_IF_FAILED(mediaType->SetBlob(
+        MF_MT_USER_DATA, mAACUserData.Elements(), mAACUserData.Length()));
+  }
+  LOG("Created audio type, subtype=%s, channel=%" PRIu32 ", rate=%" PRIu32
+      ", bitDepth=%" PRIu64 ", encrypted=%d",
+      GUIDToStr(subType), info.mChannels, info.mRate, bitDepth,
+      mAudioInfo.mCrypto.IsEncrypted());
+
+  if (IsEncrypted()) {
+    ComPtr<IMFMediaType> protectedMediaType;
+    RETURN_IF_FAILED(wmf::MFWrapMediaType(mediaType.Get(),
+                                          MFMediaType_Protected, subType,
+                                          protectedMediaType.GetAddressOf()));
+    LOG("Wrap MFMediaType_Audio into MFMediaType_Protected");
+    *aMediaType = protectedMediaType.Detach();
+  } else {
+    *aMediaType = mediaType.Detach();
+  }
+  return S_OK;
+}
+
+// Back-pressure check: returns true when the queued raw-audio duration meets
+// the pref-controlled threshold, at which point we stop requesting more input.
+bool MFMediaEngineAudioStream::HasEnoughRawData() const {
+  // If more than this much raw audio is queued, we'll hold off request more
+  // audio.
+  return mRawDataQueueForFeedingEngine.Duration() >=
+         StaticPrefs::media_wmf_media_engine_raw_data_threshold_audio();
+}
+
+// Pops one queued raw sample and returns a placeholder AudioData carrying its
+// offset/time; see the comment below for why no real PCM is produced.
+already_AddRefed<MediaData> MFMediaEngineAudioStream::OutputDataInternal() {
+  AssertOnTaskQueue();
+  if (mRawDataQueueForGeneratingOutput.GetSize() == 0) {
+    return nullptr;
+  }
+  // The media engine doesn't provide a way to allow us to access decoded audio
+  // frames, and the audio playback will be handled internally inside the media
+  // engine. So we simply return fake audio data.
+  RefPtr<MediaRawData> input = mRawDataQueueForGeneratingOutput.PopFront();
+  RefPtr<MediaData> output =
+      new AudioData(input->mOffset, input->mTime, AlignedAudioBuffer{},
+                    mAudioInfo.mChannels, mAudioInfo.mRate);
+  return output.forget();
+}
+
+// Maps this stream's MIME type onto a short codec name string.
+nsCString MFMediaEngineAudioStream::GetCodecName() const {
+  const WMFStreamType streamType =
+      GetStreamTypeFromMimeType(mAudioInfo.mMimeType);
+  if (streamType == WMFStreamType::MP3) {
+    return "mp3"_ns;
+  }
+  if (streamType == WMFStreamType::AAC) {
+    return "aac"_ns;
+  }
+  if (streamType == WMFStreamType::OPUS) {
+    return "opus"_ns;
+  }
+  if (streamType == WMFStreamType::VORBIS) {
+    return "vorbis"_ns;
+  }
+  return "unknown"_ns;
+}
+
+// True when the track's crypto metadata (cached in CreateMediaType) indicates
+// encrypted content.
+bool MFMediaEngineAudioStream::IsEncrypted() const {
+  return mAudioInfo.mCrypto.IsEncrypted();
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaEngineAudioStream.h b/dom/media/platforms/wmf/MFMediaEngineAudioStream.h
new file mode 100644
index 0000000000..14a72b9f63
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineAudioStream.h
@@ -0,0 +1,51 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEAUDIOSTREAM_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEAUDIOSTREAM_H
+
+#include "MFMediaEngineStream.h"
+
+namespace mozilla {
+
+class MFMediaSource;
+
+// Audio variant of MFMediaEngineStream; playback happens inside the media
+// engine, so this stream only feeds raw data and emits placeholder output.
+class MFMediaEngineAudioStream final : public MFMediaEngineStream {
+ public:
+  MFMediaEngineAudioStream() = default;
+
+  // Returns nullptr on failure.
+  static MFMediaEngineAudioStream* Create(uint64_t aStreamId,
+                                          const TrackInfo& aInfo,
+                                          MFMediaSource* aParentSource);
+
+  nsCString GetDescriptionName() const override {
+    return "media engine audio stream"_ns;
+  }
+
+  nsCString GetCodecName() const override;
+
+  TrackInfo::TrackType TrackType() override {
+    return TrackInfo::TrackType::kAudioTrack;
+  }
+
+  bool IsEncrypted() const override;
+
+ private:
+  HRESULT CreateMediaType(const TrackInfo& aInfo,
+                          IMFMediaType** aMediaType) override;
+
+  bool HasEnoughRawData() const override;
+
+  already_AddRefed<MediaData> OutputDataInternal() override;
+
+  // For MF_MT_USER_DATA. Currently only used for AAC.
+  nsTArray<BYTE> mAACUserData;
+
+  // Set when `CreateMediaType()` is called.
+  AudioInfo mAudioInfo;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEAUDIOSTREAM_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp
new file mode 100644
index 0000000000..13be162af5
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.cpp
@@ -0,0 +1,174 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaEngineDecoderModule.h"
+
+#include "MFTDecoder.h"
+#include "VideoUtils.h"
+#include "mozilla/MFMediaEngineParent.h"
+#include "mozilla/MFMediaEngineUtils.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/WindowsVersion.h"
+#include "mozilla/mscom/EnsureMTA.h"
+
+namespace mozilla {
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, (msg, ##__VA_ARGS__))
+
+/* static */
+// Module-level one-time initialization hook; currently a no-op.
+void MFMediaEngineDecoderModule::Init() {
+  // TODO : Init any thing that media engine would need. Implement this when we
+  // start implementing media engine in following patches.
+}
+
+/* static */
+// Factory entry point; the constructor is private, so callers obtain
+// instances exclusively through this helper.
+already_AddRefed<PlatformDecoderModule> MFMediaEngineDecoderModule::Create() {
+  RefPtr<PlatformDecoderModule> module = new MFMediaEngineDecoderModule();
+  return module.forget();
+}
+
+/* static */
+// Static convenience check that spins up a temporary module instance; note it
+// bypasses the media-engine-ID requirement enforced by Supports().
+bool MFMediaEngineDecoderModule::SupportsConfig(const TrackInfo& aConfig) {
+  RefPtr<MFMediaEngineDecoderModule> module = new MFMediaEngineDecoderModule();
+  return module->SupportInternal(SupportDecoderParams(aConfig), nullptr) !=
+         media::DecodeSupport::Unsupported;
+}
+
+// Returns an engine-backed video "decoder" (an MFMediaEngineStream), or
+// nullptr when there is no media engine ID, the feature pref is off, or the
+// referenced engine no longer exists.
+already_AddRefed<MediaDataDecoder>
+MFMediaEngineDecoderModule::CreateVideoDecoder(
+    const CreateDecoderParams& aParams) {
+  if (!aParams.mMediaEngineId ||
+      !StaticPrefs::media_wmf_media_engine_enabled()) {
+    return nullptr;
+  }
+  RefPtr<MFMediaEngineParent> mediaEngine =
+      MFMediaEngineParent::GetMediaEngineById(*aParams.mMediaEngineId);
+  if (!mediaEngine) {
+    LOG("Can't find media engine %" PRIu64 " for video decoder",
+        *aParams.mMediaEngineId);
+    return nullptr;
+  }
+  LOG("MFMediaEngineDecoderModule, CreateVideoDecoder");
+  RefPtr<MediaDataDecoder> decoder = mediaEngine->GetMediaEngineStream(
+      TrackInfo::TrackType::kVideoTrack, aParams);
+  return decoder.forget();
+}
+
+// Audio counterpart of CreateVideoDecoder; same preconditions, but requests a
+// kAudioTrack stream from the engine.
+already_AddRefed<MediaDataDecoder>
+MFMediaEngineDecoderModule::CreateAudioDecoder(
+    const CreateDecoderParams& aParams) {
+  if (!aParams.mMediaEngineId ||
+      !StaticPrefs::media_wmf_media_engine_enabled()) {
+    return nullptr;
+  }
+  RefPtr<MFMediaEngineParent> mediaEngine =
+      MFMediaEngineParent::GetMediaEngineById(*aParams.mMediaEngineId);
+  if (!mediaEngine) {
+    LOG("Can't find media engine %" PRIu64 " for audio decoder",
+        *aParams.mMediaEngineId);
+    return nullptr;
+  }
+  LOG("MFMediaEngineDecoderModule, CreateAudioDecoder");
+  RefPtr<MediaDataDecoder> decoder = mediaEngine->GetMediaEngineStream(
+      TrackInfo::TrackType::kAudioTrack, aParams);
+  return decoder.forget();
+}
+
+// Builds a TrackInfo from the MIME string and defers to the shared
+// SupportInternal check; an unparseable MIME type is unsupported.
+media::DecodeSupportSet MFMediaEngineDecoderModule::SupportsMimeType(
+    const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const {
+  if (UniquePtr<TrackInfo> trackInfo = CreateTrackInfoWithMIMEType(aMimeType)) {
+    return SupportInternal(SupportDecoderParams(*trackInfo), aDiagnostics);
+  }
+  return media::DecodeSupport::Unsupported;
+}
+
+// The engine-backed path requires a media engine ID; without one the request
+// is rejected outright, otherwise defer to the shared check.
+media::DecodeSupportSet MFMediaEngineDecoderModule::Supports(
+    const SupportDecoderParams& aParams,
+    DecoderDoctorDiagnostics* aDiagnostics) const {
+  return aParams.mMediaEngineId
+             ? SupportInternal(aParams, aDiagnostics)
+             : media::DecodeSupport::Unsupported;
+}
+
+// Shared support check: requires the feature pref, a recognized stream type,
+// and a creatable MFT decoder. Always reports SoftwareDecode; hardware
+// capability is not distinguished here.
+media::DecodeSupportSet MFMediaEngineDecoderModule::SupportInternal(
+    const SupportDecoderParams& aParams,
+    DecoderDoctorDiagnostics* aDiagnostics) const {
+  if (!StaticPrefs::media_wmf_media_engine_enabled()) {
+    return media::DecodeSupport::Unsupported;
+  }
+  bool supports = false;
+  WMFStreamType type = GetStreamTypeFromMimeType(aParams.MimeType());
+  if (type != WMFStreamType::Unknown) {
+    supports = CanCreateMFTDecoder(type);
+  }
+  MOZ_LOG(sPDMLog, LogLevel::Debug,
+          ("MFMediaEngine decoder %s requested type '%s'",
+           supports ? "supports" : "rejects", aParams.MimeType().get()));
+  return supports ? media::DecodeSupport::SoftwareDecode
+                  : media::DecodeSupport::Unsupported;
+}
+
+// Probes whether the MFT decoder for |aType| can be instantiated. Must run on
+// an MTA thread (see CanCreateMFTDecoder). Results are memoized per stream
+// type so the probe only ever runs once per type.
+static bool CreateMFTDecoderOnMTA(const WMFStreamType& aType) {
+  static std::unordered_map<WMFStreamType, bool> sResults;
+  // Check the cache first so a hit costs no allocation at all. The original
+  // code allocated the probe decoder unconditionally, paying a heap
+  // allocation even on cached lookups.
+  if (auto rv = sResults.find(aType); rv != sResults.end()) {
+    return rv->second;
+  }
+
+  RefPtr<MFTDecoder> decoder = new MFTDecoder();
+  bool result = false;
+  switch (aType) {
+    case WMFStreamType::MP3:
+      result = SUCCEEDED(decoder->Create(CLSID_CMP3DecMediaObject));
+      break;
+    case WMFStreamType::AAC:
+      result = SUCCEEDED(decoder->Create(CLSID_CMSAACDecMFT));
+      break;
+    // Opus and vorbis are supported via extension.
+    // https://www.microsoft.com/en-us/p/web-media-extensions/9n5tdp8vcmhs
+    case WMFStreamType::OPUS:
+      result = SUCCEEDED(decoder->Create(CLSID_MSOpusDecoder));
+      break;
+    case WMFStreamType::VORBIS:
+      result = SUCCEEDED(decoder->Create(
+          MFT_CATEGORY_AUDIO_DECODER, MFAudioFormat_Vorbis, MFAudioFormat_PCM));
+      break;
+    case WMFStreamType::H264:
+      result = SUCCEEDED(decoder->Create(CLSID_CMSH264DecoderMFT));
+      break;
+    case WMFStreamType::VP8:
+    case WMFStreamType::VP9: {
+      // The VPX MFT is only usable from this Windows build onward.
+      static const uint32_t VPX_USABLE_BUILD = 16287;
+      if (IsWindowsBuildOrLater(VPX_USABLE_BUILD)) {
+        result = SUCCEEDED(decoder->Create(CLSID_CMSVPXDecMFT));
+      }
+      break;
+    }
+#ifdef MOZ_AV1
+    case WMFStreamType::AV1:
+      result = SUCCEEDED(decoder->Create(MFT_CATEGORY_VIDEO_DECODER,
+                                         MFVideoFormat_AV1, GUID_NULL));
+      break;
+#endif
+    default:
+      MOZ_ASSERT_UNREACHABLE("Unexpected type");
+  }
+  sResults.insert({aType, result});
+  return result;
+}
+
+// Runs the decoder-availability probe on an MTA thread, as required for the
+// MFT creation. CreateMFTDecoderOnMTA memoizes its answer per stream type,
+// but every call here still pays the EnsureMTA dispatch.
+bool MFMediaEngineDecoderModule::CanCreateMFTDecoder(
+    const WMFStreamType& aType) const {
+  // TODO : caching the result to prevent performing on MTA thread everytime.
+  bool canCreateDecoder = false;
+  mozilla::mscom::EnsureMTA(
+      [&]() { canCreateDecoder = CreateMFTDecoderOnMTA(aType); });
+  return canCreateDecoder;
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaEngineDecoderModule.h b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.h
new file mode 100644
index 0000000000..97a434fca8
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineDecoderModule.h
@@ -0,0 +1,45 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEDECODERMODULE_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEDECODERMODULE_H
+
+#include "PlatformDecoderModule.h"
+#include "WMFUtils.h"
+
+namespace mozilla {
+
+// PlatformDecoderModule backed by the Windows MFMediaEngine; "decoders" it
+// hands out are engine streams rather than real frame decoders.
+class MFMediaEngineDecoderModule final : public PlatformDecoderModule {
+ public:
+  static void Init();
+
+  static already_AddRefed<PlatformDecoderModule> Create();
+
+  // Support check that does not require a media engine ID (cf. Supports()).
+  static bool SupportsConfig(const TrackInfo& aConfig);
+
+  already_AddRefed<MediaDataDecoder> CreateVideoDecoder(
+      const CreateDecoderParams& aParams) override;
+
+  already_AddRefed<MediaDataDecoder> CreateAudioDecoder(
+      const CreateDecoderParams& aParams) override;
+
+  media::DecodeSupportSet SupportsMimeType(
+      const nsACString& aMimeType,
+      DecoderDoctorDiagnostics* aDiagnostics) const override;
+  media::DecodeSupportSet Supports(
+      const SupportDecoderParams& aParams,
+      DecoderDoctorDiagnostics* aDiagnostics) const override;
+
+ private:
+  media::DecodeSupportSet SupportInternal(
+      const SupportDecoderParams& aParams,
+      DecoderDoctorDiagnostics* aDiagnostics) const;
+  bool CanCreateMFTDecoder(const WMFStreamType& aType) const;
+  // Instances are created only through Create()/SupportsConfig().
+  MFMediaEngineDecoderModule() = default;
+  ~MFMediaEngineDecoderModule() = default;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEDECODERMODULE_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineExtension.cpp b/dom/media/platforms/wmf/MFMediaEngineExtension.cpp
new file mode 100644
index 0000000000..eb761da364
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineExtension.cpp
@@ -0,0 +1,88 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaEngineExtension.h"
+
+#include <mfapi.h>
+#include <mferror.h>
+
+#include "MFMediaSource.h"
+#include "MFMediaEngineUtils.h"
+#include "WMF.h"
+
+namespace mozilla {
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFMediaEngineExtension=%p, " msg, this, ##__VA_ARGS__))
+
+using Microsoft::WRL::ComPtr;
+
+// Stores the custom media source that BeginCreateObject will hand back to the
+// media engine when it asks us to resolve the source object.
+void MFMediaEngineExtension::SetMediaSource(IMFMediaSource* aMediaSource) {
+  LOG("SetMediaSource=%p", aMediaSource);
+  mMediaSource = aMediaSource;
+}
+
+// https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nf-mfmediaengine-imfmediaengineextension-begincreateobject
+// Resolves object creation synchronously: the media source was supplied
+// earlier via SetMediaSource, so the async result is completed and the
+// callback invoked inline before returning.
+IFACEMETHODIMP MFMediaEngineExtension::BeginCreateObject(
+    BSTR aUrl, IMFByteStream* aByteStream, MF_OBJECT_TYPE aType,
+    IUnknown** aCancelCookie, IMFAsyncCallback* aCallback, IUnknown* aState) {
+  if (aCancelCookie) {
+    // We don't support a cancel cookie.
+    *aCancelCookie = nullptr;
+  }
+
+  if (aType != MF_OBJECT_MEDIASOURCE) {
+    LOG("Only support media source type");
+    return MF_E_UNEXPECTED;
+  }
+
+  MOZ_ASSERT(mMediaSource);
+  ComPtr<IMFAsyncResult> result;
+  ComPtr<IUnknown> sourceUnknown = mMediaSource;
+  RETURN_IF_FAILED(wmf::MFCreateAsyncResult(sourceUnknown.Get(), aCallback,
+                                            aState, &result));
+  RETURN_IF_FAILED(result->SetStatus(S_OK));
+
+  LOG("Creating object");
+  mIsObjectCreating = true;
+
+  RETURN_IF_FAILED(aCallback->Invoke(result.Get()));
+  return S_OK;
+}
+
+// Creation completes synchronously inside BeginCreateObject, so there is
+// never an in-flight operation to cancel.
+IFACEMETHODIMP MFMediaEngineExtension::CancelObjectCreation(
+    IUnknown* aCancelCookie) {
+  return MF_E_UNEXPECTED;
+}
+
+// Completes the creation started in BeginCreateObject and returns the media
+// source stored in the async result. Fails if no creation is pending.
+IFACEMETHODIMP MFMediaEngineExtension::EndCreateObject(IMFAsyncResult* aResult,
+                                                       IUnknown** aRetObj) {
+  *aRetObj = nullptr;
+  if (!mIsObjectCreating) {
+    LOG("No object is creating, not an expected call");
+    return MF_E_UNEXPECTED;
+  }
+
+  RETURN_IF_FAILED(aResult->GetStatus());
+  RETURN_IF_FAILED(aResult->GetObject(aRetObj));
+
+  LOG("End of creating object");
+  mIsObjectCreating = false;
+  return S_OK;
+}
+
+// Always reports "not supported" by design; see the comment below.
+IFACEMETHODIMP MFMediaEngineExtension::CanPlayType(
+    BOOL aIsAudioOnly, BSTR aMimeType, MF_MEDIA_ENGINE_CANPLAY* aResult) {
+  // We use MF_MEDIA_ENGINE_EXTENSION to resolve as custom media source for
+  // MFMediaEngine, MIME types are not used.
+  *aResult = MF_MEDIA_ENGINE_CANPLAY_NOT_SUPPORTED;
+  return S_OK;
+}
+
+// TODO : break cycle of mMediaSource
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaEngineExtension.h b/dom/media/platforms/wmf/MFMediaEngineExtension.h
new file mode 100644
index 0000000000..e6b9dde96d
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineExtension.h
@@ -0,0 +1,49 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTENSION_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTENSION_H
+
+#include <wrl.h>
+
+#include "MFMediaEngineExtra.h"
+
+namespace mozilla {
+
+/**
+ * MFMediaEngineExtension is used to load media resources in the media engine.
+ * https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nn-mfmediaengine-imfmediaengineextension
+ */
+class MFMediaEngineExtension final
+    : public Microsoft::WRL::RuntimeClass<
+          Microsoft::WRL::RuntimeClassFlags<
+              Microsoft::WRL::RuntimeClassType::ClassicCom>,
+          IMFMediaEngineExtension> {
+ public:
+  MFMediaEngineExtension() = default;
+
+  HRESULT RuntimeClassInitialize() { return S_OK; }
+
+  // Supplies the custom media source handed back in BeginCreateObject.
+  void SetMediaSource(IMFMediaSource* aMediaSource);
+
+  // Method for MFMediaEngineExtension
+  IFACEMETHODIMP BeginCreateObject(BSTR aUrl, IMFByteStream* aByteStream,
+                                   MF_OBJECT_TYPE aType,
+                                   IUnknown** aCancelCookie,
+                                   IMFAsyncCallback* aCallback,
+                                   IUnknown* aState) override;
+  IFACEMETHODIMP CancelObjectCreation(IUnknown* aCancelCookie) override;
+  IFACEMETHODIMP EndCreateObject(IMFAsyncResult* aResult,
+                                 IUnknown** aRetObj) override;
+  IFACEMETHODIMP CanPlayType(BOOL aIsAudioOnly, BSTR aMimeType,
+                             MF_MEDIA_ENGINE_CANPLAY* aResult) override;
+
+ private:
+  // Set in BeginCreateObject, cleared in EndCreateObject; guards against an
+  // EndCreateObject call with no creation in flight.
+  bool mIsObjectCreating = false;
+  Microsoft::WRL::ComPtr<IMFMediaSource> mMediaSource;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTENSION_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineExtra.h b/dom/media/platforms/wmf/MFMediaEngineExtra.h
new file mode 100644
index 0000000000..238db9e238
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineExtra.h
@@ -0,0 +1,715 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTRA_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTRA_H
+
+#include <evr.h>
+#include <mfmediaengine.h>
+
+// Currently, we build with WINVER=0x601 (Win7), which means newer
+// declarations in mfmediaengine.h will not be visible. Also, we don't
+// yet have the Fall Creators Update SDK available on build machines,
+// so even with updated WINVER, some of the interfaces we need would
+// not be present.
+// To work around this, until the build environment is updated,
+// we include copies of the relevant classes/interfaces we need.
+#if !defined(WINVER) || WINVER < 0x0602
+
+# define WS_EX_NOREDIRECTIONBITMAP 0x00200000L
+
+EXTERN_GUID(MF_MEDIA_ENGINE_CALLBACK, 0xc60381b8, 0x83a4, 0x41f8, 0xa3, 0xd0,
+ 0xde, 0x05, 0x07, 0x68, 0x49, 0xa9);
+EXTERN_GUID(MF_MEDIA_ENGINE_DXGI_MANAGER, 0x065702da, 0x1094, 0x486d, 0x86,
+ 0x17, 0xee, 0x7c, 0xc4, 0xee, 0x46, 0x48);
+EXTERN_GUID(MF_MEDIA_ENGINE_EXTENSION, 0x3109fd46, 0x060d, 0x4b62, 0x8d, 0xcf,
+ 0xfa, 0xff, 0x81, 0x13, 0x18, 0xd2);
+EXTERN_GUID(MF_MEDIA_ENGINE_PLAYBACK_HWND, 0xd988879b, 0x67c9, 0x4d92, 0xba,
+ 0xa7, 0x6e, 0xad, 0xd4, 0x46, 0x03, 0x9d);
+EXTERN_GUID(MF_MEDIA_ENGINE_OPM_HWND, 0xa0be8ee7, 0x0572, 0x4f2c, 0xa8, 0x01,
+ 0x2a, 0x15, 0x1b, 0xd3, 0xe7, 0x26);
+EXTERN_GUID(MF_MEDIA_ENGINE_PLAYBACK_VISUAL, 0x6debd26f, 0x6ab9, 0x4d7e, 0xb0,
+ 0xee, 0xc6, 0x1a, 0x73, 0xff, 0xad, 0x15);
+EXTERN_GUID(MF_MEDIA_ENGINE_COREWINDOW, 0xfccae4dc, 0x0b7f, 0x41c2, 0x9f, 0x96,
+ 0x46, 0x59, 0x94, 0x8a, 0xcd, 0xdc);
+EXTERN_GUID(MF_MEDIA_ENGINE_VIDEO_OUTPUT_FORMAT, 0x5066893c, 0x8cf9, 0x42bc,
+ 0x8b, 0x8a, 0x47, 0x22, 0x12, 0xe5, 0x27, 0x26);
+EXTERN_GUID(MF_MEDIA_ENGINE_CONTENT_PROTECTION_FLAGS, 0xe0350223, 0x5aaf,
+ 0x4d76, 0xa7, 0xc3, 0x06, 0xde, 0x70, 0x89, 0x4d, 0xb4);
+EXTERN_GUID(MF_MEDIA_ENGINE_CONTENT_PROTECTION_MANAGER, 0xfdd6dfaa, 0xbd85,
+ 0x4af3, 0x9e, 0x0f, 0xa0, 0x1d, 0x53, 0x9d, 0x87, 0x6a);
+EXTERN_GUID(MF_MEDIA_ENGINE_AUDIO_ENDPOINT_ROLE, 0xd2cb93d1, 0x116a, 0x44f2,
+ 0x93, 0x85, 0xf7, 0xd0, 0xfd, 0xa2, 0xfb, 0x46);
+EXTERN_GUID(MF_MEDIA_ENGINE_AUDIO_CATEGORY, 0xc8d4c51d, 0x350e, 0x41f2, 0xba,
+ 0x46, 0xfa, 0xeb, 0xbb, 0x08, 0x57, 0xf6);
+EXTERN_GUID(MF_MEDIA_ENGINE_STREAM_CONTAINS_ALPHA_CHANNEL, 0x5cbfaf44, 0xd2b2,
+ 0x4cfb, 0x80, 0xa7, 0xd4, 0x29, 0xc7, 0x4c, 0x78, 0x9d);
+EXTERN_GUID(MF_MEDIA_ENGINE_BROWSER_COMPATIBILITY_MODE, 0x4e0212e2, 0xe18f,
+ 0x41e1, 0x95, 0xe5, 0xc0, 0xe7, 0xe9, 0x23, 0x5b, 0xc3);
+EXTERN_GUID(MF_MEDIA_ENGINE_BROWSER_COMPATIBILITY_MODE_IE9, 0x052c2d39, 0x40c0,
+ 0x4188, 0xab, 0x86, 0xf8, 0x28, 0x27, 0x3b, 0x75, 0x22);
+EXTERN_GUID(MF_MEDIA_ENGINE_BROWSER_COMPATIBILITY_MODE_IE10, 0x11a47afd, 0x6589,
+ 0x4124, 0xb3, 0x12, 0x61, 0x58, 0xec, 0x51, 0x7f, 0xc3);
+EXTERN_GUID(MF_MEDIA_ENGINE_BROWSER_COMPATIBILITY_MODE_IE11, 0x1cf1315f, 0xce3f,
+ 0x4035, 0x93, 0x91, 0x16, 0x14, 0x2f, 0x77, 0x51, 0x89);
+EXTERN_GUID(MF_MEDIA_ENGINE_BROWSER_COMPATIBILITY_MODE_IE_EDGE, 0xa6f3e465,
+ 0x3aca, 0x442c, 0xa3, 0xf0, 0xad, 0x6d, 0xda, 0xd8, 0x39, 0xae);
+EXTERN_GUID(MF_MEDIA_ENGINE_COMPATIBILITY_MODE, 0x3ef26ad4, 0xdc54, 0x45de,
+ 0xb9, 0xaf, 0x76, 0xc8, 0xc6, 0x6b, 0xfa, 0x8e);
+EXTERN_GUID(MF_MEDIA_ENGINE_COMPATIBILITY_MODE_WWA_EDGE, 0x15b29098, 0x9f01,
+ 0x4e4d, 0xb6, 0x5a, 0xc0, 0x6c, 0x6c, 0x89, 0xda, 0x2a);
+EXTERN_GUID(MF_MEDIA_ENGINE_COMPATIBILITY_MODE_WIN10, 0x5b25e089, 0x6ca7,
+ 0x4139, 0xa2, 0xcb, 0xfc, 0xaa, 0xb3, 0x95, 0x52, 0xa3);
+EXTERN_GUID(MF_MEDIA_ENGINE_SOURCE_RESOLVER_CONFIG_STORE, 0x0ac0c497, 0xb3c4,
+ 0x48c9, 0x9c, 0xde, 0xbb, 0x8c, 0xa2, 0x44, 0x2c, 0xa3);
+EXTERN_GUID(MF_MEDIA_ENGINE_TRACK_ID, 0x65bea312, 0x4043, 0x4815, 0x8e, 0xab,
+ 0x44, 0xdc, 0xe2, 0xef, 0x8f, 0x2a);
+EXTERN_GUID(MF_MEDIA_ENGINE_TELEMETRY_APPLICATION_ID, 0x1e7b273b, 0xa7e4,
+ 0x402a, 0x8f, 0x51, 0xc4, 0x8e, 0x88, 0xa2, 0xca, 0xbc);
+EXTERN_GUID(MF_MEDIA_ENGINE_SYNCHRONOUS_CLOSE, 0xc3c2e12f, 0x7e0e, 0x4e43, 0xb9,
+ 0x1c, 0xdc, 0x99, 0x2c, 0xcd, 0xfa, 0x5e);
+EXTERN_GUID(MF_MEDIA_ENGINE_MEDIA_PLAYER_MODE, 0x3ddd8d45, 0x5aa1, 0x4112, 0x82,
+ 0xe5, 0x36, 0xf6, 0xa2, 0x19, 0x7e, 0x6e);
+EXTERN_GUID(CLSID_MFMediaEngineClassFactory, 0xb44392da, 0x499b, 0x446b, 0xa4,
+ 0xcb, 0x0, 0x5f, 0xea, 0xd0, 0xe6, 0xd5);
+EXTERN_GUID(MF_MT_VIDEO_ROTATION, 0xc380465d, 0x2271, 0x428c, 0x9b, 0x83, 0xec,
+ 0xea, 0x3b, 0x4a, 0x85, 0xc1);
+
+typedef enum _MFVideoRotationFormat {
+ MFVideoRotationFormat_0 = 0,
+ MFVideoRotationFormat_90 = 90,
+ MFVideoRotationFormat_180 = 180,
+ MFVideoRotationFormat_270 = 270
+} MFVideoRotationFormat;
+
+typedef enum MF_MEDIA_ENGINE_EVENT {
+ MF_MEDIA_ENGINE_EVENT_LOADSTART = 1,
+ MF_MEDIA_ENGINE_EVENT_PROGRESS = 2,
+ MF_MEDIA_ENGINE_EVENT_SUSPEND = 3,
+ MF_MEDIA_ENGINE_EVENT_ABORT = 4,
+ MF_MEDIA_ENGINE_EVENT_ERROR = 5,
+ MF_MEDIA_ENGINE_EVENT_EMPTIED = 6,
+ MF_MEDIA_ENGINE_EVENT_STALLED = 7,
+ MF_MEDIA_ENGINE_EVENT_PLAY = 8,
+ MF_MEDIA_ENGINE_EVENT_PAUSE = 9,
+ MF_MEDIA_ENGINE_EVENT_LOADEDMETADATA = 10,
+ MF_MEDIA_ENGINE_EVENT_LOADEDDATA = 11,
+ MF_MEDIA_ENGINE_EVENT_WAITING = 12,
+ MF_MEDIA_ENGINE_EVENT_PLAYING = 13,
+ MF_MEDIA_ENGINE_EVENT_CANPLAY = 14,
+ MF_MEDIA_ENGINE_EVENT_CANPLAYTHROUGH = 15,
+ MF_MEDIA_ENGINE_EVENT_SEEKING = 16,
+ MF_MEDIA_ENGINE_EVENT_SEEKED = 17,
+ MF_MEDIA_ENGINE_EVENT_TIMEUPDATE = 18,
+ MF_MEDIA_ENGINE_EVENT_ENDED = 19,
+ MF_MEDIA_ENGINE_EVENT_RATECHANGE = 20,
+ MF_MEDIA_ENGINE_EVENT_DURATIONCHANGE = 21,
+ MF_MEDIA_ENGINE_EVENT_VOLUMECHANGE = 22,
+ MF_MEDIA_ENGINE_EVENT_FORMATCHANGE = 1000,
+ MF_MEDIA_ENGINE_EVENT_PURGEQUEUEDEVENTS = 1001,
+ MF_MEDIA_ENGINE_EVENT_TIMELINE_MARKER = 1002,
+ MF_MEDIA_ENGINE_EVENT_BALANCECHANGE = 1003,
+ MF_MEDIA_ENGINE_EVENT_DOWNLOADCOMPLETE = 1004,
+ MF_MEDIA_ENGINE_EVENT_BUFFERINGSTARTED = 1005,
+ MF_MEDIA_ENGINE_EVENT_BUFFERINGENDED = 1006,
+ MF_MEDIA_ENGINE_EVENT_FRAMESTEPCOMPLETED = 1007,
+ MF_MEDIA_ENGINE_EVENT_NOTIFYSTABLESTATE = 1008,
+ MF_MEDIA_ENGINE_EVENT_FIRSTFRAMEREADY = 1009,
+ MF_MEDIA_ENGINE_EVENT_TRACKSCHANGE = 1010,
+ MF_MEDIA_ENGINE_EVENT_OPMINFO = 1011,
+ MF_MEDIA_ENGINE_EVENT_RESOURCELOST = 1012,
+ MF_MEDIA_ENGINE_EVENT_DELAYLOADEVENT_CHANGED = 1013,
+ MF_MEDIA_ENGINE_EVENT_STREAMRENDERINGERROR = 1014,
+ MF_MEDIA_ENGINE_EVENT_SUPPORTEDRATES_CHANGED = 1015,
+ MF_MEDIA_ENGINE_EVENT_AUDIOENDPOINTCHANGE = 1016
+} MF_MEDIA_ENGINE_EVENT;
+
+typedef enum MF_MEDIA_ENGINE_PROTECTION_FLAGS {
+ MF_MEDIA_ENGINE_ENABLE_PROTECTED_CONTENT = 1,
+ MF_MEDIA_ENGINE_USE_PMP_FOR_ALL_CONTENT = 2,
+ MF_MEDIA_ENGINE_USE_UNPROTECTED_PMP = 4
+} MF_MEDIA_ENGINE_PROTECTION_FLAGS;
+
+typedef enum MF_MEDIA_ENGINE_CREATEFLAGS {
+ MF_MEDIA_ENGINE_AUDIOONLY = 0x1,
+ MF_MEDIA_ENGINE_WAITFORSTABLE_STATE = 0x2,
+ MF_MEDIA_ENGINE_FORCEMUTE = 0x4,
+ MF_MEDIA_ENGINE_REAL_TIME_MODE = 0x8,
+ MF_MEDIA_ENGINE_DISABLE_LOCAL_PLUGINS = 0x10,
+ MF_MEDIA_ENGINE_CREATEFLAGS_MASK = 0x1f
+} MF_MEDIA_ENGINE_CREATEFLAGS;
+
+typedef enum MF_MEDIA_ENGINE_S3D_PACKING_MODE {
+ MF_MEDIA_ENGINE_S3D_PACKING_MODE_NONE = 0,
+ MF_MEDIA_ENGINE_S3D_PACKING_MODE_SIDE_BY_SIDE = 1,
+ MF_MEDIA_ENGINE_S3D_PACKING_MODE_TOP_BOTTOM = 2
+} MF_MEDIA_ENGINE_S3D_PACKING_MODE;
+
+typedef enum MF_MEDIA_ENGINE_STATISTIC {
+ MF_MEDIA_ENGINE_STATISTIC_FRAMES_RENDERED = 0,
+ MF_MEDIA_ENGINE_STATISTIC_FRAMES_DROPPED = 1,
+ MF_MEDIA_ENGINE_STATISTIC_BYTES_DOWNLOADED = 2,
+ MF_MEDIA_ENGINE_STATISTIC_BUFFER_PROGRESS = 3,
+ MF_MEDIA_ENGINE_STATISTIC_FRAMES_PER_SECOND = 4,
+ MF_MEDIA_ENGINE_STATISTIC_PLAYBACK_JITTER = 5,
+ MF_MEDIA_ENGINE_STATISTIC_FRAMES_CORRUPTED = 6,
+ MF_MEDIA_ENGINE_STATISTIC_TOTAL_FRAME_DELAY = 7
+} MF_MEDIA_ENGINE_STATISTIC;
+
+typedef enum MF_MEDIA_ENGINE_SEEK_MODE {
+ MF_MEDIA_ENGINE_SEEK_MODE_NORMAL = 0,
+ MF_MEDIA_ENGINE_SEEK_MODE_APPROXIMATE = 1
+} MF_MEDIA_ENGINE_SEEK_MODE;
+
+typedef enum MF_MEDIA_ENGINE_ERR {
+ MF_MEDIA_ENGINE_ERR_NOERROR = 0,
+ MF_MEDIA_ENGINE_ERR_ABORTED = 1,
+ MF_MEDIA_ENGINE_ERR_NETWORK = 2,
+ MF_MEDIA_ENGINE_ERR_DECODE = 3,
+ MF_MEDIA_ENGINE_ERR_SRC_NOT_SUPPORTED = 4,
+ MF_MEDIA_ENGINE_ERR_ENCRYPTED = 5
+} MF_MEDIA_ENGINE_ERR;
+
+typedef enum MF_MEDIA_ENGINE_NETWORK {
+ MF_MEDIA_ENGINE_NETWORK_EMPTY = 0,
+ MF_MEDIA_ENGINE_NETWORK_IDLE = 1,
+ MF_MEDIA_ENGINE_NETWORK_LOADING = 2,
+ MF_MEDIA_ENGINE_NETWORK_NO_SOURCE = 3
+} MF_MEDIA_ENGINE_NETWORK;
+
+typedef enum MF_MEDIA_ENGINE_READY {
+ MF_MEDIA_ENGINE_READY_HAVE_NOTHING = 0,
+ MF_MEDIA_ENGINE_READY_HAVE_METADATA = 1,
+ MF_MEDIA_ENGINE_READY_HAVE_CURRENT_DATA = 2,
+ MF_MEDIA_ENGINE_READY_HAVE_FUTURE_DATA = 3,
+ MF_MEDIA_ENGINE_READY_HAVE_ENOUGH_DATA = 4
+} MF_MEDIA_ENGINE_READY;
+
+typedef enum MF_MEDIA_ENGINE_CANPLAY {
+ MF_MEDIA_ENGINE_CANPLAY_NOT_SUPPORTED = 0,
+ MF_MEDIA_ENGINE_CANPLAY_MAYBE = 1,
+ MF_MEDIA_ENGINE_CANPLAY_PROBABLY = 2
+} MF_MEDIA_ENGINE_CANPLAY;
+
+typedef enum MF_MEDIA_ENGINE_PRELOAD {
+ MF_MEDIA_ENGINE_PRELOAD_MISSING = 0,
+ MF_MEDIA_ENGINE_PRELOAD_EMPTY = 1,
+ MF_MEDIA_ENGINE_PRELOAD_NONE = 2,
+ MF_MEDIA_ENGINE_PRELOAD_METADATA = 3,
+ MF_MEDIA_ENGINE_PRELOAD_AUTOMATIC = 4
+} MF_MEDIA_ENGINE_PRELOAD;
+
+typedef enum _MF3DVideoOutputType {
+ MF3DVideoOutputType_BaseView = 0,
+ MF3DVideoOutputType_Stereo = 1
+} MF3DVideoOutputType;
+
+# ifndef __IMFMediaEngineNotify_INTERFACE_DEFINED__
+# define __IMFMediaEngineNotify_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngineNotify */
+/* [local][unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFMediaEngineNotify;
+MIDL_INTERFACE("fee7c112-e776-42b5-9bbf-0048524e2bd5")
+IMFMediaEngineNotify : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE EventNotify(
+ /* [annotation][in] */
+ _In_ DWORD event,
+ /* [annotation][in] */
+ _In_ DWORD_PTR param1,
+ /* [annotation][in] */
+ _In_ DWORD param2) = 0;
+};
+
+# endif /* __IMFMediaEngineNotify_INTERFACE_DEFINED__ */
+
+# ifndef __IMFMediaEngineExtension_INTERFACE_DEFINED__
+# define __IMFMediaEngineExtension_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngineExtension */
+/* [local][unique][uuid][object] */
+EXTERN_C const IID IID_IMFMediaEngineExtension;
+MIDL_INTERFACE("2f69d622-20b5-41e9-afdf-89ced1dda04e")
+IMFMediaEngineExtension : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE CanPlayType(
+ /* [annotation][in] */
+ _In_ BOOL AudioOnly,
+ /* [annotation][in] */
+ _In_ BSTR MimeType,
+ /* [annotation][out] */
+ _Out_ MF_MEDIA_ENGINE_CANPLAY * pAnswer) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE BeginCreateObject(
+ /* [annotation][in] */
+ _In_ BSTR bstrURL,
+ /* [annotation][in] */
+ _In_opt_ IMFByteStream * pByteStream,
+ /* [annotation][in] */
+ _In_ MF_OBJECT_TYPE type,
+ /* [annotation][out] */
+ _Outptr_ IUnknown * *ppIUnknownCancelCookie,
+ /* [annotation][in] */
+ _In_ IMFAsyncCallback * pCallback,
+ /* [annotation][in] */
+ _In_opt_ IUnknown * punkState) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CancelObjectCreation(
+ /* [annotation][in] */
+ _In_ IUnknown * pIUnknownCancelCookie) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE EndCreateObject(
+ /* [annotation][in] */
+ _In_ IMFAsyncResult * pResult,
+ /* [annotation][out] */
+ _Outptr_ IUnknown * *ppObject) = 0;
+};
+
+# endif /* __IMFMediaEngineExtension_INTERFACE_DEFINED__ */
+
+# ifndef __IMFMediaEngineClassFactory_INTERFACE_DEFINED__
+# define __IMFMediaEngineClassFactory_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngineClassFactory */
+/* [local][unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFMediaEngineClassFactory;
+
+MIDL_INTERFACE("4D645ACE-26AA-4688-9BE1-DF3516990B93")
+IMFMediaEngineClassFactory : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE CreateInstance(
+ /* [annotation][in] */
+ _In_ DWORD dwFlags,
+ /* [annotation][in] */
+ _In_ IMFAttributes * pAttr,
+ /* [annotation][out] */
+ _Outptr_ IMFMediaEngine * *ppPlayer) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CreateTimeRange(
+ /* [annotation][out] */
+ _Outptr_ IMFMediaTimeRange * *ppTimeRange) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CreateError(
+ /* [annotation][out] */
+ _Outptr_ IMFMediaError * *ppError) = 0;
+};
+
+# endif /* __IMFMediaEngineClassFactory_INTERFACE_DEFINED__ */
+
+# ifndef __IMFMediaEngineClassFactory4_INTERFACE_DEFINED__
+# define __IMFMediaEngineClassFactory4_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngineClassFactory4 */
+/* [local][uuid][object] */
+
+EXTERN_C const IID IID_IMFMediaEngineClassFactory4;
+
+MIDL_INTERFACE("fbe256c1-43cf-4a9b-8cb8-ce8632a34186")
+IMFMediaEngineClassFactory4 : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE CreateContentDecryptionModuleFactory(
+ /* [annotation][in] */
+ _In_ LPCWSTR keySystem,
+ /* [annotation][in] */
+ _In_ REFIID riid,
+ /* [annotation][iid_is][out] */
+ _Outptr_ LPVOID * ppvObject) = 0;
+};
+# endif // __IMFMediaEngineClassFactory4_INTERFACE_DEFINED__
+
+# ifndef __IMFMediaEngine_INTERFACE_DEFINED__
+# define __IMFMediaEngine_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngine */
+/* [local][unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFMediaEngine;
+MIDL_INTERFACE("98a1b0bb-03eb-4935-ae7c-93c1fa0e1c93")
+IMFMediaEngine : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE GetError(
+ /* [annotation][out] */
+ _Outptr_ IMFMediaError * *ppError) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetErrorCode(
+ /* [annotation][in] */
+ _In_ MF_MEDIA_ENGINE_ERR error) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetSourceElements(
+ /* [annotation][in] */
+ _In_ IMFMediaEngineSrcElements * pSrcElements) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetSource(
+ /* [annotation][in] */
+ _In_ BSTR pUrl) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetCurrentSource(
+ /* [annotation][out] */
+ _Out_ BSTR * ppUrl) = 0;
+
+ virtual USHORT STDMETHODCALLTYPE GetNetworkState(void) = 0;
+
+ virtual MF_MEDIA_ENGINE_PRELOAD STDMETHODCALLTYPE GetPreload(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetPreload(
+ /* [annotation][in] */
+ _In_ MF_MEDIA_ENGINE_PRELOAD Preload) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetBuffered(
+ /* [annotation][out] */
+ _Outptr_ IMFMediaTimeRange * *ppBuffered) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Load(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CanPlayType(
+ /* [annotation][in] */
+ _In_ BSTR type,
+ /* [annotation][out] */
+ _Out_ MF_MEDIA_ENGINE_CANPLAY * pAnswer) = 0;
+
+ virtual USHORT STDMETHODCALLTYPE GetReadyState(void) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE IsSeeking(void) = 0;
+
+ virtual double STDMETHODCALLTYPE GetCurrentTime(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetCurrentTime(
+ /* [annotation][in] */
+ _In_ double seekTime) = 0;
+
+ virtual double STDMETHODCALLTYPE GetStartTime(void) = 0;
+
+ virtual double STDMETHODCALLTYPE GetDuration(void) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE IsPaused(void) = 0;
+
+ virtual double STDMETHODCALLTYPE GetDefaultPlaybackRate(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetDefaultPlaybackRate(
+ /* [annotation][in] */
+ _In_ double Rate) = 0;
+
+ virtual double STDMETHODCALLTYPE GetPlaybackRate(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetPlaybackRate(
+ /* [annotation][in] */
+ _In_ double Rate) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetPlayed(
+ /* [annotation][out] */
+ _Outptr_ IMFMediaTimeRange * *ppPlayed) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetSeekable(
+ /* [annotation][out] */
+ _Outptr_ IMFMediaTimeRange * *ppSeekable) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE IsEnded(void) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE GetAutoPlay(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetAutoPlay(
+ /* [annotation][in] */
+ _In_ BOOL AutoPlay) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE GetLoop(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetLoop(
+ /* [annotation][in] */
+ _In_ BOOL Loop) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Play(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Pause(void) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE GetMuted(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetMuted(
+ /* [annotation][in] */
+ _In_ BOOL Muted) = 0;
+
+ virtual double STDMETHODCALLTYPE GetVolume(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetVolume(
+ /* [annotation][in] */
+ _In_ double Volume) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE HasVideo(void) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE HasAudio(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetNativeVideoSize(
+ /* [annotation][out] */
+ _Out_opt_ DWORD * cx,
+ /* [annotation][out] */
+ _Out_opt_ DWORD * cy) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetVideoAspectRatio(
+ /* [annotation][out] */
+ _Out_opt_ DWORD * cx,
+ /* [annotation][out] */
+ _Out_opt_ DWORD * cy) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE Shutdown(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE TransferVideoFrame(
+ /* [annotation][in] */
+ _In_ IUnknown * pDstSurf,
+ /* [annotation][in] */
+ _In_opt_ const MFVideoNormalizedRect* pSrc,
+ /* [annotation][in] */
+ _In_ const RECT* pDst,
+ /* [annotation][in] */
+ _In_opt_ const MFARGB* pBorderClr) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE OnVideoStreamTick(
+ /* [annotation][out] */
+ _Out_ LONGLONG * pPts) = 0;
+};
+# endif /* __IMFMediaEngine_INTERFACE_DEFINED__ */
+
+# ifndef __IMFMediaEngineEx_INTERFACE_DEFINED__
+# define __IMFMediaEngineEx_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngineEx */
+/* [local][unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFMediaEngineEx;
+MIDL_INTERFACE("83015ead-b1e6-40d0-a98a-37145ffe1ad1")
+IMFMediaEngineEx : public IMFMediaEngine {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE SetSourceFromByteStream(
+ /* [annotation][in] */
+ _In_ IMFByteStream * pByteStream,
+ /* [annotation][in] */
+ _In_ BSTR pURL) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetStatistics(
+ /* [annotation][in] */
+ _In_ MF_MEDIA_ENGINE_STATISTIC StatisticID,
+ /* [annotation][out] */
+ _Out_ PROPVARIANT * pStatistic) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE UpdateVideoStream(
+ /* [annotation][in] */
+ _In_opt_ const MFVideoNormalizedRect* pSrc,
+ /* [annotation][in] */
+ _In_opt_ const RECT* pDst,
+ /* [annotation][in] */
+ _In_opt_ const MFARGB* pBorderClr) = 0;
+
+ virtual double STDMETHODCALLTYPE GetBalance(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetBalance(
+ /* [annotation][in] */
+ _In_ double balance) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE IsPlaybackRateSupported(
+ /* [annotation][in] */
+ _In_ double rate) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE FrameStep(
+ /* [annotation][in] */
+ _In_ BOOL Forward) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetResourceCharacteristics(
+ /* [annotation][out] */
+ _Out_ DWORD * pCharacteristics) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetPresentationAttribute(
+ /* [annotation][in] */
+ _In_ REFGUID guidMFAttribute,
+ /* [annotation][out] */
+ _Out_ PROPVARIANT * pvValue) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetNumberOfStreams(
+ /* [annotation][out] */
+ _Out_ DWORD * pdwStreamCount) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetStreamAttribute(
+ /* [annotation][in] */
+ _In_ DWORD dwStreamIndex,
+ /* [annotation][in] */
+ _In_ REFGUID guidMFAttribute,
+ /* [annotation][out] */
+ _Out_ PROPVARIANT * pvValue) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetStreamSelection(
+ /* [annotation][in] */
+ _In_ DWORD dwStreamIndex,
+ /* [annotation][out] */
+ _Out_ BOOL * pEnabled) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetStreamSelection(
+ /* [annotation][in] */
+ _In_ DWORD dwStreamIndex,
+ /* [annotation][in] */
+ _In_ BOOL Enabled) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE ApplyStreamSelections(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE IsProtected(
+ /* [annotation][out] */
+ _Out_ BOOL * pProtected) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE InsertVideoEffect(
+ /* [annotation][in] */
+ _In_ IUnknown * pEffect,
+ /* [annotation][in] */
+ _In_ BOOL fOptional) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE InsertAudioEffect(
+ /* [annotation][in] */
+ _In_ IUnknown * pEffect,
+ /* [annotation][in] */
+ _In_ BOOL fOptional) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE RemoveAllEffects(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetTimelineMarkerTimer(
+ /* [annotation][in] */
+ _In_ double timeToFire) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetTimelineMarkerTimer(
+ /* [annotation][out] */
+ _Out_ double* pTimeToFire) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE CancelTimelineMarkerTimer(void) = 0;
+
+ virtual BOOL STDMETHODCALLTYPE IsStereo3D(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetStereo3DFramePackingMode(
+ /* [annotation][out] */
+ _Out_ MF_MEDIA_ENGINE_S3D_PACKING_MODE * packMode) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetStereo3DFramePackingMode(
+ /* [annotation][in] */
+ _In_ MF_MEDIA_ENGINE_S3D_PACKING_MODE packMode) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetStereo3DRenderMode(
+ /* [annotation][out] */
+ _Out_ MF3DVideoOutputType * outputType) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetStereo3DRenderMode(
+ /* [annotation][in] */
+ _In_ MF3DVideoOutputType outputType) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE EnableWindowlessSwapchainMode(
+ /* [annotation][in] */
+ _In_ BOOL fEnable) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetVideoSwapchainHandle(
+ /* [annotation][out] */
+ _Out_ HANDLE * phSwapchain) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE EnableHorizontalMirrorMode(
+ /* [annotation][in] */
+ _In_ BOOL fEnable) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetAudioStreamCategory(
+ /* [annotation][out] */
+ _Out_ UINT32 * pCategory) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetAudioStreamCategory(
+ /* [annotation][in] */
+ _In_ UINT32 category) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetAudioEndpointRole(
+ /* [annotation][out] */
+ _Out_ UINT32 * pRole) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetAudioEndpointRole(
+ /* [annotation][in] */
+ _In_ UINT32 role) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetRealTimeMode(
+ /* [annotation][out] */
+ _Out_ BOOL * pfEnabled) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetRealTimeMode(
+ /* [annotation][in] */
+ _In_ BOOL fEnable) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetCurrentTimeEx(
+ /* [annotation][in] */
+ _In_ double seekTime,
+ /* [annotation][in] */
+ _In_ MF_MEDIA_ENGINE_SEEK_MODE seekMode) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE EnableTimeUpdateTimer(
+ /* [annotation][in] */
+ _In_ BOOL fEnableTimer) = 0;
+};
+# endif /* __IMFMediaEngineEx_INTERFACE_DEFINED__ */
+
+# ifndef __IMFCdmSuspendNotify_INTERFACE_DEFINED__
+# define __IMFCdmSuspendNotify_INTERFACE_DEFINED__
+
+/* interface IMFCdmSuspendNotify */
+/* [unique][uuid][object] */
+
+EXTERN_C const IID IID_IMFCdmSuspendNotify;
+
+MIDL_INTERFACE("7a5645d2-43bd-47fd-87b7-dcd24cc7d692")
+IMFCdmSuspendNotify : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE Begin(void) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE End(void) = 0;
+};
+
+# endif /* __IMFCdmSuspendNotify_INTERFACE_DEFINED__ */
+
+# ifndef __IMFMediaEngineProtectedContent_INTERFACE_DEFINED__
+# define __IMFMediaEngineProtectedContent_INTERFACE_DEFINED__
+
+/* interface IMFMediaEngineProtectedContent */
+/* [local][uuid][object] */
+
+EXTERN_C const IID IID_IMFMediaEngineProtectedContent;
+
+MIDL_INTERFACE("9f8021e8-9c8c-487e-bb5c-79aa4779938c")
+IMFMediaEngineProtectedContent : public IUnknown {
+ public:
+ virtual HRESULT STDMETHODCALLTYPE ShareResources(
+ /* [annotation] */
+ _In_ IUnknown * pUnkDeviceContext) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE GetRequiredProtections(
+ /* [annotation][out] */
+ _Out_ DWORD * pFrameProtectionFlags) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetOPMWindow(
+ /* [annotation][in] */
+ _In_ HWND hwnd) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE TransferVideoFrame(
+ /* [annotation][in] */
+ _In_ IUnknown * pDstSurf,
+ /* [annotation][in] */
+ _In_opt_ const MFVideoNormalizedRect* pSrc,
+ /* [annotation][in] */
+ _In_ const RECT* pDst,
+ /* [annotation][in] */
+ _In_opt_ const MFARGB* pBorderClr,
+ /* [annotation][out] */
+ _Out_ DWORD* pFrameProtectionFlags) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetContentProtectionManager(
+ /* [annotation][in] */
+ _In_opt_ IMFContentProtectionManager * pCPM) = 0;
+
+ virtual HRESULT STDMETHODCALLTYPE SetApplicationCertificate(
+ /* [annotation][in] */
+ _In_reads_bytes_(cbBlob) const BYTE* pbBlob,
+ /* [annotation][in] */
+ _In_ DWORD cbBlob) = 0;
+};
+
+# endif /* __IMFMediaEngineProtectedContent_INTERFACE_DEFINED__ */
+
+#endif // extra class copy from mfmediaengine.h
+#endif  // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEEXTRA_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineNotify.cpp b/dom/media/platforms/wmf/MFMediaEngineNotify.cpp
new file mode 100644
index 0000000000..a33757ac26
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineNotify.cpp
@@ -0,0 +1,32 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaEngineNotify.h"
+
+#include "MFMediaEngineUtils.h"
+
+namespace mozilla {
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFMediaEngineNotify=%p, " msg, this, ##__VA_ARGS__))
+
+IFACEMETHODIMP MFMediaEngineNotify::EventNotify(DWORD aEvent, DWORD_PTR aParam1,
+ DWORD aParam2) {
+ auto event = static_cast<MF_MEDIA_ENGINE_EVENT>(aEvent);
+ LOG("Received media engine event %s", MediaEngineEventToStr(event));
+ MFMediaEngineEventWrapper engineEvent{event};
+ if (event == MF_MEDIA_ENGINE_EVENT_ERROR ||
+ event == MF_MEDIA_ENGINE_EVENT_FORMATCHANGE ||
+ event == MF_MEDIA_ENGINE_EVENT_NOTIFYSTABLESTATE) {
+ engineEvent.mParam1 = Some(aParam1);
+ engineEvent.mParam2 = Some(aParam2);
+ }
+ mEngineEvents.Notify(engineEvent);
+ return S_OK;
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaEngineNotify.h b/dom/media/platforms/wmf/MFMediaEngineNotify.h
new file mode 100644
index 0000000000..9e42e115c0
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineNotify.h
@@ -0,0 +1,55 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H
+
+#include <wrl.h>
+
+#include "MediaEventSource.h"
+#include "MFMediaEngineExtra.h"
+#include "mozilla/Maybe.h"
+
+namespace mozilla {
+
+const char* MediaEngineEventToStr(MF_MEDIA_ENGINE_EVENT aEvent);
+
+// https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/ne-mfmediaengine-mf_media_engine_event
+struct MFMediaEngineEventWrapper final {
+ explicit MFMediaEngineEventWrapper(MF_MEDIA_ENGINE_EVENT aEvent)
+ : mEvent(aEvent) {}
+ MF_MEDIA_ENGINE_EVENT mEvent;
+ Maybe<DWORD_PTR> mParam1;
+ Maybe<DWORD> mParam2;
+};
+
+/**
+ * MFMediaEngineNotify is used to handle the event sent from the media engine.
+ * https://docs.microsoft.com/en-us/windows/win32/api/mfmediaengine/nn-mfmediaengine-imfmediaenginenotify
+ */
+class MFMediaEngineNotify final
+ : public Microsoft::WRL::RuntimeClass<
+ Microsoft::WRL::RuntimeClassFlags<
+ Microsoft::WRL::RuntimeClassType::ClassicCom>,
+ IMFMediaEngineNotify> {
+ public:
+ MFMediaEngineNotify() = default;
+
+ HRESULT RuntimeClassInitialize() { return S_OK; }
+
+ // Method for IMFMediaEngineNotify
+ IFACEMETHODIMP EventNotify(DWORD aEvent, DWORD_PTR aParam1,
+ DWORD aParam2) override;
+
+ MediaEventSource<MFMediaEngineEventWrapper>& MediaEngineEvent() {
+ return mEngineEvents;
+ }
+
+ private:
+ MediaEventProducer<MFMediaEngineEventWrapper> mEngineEvents;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINENOTIFY_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineStream.cpp b/dom/media/platforms/wmf/MFMediaEngineStream.cpp
new file mode 100644
index 0000000000..6dce37ee35
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineStream.cpp
@@ -0,0 +1,596 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaEngineStream.h"
+#include <vcruntime.h>
+
+#include "AudioConverter.h"
+#include "MFMediaSource.h"
+#include "MFMediaEngineUtils.h"
+#include "TimeUnits.h"
+#include "mozilla/ProfilerLabels.h"
+#include "mozilla/ProfilerMarkerTypes.h"
+#include "WMF.h"
+#include "WMFUtils.h"
+
+namespace mozilla {
+
+// Don't use this log on the task queue, because it would be racy for `mStream`.
+#define WLOGV(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Verbose, \
+ ("MFMediaEngineStreamWrapper for stream %p (%s, id=%lu), " msg, \
+ mStream.Get(), mStream->GetDescriptionName().get(), \
+ mStream->DescriptorId(), ##__VA_ARGS__))
+
+#define SLOG(msg, ...) \
+ MOZ_LOG( \
+ gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFMediaStream=%p (%s, id=%lu), " msg, this, \
+ this->GetDescriptionName().get(), this->DescriptorId(), ##__VA_ARGS__))
+
+#define SLOGV(msg, ...) \
+ MOZ_LOG( \
+ gMFMediaEngineLog, LogLevel::Verbose, \
+ ("MFMediaStream=%p (%s, id=%lu), " msg, this, \
+ this->GetDescriptionName().get(), this->DescriptorId(), ##__VA_ARGS__))
+
+using Microsoft::WRL::ComPtr;
+
+// Resolves immediately with the track type: the underlying stream was already
+// initialized by the media source, so there is no asynchronous work to do.
+RefPtr<MediaDataDecoder::InitPromise> MFMediaEngineStreamWrapper::Init() {
+  MOZ_ASSERT(mStream->DescriptorId(), "Stream hasn't been initialized!");
+  WLOGV("Init");
+  return InitPromise::CreateAndResolve(mStream->TrackType(), __func__);
+}
+
+// Accepts one encoded sample and forwards it to the stream's task queue via
+// MFMediaEngineStream::OutputData; the media engine consumes the sample, it
+// is not decoded here. Rejects once the wrapper or stream is shut down.
+RefPtr<MediaDataDecoder::DecodePromise> MFMediaEngineStreamWrapper::Decode(
+    MediaRawData* aSample) {
+  if (!mStream || mStream->IsShutdown()) {
+    // Don't log via WLOGV on this path: the macro dereferences mStream, which
+    // is null once the wrapper has been shut down.
+    return DecodePromise::CreateAndReject(
+        MediaResult(NS_ERROR_FAILURE, "MFMediaEngineStreamWrapper is shutdown"),
+        __func__);
+  }
+  WLOGV("Decode");
+  RefPtr<MediaRawData> sample = aSample;
+  return InvokeAsync(mTaskQueue, mStream.Get(), __func__,
+                     &MFMediaEngineStream::OutputData, std::move(sample));
+}
+
+// Drains any output the stream can still produce. Rejects once the wrapper or
+// stream is shut down.
+RefPtr<MediaDataDecoder::DecodePromise> MFMediaEngineStreamWrapper::Drain() {
+  if (!mStream || mStream->IsShutdown()) {
+    // WLOGV dereferences mStream, so it must not run on this path where
+    // mStream may already be null (after Shutdown()).
+    return DecodePromise::CreateAndReject(
+        MediaResult(NS_ERROR_FAILURE, "MFMediaEngineStreamWrapper is shutdown"),
+        __func__);
+  }
+  WLOGV("Drain");
+  return InvokeAsync(mTaskQueue, mStream.Get(), __func__,
+                     &MFMediaEngineStream::Drain);
+}
+
+// Flushes the stream's buffered input on its task queue. Rejects once the
+// wrapper or stream is shut down.
+RefPtr<MediaDataDecoder::FlushPromise> MFMediaEngineStreamWrapper::Flush() {
+  if (!mStream || mStream->IsShutdown()) {
+    // WLOGV dereferences mStream, so it must not run on this path where
+    // mStream may already be null (after Shutdown()).
+    return FlushPromise::CreateAndReject(
+        MediaResult(NS_ERROR_FAILURE, "MFMediaEngineStreamWrapper is shutdown"),
+        __func__);
+  }
+  WLOGV("Flush");
+  return InvokeAsync(mTaskQueue, mStream.Get(), __func__,
+                     &MFMediaEngineStream::Flush);
+}
+
+// Disconnects the wrapper from the stream. Stream shutdown itself is
+// controlled by the media source, so we don't call the stream's shutdown here.
+RefPtr<ShutdownPromise> MFMediaEngineStreamWrapper::Shutdown() {
+  if (!mStream) {
+    // Already disconnected. No WLOGV on this path — the macro dereferences
+    // the (null) mStream.
+    // This promise must only ever be resolved. See the definition of the
+    // original abstract function.
+    return ShutdownPromise::CreateAndResolve(false, __func__);
+  }
+  WLOGV("Disconnect wrapper");
+  mStream = nullptr;
+  mTaskQueue = nullptr;
+  return ShutdownPromise::CreateAndResolve(true, __func__);
+}
+
+// Human-readable track description; reports a placeholder once the wrapper
+// has been disconnected from its stream.
+nsCString MFMediaEngineStreamWrapper::GetDescriptionName() const {
+  if (!mStream) {
+    return nsLiteralCString("none");
+  }
+  return mStream->GetDescriptionName();
+}
+
+// Codec name of the underlying stream; reports a placeholder once the wrapper
+// has been disconnected from its stream.
+nsCString MFMediaEngineStreamWrapper::GetCodecName() const {
+  if (!mStream) {
+    return nsLiteralCString("none");
+  }
+  return mStream->GetCodecName();
+}
+
+// Delegates to the stream while connected; a disconnected wrapper needs no
+// sample conversion.
+MediaDataDecoder::ConversionRequired
+MFMediaEngineStreamWrapper::NeedsConversion() const {
+  if (mStream) {
+    return mStream->NeedsConversion();
+  }
+  return MediaDataDecoder::ConversionRequired::kNeedNone;
+}
+
+// Streams start unselected and not shut down; real setup happens in
+// RuntimeClassInitialize (WRL two-phase construction).
+MFMediaEngineStream::MFMediaEngineStream()
+    : mIsShutdown(false), mIsSelected(false), mReceivedEOS(false) {
+  MOZ_COUNT_CTOR(MFMediaEngineStream);
+}
+
+// The media source must have called Shutdown() before the last reference is
+// dropped; the assertion enforces that contract.
+MFMediaEngineStream::~MFMediaEngineStream() {
+  MOZ_ASSERT(IsShutdown());
+  MOZ_COUNT_DTOR(MFMediaEngineStream);
+}
+
+// Second-phase initializer invoked via MakeAndInitialize. Binds the stream to
+// its parent source's task queue, creates the MF event queue and builds the
+// stream descriptor from the track info.
+HRESULT MFMediaEngineStream::RuntimeClassInitialize(
+    uint64_t aStreamId, const TrackInfo& aInfo, MFMediaSource* aParentSource) {
+  mParentSource = aParentSource;
+  mTaskQueue = aParentSource->GetTaskQueue();
+  MOZ_ASSERT(mTaskQueue);
+  mStreamId = aStreamId;
+  RETURN_IF_FAILED(wmf::MFCreateEventQueue(&mMediaEventQueue));
+
+  ComPtr<IMFMediaType> mediaType;
+  // The inherited stream would return different type based on their media info.
+  RETURN_IF_FAILED(CreateMediaType(aInfo, mediaType.GetAddressOf()));
+  RETURN_IF_FAILED(GenerateStreamDescriptor(mediaType));
+  SLOG("Initialized %s (id=%" PRIu64 ", descriptorId=%lu)",
+       GetDescriptionName().get(), aStreamId, mStreamDescriptorId);
+  return S_OK;
+}
+
+// Creates the single-stream IMFStreamDescriptor the media engine uses to
+// discover this stream, caches its identifier, and flags the descriptor as
+// protected when the stream carries encrypted content.
+HRESULT MFMediaEngineStream::GenerateStreamDescriptor(
+    ComPtr<IMFMediaType>& aMediaType) {
+  RETURN_IF_FAILED(wmf::MFCreateStreamDescriptor(
+      mStreamId, 1 /* stream amount */, aMediaType.GetAddressOf(),
+      &mStreamDescriptor));
+  RETURN_IF_FAILED(
+      mStreamDescriptor->GetStreamIdentifier(&mStreamDescriptorId));
+  if (IsEncrypted()) {
+    RETURN_IF_FAILED(mStreamDescriptor->SetUINT32(MF_SD_PROTECTED, 1));
+  }
+  return S_OK;
+}
+
+// Called by MFMediaSource when playback (re)starts. Queues MEStreamStarted on
+// the MF event queue and, on the task queue, resets the EOS flag when the
+// start position differs from the current one.
+HRESULT MFMediaEngineStream::Start(const PROPVARIANT* aPosition) {
+  AssertOnMFThreadPool();
+  if (!IsSelected()) {
+    SLOG("No need to start non-selected stream");
+    return S_OK;
+  }
+  if (IsShutdown()) {
+    return MF_E_SHUTDOWN;
+  }
+  SLOG("Start");
+  // NOTE(review): aPosition is dereferenced without a null check — assumes the
+  // caller always passes a valid PROPVARIANT (VT_EMPTY means "resume from the
+  // current position"); confirm against MFMediaSource::Start.
+  const bool isFromCurrentPosition = aPosition->vt == VT_EMPTY;
+  RETURN_IF_FAILED(QueueEvent(MEStreamStarted, GUID_NULL, S_OK, aPosition));
+  MOZ_ASSERT(mTaskQueue);
+  Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction(
+      "MFMediaEngineStream::Start",
+      [self = RefPtr{this}, isFromCurrentPosition, this]() {
+        if (!isFromCurrentPosition && IsEnded()) {
+          SLOG("Stream restarts again from a new position, reset EOS");
+          mReceivedEOS = false;
+        }
+        // Process pending requests (if any) which happened when the stream
+        // wasn't allowed to serve samples. Eg. stream is paused. Or resend the
+        // ended event if the stream is ended already.
+        ReplySampleRequestIfPossible();
+      }));
+  return S_OK;
+}
+
+// Called by MFMediaSource on seek; only queues MEStreamSeeked. Unlike Start,
+// there is no explicit IsShutdown() check here — after Shutdown() the event
+// queue itself returns MF_E_SHUTDOWN, which RETURN_IF_FAILED propagates.
+HRESULT MFMediaEngineStream::Seek(const PROPVARIANT* aPosition) {
+  AssertOnMFThreadPool();
+  if (!IsSelected()) {
+    SLOG("No need to seek non-selected stream");
+    return S_OK;
+  }
+  SLOG("Seek");
+  RETURN_IF_FAILED(QueueEvent(MEStreamSeeked, GUID_NULL, S_OK, aPosition));
+  return S_OK;
+}
+
+// Called by MFMediaSource on stop; queues MEStreamStopped. After Shutdown()
+// the event queue returns MF_E_SHUTDOWN, which RETURN_IF_FAILED propagates.
+HRESULT MFMediaEngineStream::Stop() {
+  AssertOnMFThreadPool();
+  if (!IsSelected()) {
+    SLOG("No need to stop non-selected stream");
+    return S_OK;
+  }
+  SLOG("Stop");
+  RETURN_IF_FAILED(QueueEvent(MEStreamStopped, GUID_NULL, S_OK, nullptr));
+  return S_OK;
+}
+
+// Called by MFMediaSource on pause; queues MEStreamPaused. After Shutdown()
+// the event queue returns MF_E_SHUTDOWN, which RETURN_IF_FAILED propagates.
+HRESULT MFMediaEngineStream::Pause() {
+  AssertOnMFThreadPool();
+  if (!IsSelected()) {
+    SLOG("No need to pause non-selected stream");
+    return S_OK;
+  }
+  SLOG("Pause");
+  RETURN_IF_FAILED(QueueEvent(MEStreamPaused, GUID_NULL, S_OK, nullptr));
+  return S_OK;
+}
+
+// Invoked by the media source when it shuts down. Sets the (atomic) shutdown
+// flag first so other threads observe it, closes the MF event queue, then
+// releases task-queue-owned state on the task queue.
+void MFMediaEngineStream::Shutdown() {
+  AssertOnMFThreadPool();
+  if (IsShutdown()) {
+    return;
+  }
+  SLOG("Shutdown");
+  mIsShutdown = true;
+  // After this method is called, all IMFMediaEventQueue methods return
+  // MF_E_SHUTDOWN.
+  RETURN_VOID_IF_FAILED(mMediaEventQueue->Shutdown());
+  ComPtr<MFMediaEngineStream> self = this;
+  MOZ_ASSERT(mTaskQueue);
+  Unused << mTaskQueue->Dispatch(
+      NS_NewRunnableFunction("MFMediaEngineStream::Shutdown", [self]() {
+        // Break the link to the parent source and drop buffered input before
+        // giving up the task queue reference.
+        self->mParentSource = nullptr;
+        self->mRawDataQueueForFeedingEngine.Reset();
+        self->mRawDataQueueForGeneratingOutput.Reset();
+        self->ShutdownCleanUpOnTaskQueue();
+        self->mTaskQueue = nullptr;
+      }));
+}
+
+// IMFMediaStream: returns an AddRef'd pointer to the owning MFMediaSource.
+IFACEMETHODIMP
+MFMediaEngineStream::GetMediaSource(IMFMediaSource** aMediaSource) {
+  AssertOnMFThreadPool();
+  if (IsShutdown()) {
+    return MF_E_SHUTDOWN;
+  }
+  RETURN_IF_FAILED(mParentSource.CopyTo(aMediaSource));
+  return S_OK;
+}
+
+// IMFMediaStream: returns an AddRef'd pointer to the descriptor created in
+// GenerateStreamDescriptor (may not exist if initialization failed early).
+IFACEMETHODIMP MFMediaEngineStream::GetStreamDescriptor(
+    IMFStreamDescriptor** aStreamDescriptor) {
+  AssertOnMFThreadPool();
+  if (IsShutdown()) {
+    return MF_E_SHUTDOWN;
+  }
+  if (!mStreamDescriptor) {
+    SLOG("Hasn't initialized stream descriptor");
+    return MF_E_NOT_INITIALIZED;
+  }
+  RETURN_IF_FAILED(mStreamDescriptor.CopyTo(aStreamDescriptor));
+  return S_OK;
+}
+
+// IMFMediaStream: called by the media engine when it wants the next encoded
+// sample. The request token is queued and answered on the task queue; if we
+// are low on buffered data, also ask the demuxer side for more input.
+IFACEMETHODIMP MFMediaEngineStream::RequestSample(IUnknown* aToken) {
+  AssertOnMFThreadPool();
+  if (IsShutdown()) {
+    return MF_E_SHUTDOWN;
+  }
+
+  ComPtr<IUnknown> token = aToken;
+  ComPtr<MFMediaEngineStream> self = this;
+  MOZ_ASSERT(mTaskQueue);
+  Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction(
+      "MFMediaEngineStream::RequestSample", [token, self, this]() {
+        AssertOnTaskQueue();
+        mSampleRequestTokens.push(token);
+        SLOGV("RequestSample, token amount=%zu", mSampleRequestTokens.size());
+        ReplySampleRequestIfPossible();
+        // Running low on data and not at EOS: request more input.
+        if (!HasEnoughRawData() && mParentSource && !IsEnded()) {
+          SendRequestSampleEvent(false /* isEnough */);
+        }
+      }));
+  return S_OK;
+}
+
+// Answers pending sample requests on the task queue. If the stream has ended,
+// all outstanding tokens are dropped and MEEndOfStream is queued. Otherwise,
+// when both a pending token and buffered data exist (and the source is
+// started), one sample is wrapped into an IMFSample and delivered via a
+// MEMediaSample event.
+void MFMediaEngineStream::ReplySampleRequestIfPossible() {
+  AssertOnTaskQueue();
+  if (IsEnded()) {
+    // We have no more sample to return, clean all pending requests.
+    while (!mSampleRequestTokens.empty()) {
+      mSampleRequestTokens.pop();
+    }
+
+    SLOG("Notify end events");
+    MOZ_ASSERT(mRawDataQueueForFeedingEngine.GetSize() == 0);
+    MOZ_ASSERT(mSampleRequestTokens.empty());
+    RETURN_VOID_IF_FAILED(mMediaEventQueue->QueueEventParamUnk(
+        MEEndOfStream, GUID_NULL, S_OK, nullptr));
+    mEndedEvent.Notify(TrackType());
+    PROFILER_MARKER_TEXT(
+        "MFMediaEngineStream:NotifyEnd", MEDIA_PLAYBACK, {},
+        nsPrintfCString("stream=%s, id=%" PRIu64, GetDescriptionName().get(),
+                        mStreamId));
+    return;
+  }
+
+  if (mSampleRequestTokens.empty() ||
+      mRawDataQueueForFeedingEngine.GetSize() == 0) {
+    return;
+  }
+
+  if (!ShouldServeSamples()) {
+    SLOGV("Not deliver samples if the stream is not started");
+    return;
+  }
+
+  // Push data into the mf media event queue if the media engine is already
+  // waiting for data.
+  ComPtr<IMFSample> inputSample;
+  RETURN_VOID_IF_FAILED(CreateInputSample(inputSample.GetAddressOf()));
+  // Attach the engine's request token so it can match reply to request.
+  ComPtr<IUnknown> token = mSampleRequestTokens.front();
+  RETURN_VOID_IF_FAILED(
+      inputSample->SetUnknown(MFSampleExtension_Token, token.Get()));
+  mSampleRequestTokens.pop();
+  RETURN_VOID_IF_FAILED(mMediaEventQueue->QueueEventParamUnk(
+      MEMediaSample, GUID_NULL, S_OK, inputSample.Get()));
+}
+
+// Samples are only delivered for a selected stream whose parent source is
+// actively started.
+bool MFMediaEngineStream::ShouldServeSamples() const {
+  AssertOnTaskQueue();
+  if (!mParentSource) {
+    return false;
+  }
+  if (mParentSource->GetState() != MFMediaSource::State::Started) {
+    return false;
+  }
+  return mIsSelected;
+}
+
+// Pops the next buffered MediaRawData and converts it into an IMFSample:
+// copies the payload into an IMFMediaBuffer, sets timing/keyframe attributes,
+// and adds encryption attributes for protected samples. The caller takes
+// ownership of *aSample.
+HRESULT MFMediaEngineStream::CreateInputSample(IMFSample** aSample) {
+  AssertOnTaskQueue();
+
+  ComPtr<IMFSample> sample;
+  RETURN_IF_FAILED(wmf::MFCreateSample(&sample));
+
+  MOZ_ASSERT(mRawDataQueueForFeedingEngine.GetSize() != 0);
+  RefPtr<MediaRawData> data = mRawDataQueueForFeedingEngine.PopFront();
+  SLOGV("CreateInputSample, pop data [%" PRId64 ", %" PRId64
+        "] (duration=%" PRId64 ", kf=%d), queue size=%zu",
+        data->mTime.ToMicroseconds(), data->GetEndTime().ToMicroseconds(),
+        data->mDuration.ToMicroseconds(), data->mKeyframe,
+        mRawDataQueueForFeedingEngine.GetSize());
+  PROFILER_MARKER(
+      nsPrintfCString(
+          "pop %s (stream=%" PRIu64 ")",
+          TrackType() == TrackInfo::TrackType::kVideoTrack ? "video" : "audio",
+          mStreamId),
+      MEDIA_PLAYBACK, {}, MediaSampleMarker, data->mTime.ToMicroseconds(),
+      data->GetEndTime().ToMicroseconds(),
+      mRawDataQueueForFeedingEngine.GetSize());
+
+  // Copy data into IMFMediaBuffer
+  ComPtr<IMFMediaBuffer> buffer;
+  BYTE* dst = nullptr;
+  DWORD maxLength = 0;
+  RETURN_IF_FAILED(
+      wmf::MFCreateMemoryBuffer(data->Size(), buffer.GetAddressOf()));
+  RETURN_IF_FAILED(buffer->Lock(&dst, &maxLength, 0));
+  memcpy(dst, data->Data(), data->Size());
+  RETURN_IF_FAILED(buffer->Unlock());
+  RETURN_IF_FAILED(buffer->SetCurrentLength(data->Size()));
+
+  // Setup sample attributes (MF uses 100-ns units, hence UsecsToHNs).
+  RETURN_IF_FAILED(sample->AddBuffer(buffer.Get()));
+  RETURN_IF_FAILED(
+      sample->SetSampleTime(UsecsToHNs(data->mTime.ToMicroseconds())));
+  RETURN_IF_FAILED(
+      sample->SetSampleDuration(UsecsToHNs(data->mDuration.ToMicroseconds())));
+  if (data->mKeyframe) {
+    RETURN_IF_FAILED(sample->SetUINT32(MFSampleExtension_CleanPoint, 1));
+  }
+
+  // Setup encrypt attributes
+  if (data->mCrypto.IsEncrypted()) {
+    RETURN_IF_FAILED(AddEncryptAttributes(sample.Get(), data->mCrypto));
+  }
+
+  *aSample = sample.Detach();
+  return S_OK;
+}
+
+// Translates our CryptoSample metadata into the Media Foundation sample
+// attributes (protection scheme, key ID, IV and subsample mapping) that the
+// engine/CDM needs to decrypt the sample.
+HRESULT MFMediaEngineStream::AddEncryptAttributes(
+    IMFSample* aSample, const CryptoSample& aCryptoConfig) {
+  // Scheme
+  MFSampleEncryptionProtectionScheme protectionScheme;
+  if (aCryptoConfig.mCryptoScheme == CryptoScheme::Cenc) {
+    protectionScheme = MFSampleEncryptionProtectionScheme::
+        MF_SAMPLE_ENCRYPTION_PROTECTION_SCHEME_AES_CTR;
+  } else if (aCryptoConfig.mCryptoScheme == CryptoScheme::Cbcs) {
+    protectionScheme = MFSampleEncryptionProtectionScheme::
+        MF_SAMPLE_ENCRYPTION_PROTECTION_SCHEME_AES_CBC;
+  } else {
+    SLOG("Unexpected encryption scheme");
+    return MF_E_UNEXPECTED;
+  }
+  RETURN_IF_FAILED(aSample->SetUINT32(
+      MFSampleExtension_Encryption_ProtectionScheme, protectionScheme));
+
+  // KID: stored as a GUID attribute, so only 16-byte key IDs are supported.
+  if (aCryptoConfig.mKeyId.Length() != sizeof(GUID)) {
+    SLOG("Unsupported key ID size (%zu)", aCryptoConfig.mKeyId.Length());
+    return MF_E_UNEXPECTED;
+  }
+  GUID keyId;
+  GUIDFromByteArray(aCryptoConfig.mKeyId, keyId);
+  RETURN_IF_FAILED(aSample->SetGUID(MFSampleExtension_Content_KeyID, keyId));
+  // TODO : if we want to suspend/resume the media engine, then we can consider
+  // to store last key id and set it in CDM to refresh the decryptor.
+
+  // IV
+  // NOTE(review): assumes mIV holds at least mIVSize bytes — confirm the
+  // demuxer guarantees this before the blob is read.
+  RETURN_IF_FAILED(aSample->SetBlob(
+      MFSampleExtension_Encryption_SampleID,
+      reinterpret_cast<const uint8_t*>(aCryptoConfig.mIV.Elements()),
+      aCryptoConfig.mIVSize));
+
+  // Subsample entries.
+  MOZ_ASSERT(aCryptoConfig.mEncryptedSizes.Length() ==
+             aCryptoConfig.mPlainSizes.Length());
+  size_t numSubsamples = aCryptoConfig.mEncryptedSizes.Length();
+  if (numSubsamples != 0) {
+    std::vector<MediaFoundationSubsampleEntry> subsampleEntries;
+    for (size_t idx = 0; idx < numSubsamples; idx++) {
+      subsampleEntries.push_back(MediaFoundationSubsampleEntry{
+          aCryptoConfig.mPlainSizes[idx], aCryptoConfig.mEncryptedSizes[idx]});
+    }
+    const uint32_t entriesSize =
+        sizeof(MediaFoundationSubsampleEntry) * numSubsamples;
+    RETURN_IF_FAILED(aSample->SetBlob(
+        MFSampleExtension_Encryption_SubSample_Mapping,
+        reinterpret_cast<const uint8_t*>(subsampleEntries.data()),
+        entriesSize));
+  }
+
+  return S_OK;
+}
+
+// IMFMediaEventGenerator: pass-through to the thread-safe MF event queue.
+IFACEMETHODIMP MFMediaEngineStream::GetEvent(DWORD aFlags,
+                                             IMFMediaEvent** aEvent) {
+  AssertOnMFThreadPool();
+  MOZ_ASSERT(mMediaEventQueue);
+  RETURN_IF_FAILED(mMediaEventQueue->GetEvent(aFlags, aEvent));
+  return S_OK;
+}
+
+// IMFMediaEventGenerator: pass-through to the thread-safe MF event queue.
+IFACEMETHODIMP MFMediaEngineStream::BeginGetEvent(IMFAsyncCallback* aCallback,
+                                                  IUnknown* aState) {
+  AssertOnMFThreadPool();
+  MOZ_ASSERT(mMediaEventQueue);
+  RETURN_IF_FAILED(mMediaEventQueue->BeginGetEvent(aCallback, aState));
+  return S_OK;
+}
+
+// IMFMediaEventGenerator: pass-through to the thread-safe MF event queue.
+IFACEMETHODIMP MFMediaEngineStream::EndGetEvent(IMFAsyncResult* aResult,
+                                                IMFMediaEvent** aEvent) {
+  AssertOnMFThreadPool();
+  MOZ_ASSERT(mMediaEventQueue);
+  RETURN_IF_FAILED(mMediaEventQueue->EndGetEvent(aResult, aEvent));
+  return S_OK;
+}
+
+// IMFMediaEventGenerator: queue an event with a PROPVARIANT payload. Returns
+// MF_E_SHUTDOWN (via RETURN_IF_FAILED) once the event queue is shut down.
+IFACEMETHODIMP MFMediaEngineStream::QueueEvent(MediaEventType aType,
+                                               REFGUID aExtendedType,
+                                               HRESULT aStatus,
+                                               const PROPVARIANT* aValue) {
+  AssertOnMFThreadPool();
+  MOZ_ASSERT(mMediaEventQueue);
+  RETURN_IF_FAILED(mMediaEventQueue->QueueEventParamVar(aType, aExtendedType,
+                                                        aStatus, aValue));
+  SLOG("Queued event %s", MediaEventTypeToStr(aType));
+  return S_OK;
+}
+
+// Called by the media source when the stream is (de)selected. mIsSelected is
+// atomic, so readers on other threads observe the change.
+void MFMediaEngineStream::SetSelected(bool aSelected) {
+  AssertOnMFThreadPool();
+  SLOG("Select=%d", aSelected);
+  mIsSelected = aSelected;
+}
+
+// Buffers one incoming encoded sample (for both the engine-feeding and the
+// output-generating queues), clears a stale EOS flag, tries to answer any
+// pending sample request, and notifies "enough data" on the low→high
+// threshold crossing.
+void MFMediaEngineStream::NotifyNewData(MediaRawData* aSample) {
+  AssertOnTaskQueue();
+  if (IsShutdown()) {
+    return;
+  }
+  const bool wasEnough = HasEnoughRawData();
+  mRawDataQueueForFeedingEngine.Push(aSample);
+  mRawDataQueueForGeneratingOutput.Push(aSample);
+  SLOGV("NotifyNewData, push data [%" PRId64 ", %" PRId64
+        "], queue size=%zu, queue duration=%" PRId64,
+        aSample->mTime.ToMicroseconds(), aSample->GetEndTime().ToMicroseconds(),
+        mRawDataQueueForFeedingEngine.GetSize(),
+        mRawDataQueueForFeedingEngine.Duration());
+  if (mReceivedEOS) {
+    SLOG("Receive a new data, cancel old EOS flag");
+    mReceivedEOS = false;
+  }
+  ReplySampleRequestIfPossible();
+  if (!wasEnough && HasEnoughRawData()) {
+    SendRequestSampleEvent(true /* isEnough */);
+  }
+}
+
+// Tells the parent source's listener whether this track has enough buffered
+// input. NOTE(review): mParentSource is not null-checked here, unlike in
+// RequestSample — assumes callers only invoke this before Shutdown() nulls it;
+// confirm all call sites.
+void MFMediaEngineStream::SendRequestSampleEvent(bool aIsEnough) {
+  AssertOnTaskQueue();
+  SLOGV("data is %s, queue duration=%" PRId64,
+        aIsEnough ? "enough" : "not enough",
+        mRawDataQueueForFeedingEngine.Duration());
+  mParentSource->mRequestSampleEvent.Notify(
+      SampleRequest{TrackType(), aIsEnough});
+}
+
+// Task-queue side of NotifyEndOfStream(): records EOS (idempotent) and lets
+// ReplySampleRequestIfPossible emit MEEndOfStream once the queue drains.
+void MFMediaEngineStream::NotifyEndOfStreamInternal() {
+  AssertOnTaskQueue();
+  if (mReceivedEOS) {
+    return;
+  }
+  SLOG("EOS");
+  mReceivedEOS = true;
+  ReplySampleRequestIfPossible();
+}
+
+// True once EOS has been signaled and all buffered input has been consumed.
+bool MFMediaEngineStream::IsEnded() const {
+  AssertOnTaskQueue();
+  return mReceivedEOS && mRawDataQueueForFeedingEngine.GetSize() == 0;
+}
+
+// Drops all buffered input and clears the EOS flag, e.g. on seek. IsShutdown
+// (atomic) is checked before the task-queue assertion so a post-shutdown call
+// rejects instead of asserting on a released queue.
+RefPtr<MediaDataDecoder::FlushPromise> MFMediaEngineStream::Flush() {
+  if (IsShutdown()) {
+    return MediaDataDecoder::FlushPromise::CreateAndReject(
+        MediaResult(NS_ERROR_FAILURE,
+                    RESULT_DETAIL("MFMediaEngineStream is shutdown")),
+        __func__);
+  }
+  AssertOnTaskQueue();
+  SLOG("Flush");
+  mRawDataQueueForFeedingEngine.Reset();
+  mRawDataQueueForGeneratingOutput.Reset();
+  mReceivedEOS = false;
+  return MediaDataDecoder::FlushPromise::CreateAndResolve(true, __func__);
+}
+
+// Feeds one encoded sample to the stream and returns whatever output
+// OutputDataInternal() can produce immediately. The real decoding happens
+// inside the media engine; this path only surfaces pass-through data.
+RefPtr<MediaDataDecoder::DecodePromise> MFMediaEngineStream::OutputData(
+    RefPtr<MediaRawData> aSample) {
+  if (IsShutdown()) {
+    return MediaDataDecoder::DecodePromise::CreateAndReject(
+        MediaResult(NS_ERROR_FAILURE,
+                    RESULT_DETAIL("MFMediaEngineStream is shutdown")),
+        __func__);
+  }
+  AssertOnTaskQueue();
+  NotifyNewData(aSample);
+  MediaDataDecoder::DecodedData outputs;
+  if (RefPtr<MediaData> outputData = OutputDataInternal()) {
+    outputs.AppendElement(outputData);
+    SLOGV("Output data [%" PRId64 ",%" PRId64 "]",
+          outputData->mTime.ToMicroseconds(),
+          outputData->GetEndTime().ToMicroseconds());
+  }
+  return MediaDataDecoder::DecodePromise::CreateAndResolve(std::move(outputs),
+                                                           __func__);
+}
+
+// Produces all remaining output from the output-generating queue, resolving
+// with the collected results (possibly empty).
+RefPtr<MediaDataDecoder::DecodePromise> MFMediaEngineStream::Drain() {
+  if (IsShutdown()) {
+    return MediaDataDecoder::DecodePromise::CreateAndReject(
+        MediaResult(NS_ERROR_FAILURE,
+                    RESULT_DETAIL("MFMediaEngineStream is shutdown")),
+        __func__);
+  }
+  AssertOnTaskQueue();
+  MediaDataDecoder::DecodedData outputs;
+  while (RefPtr<MediaData> outputData = OutputDataInternal()) {
+    outputs.AppendElement(outputData);
+    SLOGV("Output data [%" PRId64 ",%" PRId64 "]",
+          outputData->mTime.ToMicroseconds(),
+          outputData->GetEndTime().ToMicroseconds());
+  }
+  return MediaDataDecoder::DecodePromise::CreateAndResolve(std::move(outputs),
+                                                           __func__);
+}
+
+// Debug helper: asserts the caller is running on the stream's task queue.
+void MFMediaEngineStream::AssertOnTaskQueue() const {
+  MOZ_ASSERT(mTaskQueue && mTaskQueue->IsCurrentThreadIn());
+}
+
+void MFMediaEngineStream::AssertOnMFThreadPool() const {
+  // We can't really assert the thread id from thread pool, because it would
+  // change any time. So we just assert this is not the task queue, and use the
+  // explicit function name to indicate what thread we should run on.
+  // TODO : this assertion is not precise, because the running thread could be
+  // the stream wrapper thread as well.
+  MOZ_ASSERT(!mTaskQueue || !mTaskQueue->IsCurrentThreadIn());
+}
+
+#undef WLOGV
+#undef SLOG
+#undef SLOGV
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaEngineStream.h b/dom/media/platforms/wmf/MFMediaEngineStream.h
new file mode 100644
index 0000000000..aa3bf7e65d
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineStream.h
@@ -0,0 +1,228 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINESTREAM_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINESTREAM_H
+
+#include <mfidl.h>
+#include <wrl.h>
+
+#include <queue>
+
+#include "BlankDecoderModule.h"
+#include "MediaQueue.h"
+#include "PlatformDecoderModule.h"
+#include "mozilla/Atomics.h"
+#include "mozilla/Mutex.h"
+#include "mozilla/SPSCQueue.h"
+
+namespace mozilla {
+
+class MFMediaEngineVideoStream;
+class MFMediaSource;
+
+/**
+ * MFMediaEngineStream represents a track which would be responsible to provide
+ * encoded data into the media engine. The media engine can access this stream
+ * by the presentation descriptor which was acquired from the custom media
+ * source.
+ */
+class MFMediaEngineStream
+    : public Microsoft::WRL::RuntimeClass<
+          Microsoft::WRL::RuntimeClassFlags<
+              Microsoft::WRL::RuntimeClassType::ClassicCom>,
+          IMFMediaStream> {
+ public:
+  MFMediaEngineStream();
+  ~MFMediaEngineStream();
+
+  // Human-readable track description, used for logging and profiling.
+  virtual nsCString GetDescriptionName() const = 0;
+
+  virtual nsCString GetCodecName() const = 0;
+
+  HRESULT RuntimeClassInitialize(uint64_t aStreamId, const TrackInfo& aInfo,
+                                 MFMediaSource* aParentSource);
+
+  // Called by MFMediaSource.
+  HRESULT Start(const PROPVARIANT* aPosition);
+  HRESULT Seek(const PROPVARIANT* aPosition);
+  HRESULT Stop();
+  HRESULT Pause();
+  void Shutdown();
+
+  void SetSelected(bool aSelected);
+  bool IsSelected() const { return mIsSelected; }
+  DWORD DescriptorId() const { return mStreamDescriptorId; }
+
+  // Methods for IMFMediaStream
+  IFACEMETHODIMP GetMediaSource(IMFMediaSource** aMediaSource) override;
+  IFACEMETHODIMP GetStreamDescriptor(
+      IMFStreamDescriptor** aStreamDescriptor) override;
+  IFACEMETHODIMP RequestSample(IUnknown* aToken) override;
+
+  // Methods for IMFMediaEventGenerator, IMFMediaStream derives from
+  // IMFMediaEventGenerator.
+  IFACEMETHODIMP GetEvent(DWORD aFlags, IMFMediaEvent** aEvent) override;
+  IFACEMETHODIMP BeginGetEvent(IMFAsyncCallback* aCallback,
+                               IUnknown* aState) override;
+  IFACEMETHODIMP EndGetEvent(IMFAsyncResult* aResult,
+                             IMFMediaEvent** aEvent) override;
+  IFACEMETHODIMP QueueEvent(MediaEventType aType, REFGUID aExtendedType,
+                            HRESULT aStatus,
+                            const PROPVARIANT* aValue) override;
+
+  TaskQueue* GetTaskQueue() { return mTaskQueue; }
+
+  // Dispatches the EOS notification onto the task queue, where all EOS state
+  // lives.
+  void NotifyEndOfStream() {
+    Microsoft::WRL::ComPtr<MFMediaEngineStream> self = this;
+    Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction(
+        "MFMediaEngineStream::NotifyEndOfStream",
+        [self]() { self->NotifyEndOfStreamInternal(); }));
+  }
+
+  // Return the type of the track, the result should be either audio or video.
+  virtual TrackInfo::TrackType TrackType() = 0;
+
+  RefPtr<MediaDataDecoder::FlushPromise> Flush();
+
+  MediaEventProducer<TrackInfo::TrackType>& EndedEvent() { return mEndedEvent; }
+
+  // True if the stream has been shut down; it's a thread-safe method.
+  bool IsShutdown() const { return mIsShutdown; }
+
+  virtual MFMediaEngineVideoStream* AsVideoStream() { return nullptr; }
+
+  RefPtr<MediaDataDecoder::DecodePromise> OutputData(
+      RefPtr<MediaRawData> aSample);
+
+  virtual RefPtr<MediaDataDecoder::DecodePromise> Drain();
+
+  virtual MediaDataDecoder::ConversionRequired NeedsConversion() const {
+    return MediaDataDecoder::ConversionRequired::kNeedNone;
+  }
+
+  virtual bool IsEncrypted() const = 0;
+
+ protected:
+  HRESULT GenerateStreamDescriptor(
+      Microsoft::WRL::ComPtr<IMFMediaType>& aMediaType);
+
+  // Create a IMFMediaType which includes the details about the stream.
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/media-type-attributes
+  virtual HRESULT CreateMediaType(const TrackInfo& aInfo,
+                                  IMFMediaType** aMediaType) = 0;
+
+  // True if the stream already has enough raw data.
+  virtual bool HasEnoughRawData() const = 0;
+
+  HRESULT CreateInputSample(IMFSample** aSample);
+  void ReplySampleRequestIfPossible();
+  bool ShouldServeSamples() const;
+
+  void NotifyNewData(MediaRawData* aSample);
+  void NotifyEndOfStreamInternal();
+
+  virtual bool IsEnded() const;
+
+  // Override this method if an inherited class needs to perform clean-up on
+  // the task queue when the stream gets shut down.
+  virtual void ShutdownCleanUpOnTaskQueue(){};
+
+  // Inherited class must implement this method to return decoded data. It
+  // should use `mRawDataQueueForGeneratingOutput` to generate output.
+  virtual already_AddRefed<MediaData> OutputDataInternal() = 0;
+
+  void SendRequestSampleEvent(bool aIsEnough);
+
+  HRESULT AddEncryptAttributes(IMFSample* aSample,
+                               const CryptoSample& aCryptoConfig);
+
+  void AssertOnTaskQueue() const;
+  void AssertOnMFThreadPool() const;
+
+  // IMFMediaEventQueue is thread-safe.
+  Microsoft::WRL::ComPtr<IMFMediaEventQueue> mMediaEventQueue;
+  Microsoft::WRL::ComPtr<IMFStreamDescriptor> mStreamDescriptor;
+  Microsoft::WRL::ComPtr<MFMediaSource> mParentSource;
+
+  // This is a unique ID retrieved from the IMFStreamDescriptor.
+  DWORD mStreamDescriptorId = 0;
+
+  // A unique ID assigned by MFMediaSource, which won't be changed after first
+  // assignment.
+  uint64_t mStreamId = 0;
+
+  RefPtr<TaskQueue> mTaskQueue;
+
+  // This class would be run on three threads, MF thread pool, the source's
+  // task queue and MediaPDecoder (wrapper thread). Following members would be
+  // used across these threads so they need to be thread-safe.
+
+  // Modify on the MF thread pool, access from any threads.
+  Atomic<bool> mIsShutdown;
+
+  // True if the stream is selected by the media source.
+  // Modify on MF thread pool, access from any threads.
+  Atomic<bool> mIsSelected;
+
+  // A thread-safe queue storing input samples, which provides samples to the
+  // media engine.
+  MediaQueue<MediaRawData> mRawDataQueueForFeedingEngine;
+
+  // A thread-safe queue storing input samples, which would be used to generate
+  // decoded data.
+  MediaQueue<MediaRawData> mRawDataQueueForGeneratingOutput;
+
+  // Thread-safe members END
+
+  // Store sample request token, one token should be related with one output
+  // data. It's used on the task queue only.
+  std::queue<Microsoft::WRL::ComPtr<IUnknown>> mSampleRequestTokens;
+
+  // Notify when playback reaches the end for this track.
+  MediaEventProducer<TrackInfo::TrackType> mEndedEvent;
+
+  // True if the stream has received the last data, but it could be reset if the
+  // stream starts delivering more data. Used on the task queue only.
+  bool mReceivedEOS;
+};
+
+/**
+ * This wrapper helps to dispatch task onto the stream's task queue. Its methods
+ * are not thread-safe and would only be called on the IPC decoder manager
+ * thread.
+ */
+class MFMediaEngineStreamWrapper final : public MediaDataDecoder {
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFMediaEngineStreamWrapper, final);
+
+  MFMediaEngineStreamWrapper(MFMediaEngineStream* aStream,
+                             TaskQueue* aTaskQueue,
+                             const CreateDecoderParams& aParams)
+      : mStream(aStream), mTaskQueue(aTaskQueue) {
+    MOZ_ASSERT(mStream);
+    MOZ_ASSERT(mTaskQueue);
+  }
+
+  // Methods for MediaDataDecoder, they are all called on the remote
+  // decoder manager thread.
+  RefPtr<InitPromise> Init() override;
+  RefPtr<DecodePromise> Decode(MediaRawData* aSample) override;
+  RefPtr<DecodePromise> Drain() override;
+  RefPtr<FlushPromise> Flush() override;
+  RefPtr<ShutdownPromise> Shutdown() override;
+  nsCString GetDescriptionName() const override;
+  nsCString GetCodecName() const override;
+  ConversionRequired NeedsConversion() const override;
+
+ private:
+  ~MFMediaEngineStreamWrapper() = default;
+
+  // Both are cleared by Shutdown(); every other method must tolerate a null
+  // mStream afterwards.
+  Microsoft::WRL::ComPtr<MFMediaEngineStream> mStream;
+  RefPtr<TaskQueue> mTaskQueue;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINESTREAM_H
diff --git a/dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp b/dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp
new file mode 100644
index 0000000000..6ac716ea15
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineVideoStream.cpp
@@ -0,0 +1,372 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaEngineVideoStream.h"
+
+#include "mozilla/layers/DcompSurfaceImage.h"
+#include "MFMediaEngineUtils.h"
+#include "mozilla/StaticPrefs_media.h"
+
+namespace mozilla {
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFMediaStream=%p (%s), " msg, this, \
+ this->GetDescriptionName().get(), ##__VA_ARGS__))
+
+#define LOGV(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Verbose, \
+ ("MFMediaStream=%p (%s), " msg, this, \
+ this->GetDescriptionName().get(), ##__VA_ARGS__))
+
+using Microsoft::WRL::ComPtr;
+using Microsoft::WRL::MakeAndInitialize;
+
+/* static */
+// Factory for a video stream. Returns nullptr if COM initialization fails.
+// Also resets the dcomp surface handle so IsDCompImageReady() starts false.
+MFMediaEngineVideoStream* MFMediaEngineVideoStream::Create(
+    uint64_t aStreamId, const TrackInfo& aInfo, MFMediaSource* aParentSource) {
+  MFMediaEngineVideoStream* stream;
+  MOZ_ASSERT(aInfo.IsVideo());
+  if (FAILED(MakeAndInitialize<MFMediaEngineVideoStream>(
+          &stream, aStreamId, aInfo, aParentSource))) {
+    return nullptr;
+  }
+  stream->mStreamType =
+      GetStreamTypeFromMimeType(aInfo.GetAsVideoInfo()->mMimeType);
+  MOZ_ASSERT(StreamTypeIsVideo(stream->mStreamType));
+  stream->mHasReceivedInitialCreateDecoderConfig = false;
+  // Start with an invalid handle; a real one arrives later via
+  // SetDCompSurfaceHandle().
+  stream->SetDCompSurfaceHandle(INVALID_HANDLE_VALUE, gfx::IntSize{});
+  return stream;
+}
+
+// Called off the task queue; the compositor reference is stored on the task
+// queue. A pending drain may be blocked on this, so try resolving it.
+void MFMediaEngineVideoStream::SetKnowsCompositor(
+    layers::KnowsCompositor* aKnowsCompositor) {
+  ComPtr<MFMediaEngineVideoStream> self = this;
+  Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction(
+      "MFMediaEngineStream::SetKnowsCompositor",
+      [self, knowCompositor = RefPtr<layers::KnowsCompositor>{aKnowsCompositor},
+       this]() {
+        mKnowsCompositor = knowCompositor;
+        LOG("Set SetKnowsCompositor=%p", mKnowsCompositor.get());
+        ResolvePendingDrainPromiseIfNeeded();
+      }));
+}
+
+// Stores the dcomp surface handle (task-queue member) and, under the mutex,
+// updates the display size shared with OutputDataInternal(). A new handle
+// forces re-creation of the DcompSurfaceImage.
+void MFMediaEngineVideoStream::SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle,
+                                                     gfx::IntSize aDisplay) {
+  ComPtr<MFMediaEngineVideoStream> self = this;
+  Unused << mTaskQueue->Dispatch(NS_NewRunnableFunction(
+      "MFMediaEngineStream::SetDCompSurfaceHandle",
+      [self, aDCompSurfaceHandle, aDisplay, this]() {
+        if (mDCompSurfaceHandle == aDCompSurfaceHandle) {
+          return;
+        }
+        mDCompSurfaceHandle = aDCompSurfaceHandle;
+        mNeedRecreateImage = true;
+        {
+          MutexAutoLock lock(mMutex);
+          // Only accept the new display size together with a valid handle.
+          if (aDCompSurfaceHandle != INVALID_HANDLE_VALUE &&
+              aDisplay != mDisplay) {
+            LOG("Update display [%dx%d] -> [%dx%d]", mDisplay.Width(),
+                mDisplay.Height(), aDisplay.Width(), aDisplay.Height());
+            mDisplay = aDisplay;
+          }
+        }
+        LOG("Set DCompSurfaceHandle, handle=%p", mDCompSurfaceHandle);
+        ResolvePendingDrainPromiseIfNeeded();
+      }));
+}
+
+// Builds the IMFMediaType describing this video track (subtype, frame size,
+// pixel aspect ratio, aperture, rotation, transfer function, primaries).
+// For encrypted content the type is additionally wrapped in
+// MFMediaType_Protected. Also records mIsEncrypted and mDisplay as a side
+// effect. Returns an MF HRESULT; *aMediaType is only written on success.
+HRESULT MFMediaEngineVideoStream::CreateMediaType(const TrackInfo& aInfo,
+                                                  IMFMediaType** aMediaType) {
+  auto& videoInfo = *aInfo.GetAsVideoInfo();
+  mIsEncrypted = videoInfo.mCrypto.IsEncrypted();
+
+  GUID subType = VideoMimeTypeToMediaFoundationSubtype(videoInfo.mMimeType);
+  NS_ENSURE_TRUE(subType != GUID_NULL, MF_E_TOPO_CODEC_NOT_FOUND);
+
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/media-type-attributes
+  ComPtr<IMFMediaType> mediaType;
+  RETURN_IF_FAILED(wmf::MFCreateMediaType(&mediaType));
+  RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+  RETURN_IF_FAILED(mediaType->SetGUID(MF_MT_SUBTYPE, subType));
+
+  const auto& image = videoInfo.mImage;
+  UINT32 imageWidth = image.Width();
+  UINT32 imageHeight = image.Height();
+  RETURN_IF_FAILED(MFSetAttributeSize(mediaType.Get(), MF_MT_FRAME_SIZE,
+                                      imageWidth, imageHeight));
+
+  UINT32 displayWidth = videoInfo.mDisplay.Width();
+  UINT32 displayHeight = videoInfo.mDisplay.Height();
+  {
+    // mDisplay is read on other threads (e.g. OutputDataInternal).
+    MutexAutoLock lock(mMutex);
+    mDisplay = videoInfo.mDisplay;
+  }
+  // PAR = DAR / SAR = (DW / DH) / (SW / SH) = (DW * SH) / (DH * SW)
+  RETURN_IF_FAILED(MFSetAttributeRatio(
+      mediaType.Get(), MF_MT_PIXEL_ASPECT_RATIO, displayWidth * imageHeight,
+      displayHeight * imageWidth));
+
+  // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/ns-mfobjects-mfoffset
+  // The value of the MFOffset number is value + (fract / 65536.0f).
+  static const auto ToMFOffset = [](float aValue) {
+    MFOffset offset;
+    offset.value = static_cast<short>(aValue);
+    offset.fract = static_cast<WORD>(65536 * (aValue - offset.value));
+    return offset;
+  };
+  MFVideoArea area;
+  area.OffsetX = ToMFOffset(videoInfo.ImageRect().x);
+  area.OffsetY = ToMFOffset(videoInfo.ImageRect().y);
+  area.Area = {(LONG)imageWidth, (LONG)imageHeight};
+  RETURN_IF_FAILED(mediaType->SetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&area,
+                                      sizeof(area)));
+
+  // https://docs.microsoft.com/en-us/windows/win32/api/mfapi/ne-mfapi-mfvideorotationformat
+  static const auto ToMFVideoRotationFormat =
+      [](VideoInfo::Rotation aRotation) {
+        using Rotation = VideoInfo::Rotation;
+        switch (aRotation) {
+          case Rotation::kDegree_0:
+            return MFVideoRotationFormat_0;
+          case Rotation::kDegree_90:
+            return MFVideoRotationFormat_90;
+          case Rotation::kDegree_180:
+            return MFVideoRotationFormat_180;
+          default:
+            MOZ_ASSERT(aRotation == Rotation::kDegree_270);
+            return MFVideoRotationFormat_270;
+        }
+      };
+  const auto rotation = ToMFVideoRotationFormat(videoInfo.mRotation);
+  RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_VIDEO_ROTATION, rotation));
+
+  static const auto ToMFVideoTransFunc =
+      [](const Maybe<gfx::YUVColorSpace>& aColorSpace) {
+        using YUVColorSpace = gfx::YUVColorSpace;
+        if (!aColorSpace) {
+          return MFVideoTransFunc_Unknown;
+        }
+        // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/ne-mfobjects-mfvideotransferfunction
+        switch (*aColorSpace) {
+          case YUVColorSpace::BT601:
+          case YUVColorSpace::BT709:
+            return MFVideoTransFunc_709;
+          case YUVColorSpace::BT2020:
+            return MFVideoTransFunc_2020;
+          case YUVColorSpace::Identity:
+            return MFVideoTransFunc_sRGB;
+          default:
+            return MFVideoTransFunc_Unknown;
+        }
+      };
+  const auto transFunc = ToMFVideoTransFunc(videoInfo.mColorSpace);
+  RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_TRANSFER_FUNCTION, transFunc));
+
+  static const auto ToMFVideoPrimaries =
+      [](const Maybe<gfx::YUVColorSpace>& aColorSpace) {
+        using YUVColorSpace = gfx::YUVColorSpace;
+        if (!aColorSpace) {
+          return MFVideoPrimaries_Unknown;
+        }
+        // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/ne-mfobjects-mfvideoprimaries
+        switch (*aColorSpace) {
+          case YUVColorSpace::BT601:
+            return MFVideoPrimaries_Unknown;
+          case YUVColorSpace::BT709:
+            return MFVideoPrimaries_BT709;
+          case YUVColorSpace::BT2020:
+            return MFVideoPrimaries_BT2020;
+          case YUVColorSpace::Identity:
+            return MFVideoPrimaries_BT709;
+          default:
+            return MFVideoPrimaries_Unknown;
+        }
+      };
+  const auto videoPrimaries = ToMFVideoPrimaries(videoInfo.mColorSpace);
+  RETURN_IF_FAILED(mediaType->SetUINT32(MF_MT_VIDEO_PRIMARIES, videoPrimaries));
+
+  LOG("Created video type, subtype=%s, image=[%ux%u], display=[%ux%u], "
+      "rotation=%s, tranFuns=%s, primaries=%s, encrypted=%d",
+      GUIDToStr(subType), imageWidth, imageHeight, displayWidth, displayHeight,
+      MFVideoRotationFormatToStr(rotation),
+      MFVideoTransferFunctionToStr(transFunc),
+      MFVideoPrimariesToStr(videoPrimaries), mIsEncrypted);
+  if (IsEncrypted()) {
+    ComPtr<IMFMediaType> protectedMediaType;
+    RETURN_IF_FAILED(wmf::MFWrapMediaType(mediaType.Get(),
+                                          MFMediaType_Protected, subType,
+                                          protectedMediaType.GetAddressOf()));
+    LOG("Wrap MFMediaType_Video into MFMediaType_Protected");
+    *aMediaType = protectedMediaType.Detach();
+  } else {
+    *aMediaType = mediaType.Detach();
+  }
+  return S_OK;
+}
+
+bool MFMediaEngineVideoStream::HasEnoughRawData() const {
+  // If more than this much raw video is queued, we'll hold off request more
+  // video.
+  return mRawDataQueueForFeedingEngine.Duration() >=
+         StaticPrefs::media_wmf_media_engine_raw_data_threshold_video();
+}
+
+// Returns true when a DcompSurfaceImage exists (creating/recreating it if
+// necessary). Requires both a valid dcomp surface handle and a compositor;
+// otherwise no video frame can be produced.
+bool MFMediaEngineVideoStream::IsDCompImageReady() {
+  AssertOnTaskQueue();
+  if (!mDCompSurfaceHandle || mDCompSurfaceHandle == INVALID_HANDLE_VALUE) {
+    LOGV("Can't create image without a valid dcomp surface handle");
+    return false;
+  }
+
+  if (!mKnowsCompositor) {
+    LOGV("Can't create image without the knows compositor");
+    return false;
+  }
+
+  if (!mDcompSurfaceImage || mNeedRecreateImage) {
+    // Lock protects mDisplay, which is updated from other entry points.
+    MutexAutoLock lock(mMutex);
+    // DirectComposition only supports RGBA. We use DXGI_FORMAT_B8G8R8A8_UNORM
+    // as a default because we can't know what format the dcomp surface is.
+    // https://docs.microsoft.com/en-us/windows/win32/api/dcomp/nf-dcomp-idcompositionsurfacefactory-createsurface
+    mDcompSurfaceImage = new layers::DcompSurfaceImage(
+        mDCompSurfaceHandle, mDisplay, gfx::SurfaceFormat::B8G8R8A8,
+        mKnowsCompositor);
+    mNeedRecreateImage = false;
+    LOG("Created dcomp surface image, handle=%p, size=[%u,%u]",
+        mDCompSurfaceHandle, mDisplay.Width(), mDisplay.Height());
+  }
+  return true;
+}
+
+// Pops one raw sample and pairs it with the (shared) dcomp surface image to
+// produce a VideoData. Returns nullptr when there is no queued sample or the
+// dcomp image is not ready yet.
+already_AddRefed<MediaData> MFMediaEngineVideoStream::OutputDataInternal() {
+  AssertOnTaskQueue();
+  if (mRawDataQueueForGeneratingOutput.GetSize() == 0 || !IsDCompImageReady()) {
+    return nullptr;
+  }
+  RefPtr<MediaRawData> sample = mRawDataQueueForGeneratingOutput.PopFront();
+  RefPtr<VideoData> output;
+  {
+    MutexAutoLock lock(mMutex);
+    output = VideoData::CreateFromImage(
+        mDisplay, sample->mOffset, sample->mTime, sample->mDuration,
+        mDcompSurfaceImage, sample->mKeyframe, sample->mTimecode);
+  }
+  return output.forget();
+}
+
+// If the dcomp image isn't ready we cannot emit any decoded frames yet, so
+// park the drain on mPendingDrainPromise; it is resolved later by
+// ResolvePendingDrainPromiseIfNeeded(). Otherwise defer to the base class.
+RefPtr<MediaDataDecoder::DecodePromise> MFMediaEngineVideoStream::Drain() {
+  AssertOnTaskQueue();
+  // NOTE(review): `outputs` below is unused in this override.
+  MediaDataDecoder::DecodedData outputs;
+  if (!IsDCompImageReady()) {
+    LOGV("Waiting for dcomp image for draining");
+    return mPendingDrainPromise.Ensure(__func__);
+  }
+  return MFMediaEngineStream::Drain();
+}
+
+// Completes a drain that was deferred in Drain() because the dcomp image was
+// not ready: drains all currently-producible outputs and resolves the promise.
+// No-op if there is no pending drain or the image still isn't ready.
+void MFMediaEngineVideoStream::ResolvePendingDrainPromiseIfNeeded() {
+  AssertOnTaskQueue();
+  if (mPendingDrainPromise.IsEmpty()) {
+    return;
+  }
+  if (!IsDCompImageReady()) {
+    return;
+  }
+  MediaDataDecoder::DecodedData outputs;
+  while (RefPtr<MediaData> outputData = OutputDataInternal()) {
+    outputs.AppendElement(outputData);
+    LOGV("Output data [%" PRId64 ",%" PRId64 "]",
+         outputData->mTime.ToMicroseconds(),
+         outputData->GetEndTime().ToMicroseconds());
+  }
+  mPendingDrainPromise.Resolve(std::move(outputs), __func__);
+  LOG("Resolved pending drain promise");
+}
+
+// H264 samples need AnnexB conversion before being fed to the engine; other
+// codecs are passed through unchanged.
+MediaDataDecoder::ConversionRequired MFMediaEngineVideoStream::NeedsConversion()
+    const {
+  return mStreamType == WMFStreamType::H264
+             ? MediaDataDecoder::ConversionRequired::kNeedAnnexB
+             : MediaDataDecoder::ConversionRequired::kNeedNone;
+}
+
+// The first SetConfig() after creation only records that the initial config
+// arrived; any subsequent call means an inband config change detected by the
+// media change monitor and triggers UpdateConfig().
+void MFMediaEngineVideoStream::SetConfig(const TrackInfo& aConfig) {
+  MOZ_ASSERT(aConfig.IsVideo());
+  ComPtr<MFMediaEngineStream> self = this;
+  Unused << mTaskQueue->Dispatch(
+      NS_NewRunnableFunction("MFMediaEngineStream::SetConfig",
+                             [self, info = *aConfig.GetAsVideoInfo(), this]() {
+                               if (mHasReceivedInitialCreateDecoderConfig) {
+                                 // Here indicating a new config for video,
+                                 // which is triggered by the media change
+                                 // monitor, so we need to update the config.
+                                 UpdateConfig(info);
+                               }
+                               mHasReceivedInitialCreateDecoderConfig = true;
+                             }));
+}
+
+// Rebuilds the media type and stream descriptor for a new video config and
+// queues MEStreamFormatChanged so the engine picks up the change.
+void MFMediaEngineVideoStream::UpdateConfig(const VideoInfo& aInfo) {
+  AssertOnTaskQueue();
+  // Disable explicit format change event for H264 to allow switching to the
+  // new stream without a full re-create, which will be much faster. This is
+  // also due to the fact that the MFT decoder can handle some format changes
+  // without a format change event. For format changes that the MFT decoder
+  // cannot support (e.g. codec change), the playback will fail later with
+  // MF_E_INVALIDMEDIATYPE (0xC00D36B4).
+  if (mStreamType == WMFStreamType::H264) {
+    return;
+  }
+
+  LOG("Video config changed, will update stream descriptor");
+  PROFILER_MARKER_TEXT("VideoConfigChange", MEDIA_PLAYBACK, {},
+                       nsPrintfCString("stream=%s, id=%" PRIu64,
+                                       GetDescriptionName().get(), mStreamId));
+  ComPtr<IMFMediaType> mediaType;
+  RETURN_VOID_IF_FAILED(CreateMediaType(aInfo, mediaType.GetAddressOf()));
+  RETURN_VOID_IF_FAILED(GenerateStreamDescriptor(mediaType));
+  RETURN_VOID_IF_FAILED(mMediaEventQueue->QueueEventParamUnk(
+      MEStreamFormatChanged, GUID_NULL, S_OK, mediaType.Get()));
+}
+
+// Rejects any drain still waiting on a dcomp image so callers don't hang
+// across shutdown.
+void MFMediaEngineVideoStream::ShutdownCleanUpOnTaskQueue() {
+  AssertOnTaskQueue();
+  mPendingDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
+}
+
+bool MFMediaEngineVideoStream::IsEnded() const {
+  AssertOnTaskQueue();
+  // If a video only contains one frame, the media engine won't return a decoded
+  // frame before we tell it the track is already ended. However, due to the
+  // constraint of our media pipeline, the format reader won't notify EOS until
+  // the draining finishes, which causes a deadlock. Therefore, we would
+  // consider having pending drain promise as a sign of EOS as well, in order to
+  // get the decoded frame and resolve the drain promise.
+  return (mReceivedEOS || !mPendingDrainPromise.IsEmpty()) &&
+         mRawDataQueueForFeedingEngine.GetSize() == 0;
+}
+
+// mIsEncrypted is set in CreateMediaType() from the track's crypto info.
+bool MFMediaEngineVideoStream::IsEncrypted() const { return mIsEncrypted; }
+
+// Maps the stream type chosen at Create() time to a stable codec label.
+nsCString MFMediaEngineVideoStream::GetCodecName() const {
+  switch (mStreamType) {
+    case WMFStreamType::H264:
+      return "h264"_ns;
+    case WMFStreamType::VP8:
+      return "vp8"_ns;
+    case WMFStreamType::VP9:
+      return "vp9"_ns;
+    case WMFStreamType::AV1:
+      return "av1"_ns;
+    default:
+      return "unknown"_ns;
+  };
+}
+
+#undef LOG
+#undef LOGV
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaEngineVideoStream.h b/dom/media/platforms/wmf/MFMediaEngineVideoStream.h
new file mode 100644
index 0000000000..df17c264e4
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaEngineVideoStream.h
@@ -0,0 +1,107 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEVIDEOSTREAM_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEVIDEOSTREAM_H
+
+#include "MFMediaEngineStream.h"
+#include "WMFUtils.h"
+#include "mozilla/Atomics.h"
+#include "mozilla/Mutex.h"
+
+namespace mozilla {
+namespace layers {
+
+class Image;
+class DcompSurfaceImage;
+
+} // namespace layers
+
+class MFMediaSource;
+
+// Video specialization of MFMediaEngineStream: pairs raw samples fed to the
+// media engine with the DirectComposition surface the engine renders into.
+class MFMediaEngineVideoStream final : public MFMediaEngineStream {
+ public:
+  MFMediaEngineVideoStream() = default;
+
+  static MFMediaEngineVideoStream* Create(uint64_t aStreamId,
+                                          const TrackInfo& aInfo,
+                                          MFMediaSource* aParentSource);
+  nsCString GetDescriptionName() const override {
+    return "media engine video stream"_ns;
+  }
+
+  nsCString GetCodecName() const override;
+
+  TrackInfo::TrackType TrackType() override {
+    return TrackInfo::TrackType::kVideoTrack;
+  }
+
+  void SetKnowsCompositor(layers::KnowsCompositor* aKnowsCompositor);
+
+  void SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle, gfx::IntSize aDisplay);
+
+  MFMediaEngineVideoStream* AsVideoStream() override { return this; }
+
+  MediaDataDecoder::ConversionRequired NeedsConversion() const override;
+
+  // Called by MFMediaEngineParent when we are creating a video decoder for
+  // the remote decoder. This is used to detect if the inband video config
+  // change happens during playback.
+  void SetConfig(const TrackInfo& aConfig);
+
+  RefPtr<MediaDataDecoder::DecodePromise> Drain() override;
+
+  bool IsEncrypted() const override;
+
+ private:
+  HRESULT
+  CreateMediaType(const TrackInfo& aInfo, IMFMediaType** aMediaType) override;
+
+  bool HasEnoughRawData() const override;
+
+  void UpdateConfig(const VideoInfo& aInfo);
+
+  already_AddRefed<MediaData> OutputDataInternal() override;
+
+  bool IsDCompImageReady();
+
+  void ResolvePendingDrainPromiseIfNeeded();
+
+  void ShutdownCleanUpOnTaskQueue() override;
+
+  bool IsEnded() const override;
+
+  // Task queue only members.
+  // Handle to the engine's DirectComposition surface; INVALID_HANDLE_VALUE
+  // until SetDCompSurfaceHandle() delivers a real one.
+  HANDLE mDCompSurfaceHandle;
+  // Set when the handle changes, forcing mDcompSurfaceImage re-creation.
+  bool mNeedRecreateImage;
+  RefPtr<layers::KnowsCompositor> mKnowsCompositor;
+
+  Mutex mMutex{"MFMediaEngineVideoStream"};
+  gfx::IntSize mDisplay MOZ_GUARDED_BY(mMutex);
+
+  // Set on the initialization, won't be changed after that.
+  WMFStreamType mStreamType;
+
+  // Created and accessed in the decoder thread.
+  RefPtr<layers::DcompSurfaceImage> mDcompSurfaceImage;
+
+  // This flag is used to check if the video config changes detected by the
+  // media config monitor. When the video decoder get created first, we will set
+  // this flag to true, then we know any config being set afterward indicating
+  // a new config change.
+  bool mHasReceivedInitialCreateDecoderConfig;
+
+  // When draining, the track should return all decoded data. However, if the
+  // dcomp image hasn't been ready yet, then we won't have any decoded data to
+  // return. This promise is used for that case, and will be resolved once we
+  // have dcomp image.
+  MozPromiseHolder<MediaDataDecoder::DecodePromise> mPendingDrainPromise;
+
+  // Set when `CreateMediaType()` is called.
+  bool mIsEncrypted = false;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIAENGINEVIDEOSTREAM_H
diff --git a/dom/media/platforms/wmf/MFMediaSource.cpp b/dom/media/platforms/wmf/MFMediaSource.cpp
new file mode 100644
index 0000000000..ace3c7988c
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaSource.cpp
@@ -0,0 +1,605 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFMediaSource.h"
+
+#include <mfapi.h>
+#include <mfidl.h>
+#include <stdint.h>
+
+#include "MFCDMProxy.h"
+#include "MFMediaEngineAudioStream.h"
+#include "MFMediaEngineUtils.h"
+#include "MFMediaEngineVideoStream.h"
+#include "VideoUtils.h"
+#include "WMF.h"
+#include "mozilla/Atomics.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/TaskQueue.h"
+
+namespace mozilla {
+
+#define LOG(msg, ...) \
+ MOZ_LOG(gMFMediaEngineLog, LogLevel::Debug, \
+ ("MFMediaSource=%p, " msg, this, ##__VA_ARGS__))
+
+using Microsoft::WRL::ComPtr;
+
+MFMediaSource::MFMediaSource()
+    : mPresentationEnded(false), mIsAudioEnded(false), mIsVideoEnded(false) {
+  MOZ_COUNT_CTOR(MFMediaSource);
+}
+
+MFMediaSource::~MFMediaSource() {
+  // TODO : notify cdm about the last key id?
+  MOZ_COUNT_DTOR(MFMediaSource);
+}
+
+// Creates the per-track streams, hooks their ended events onto the manager
+// thread, and creates the MF event queue. A track that is absent is treated
+// as already ended so presentation-end detection still works.
+HRESULT MFMediaSource::RuntimeClassInitialize(
+    const Maybe<AudioInfo>& aAudio, const Maybe<VideoInfo>& aVideo,
+    nsISerialEventTarget* aManagerThread) {
+  // On manager thread.
+  MutexAutoLock lock(mMutex);
+
+  // NOTE(review): function-local static shared by all MFMediaSource
+  // instances; presumably only ever incremented on the manager thread —
+  // TODO confirm.
+  static uint64_t streamId = 1;
+
+  mTaskQueue = TaskQueue::Create(
+      GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), "MFMediaSource");
+  mManagerThread = aManagerThread;
+  MOZ_ASSERT(mManagerThread, "manager thread shouldn't be nullptr!");
+
+  if (aAudio) {
+    // Attach() adopts the raw COM pointer returned by Create() without an
+    // extra AddRef.
+    mAudioStream.Attach(
+        MFMediaEngineAudioStream::Create(streamId++, *aAudio, this));
+    if (!mAudioStream) {
+      NS_WARNING("Failed to create audio stream");
+      return E_FAIL;
+    }
+    mAudioStreamEndedListener = mAudioStream->EndedEvent().Connect(
+        mManagerThread, this, &MFMediaSource::HandleStreamEnded);
+  } else {
+    mIsAudioEnded = true;
+  }
+
+  if (aVideo) {
+    mVideoStream.Attach(
+        MFMediaEngineVideoStream::Create(streamId++, *aVideo, this));
+    if (!mVideoStream) {
+      NS_WARNING("Failed to create video stream");
+      return E_FAIL;
+    }
+    mVideoStreamEndedListener = mVideoStream->EndedEvent().Connect(
+        mManagerThread, this, &MFMediaSource::HandleStreamEnded);
+  } else {
+    mIsVideoEnded = true;
+  }
+
+  RETURN_IF_FAILED(wmf::MFCreateEventQueue(&mMediaEventQueue));
+
+  LOG("Initialized a media source");
+  return S_OK;
+}
+
+// Reports static source capabilities; the source is always seekable and
+// pausable (live characteristics are not set).
+IFACEMETHODIMP MFMediaSource::GetCharacteristics(DWORD* aCharacteristics) {
+  // This could be run on both mf thread pool and manager thread.
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      return MF_E_SHUTDOWN;
+    }
+  }
+  // https://docs.microsoft.com/en-us/windows/win32/api/mfidl/ne-mfidl-mfmediasource_characteristics
+  *aCharacteristics = MFMEDIASOURCE_CAN_SEEK | MFMEDIASOURCE_CAN_PAUSE;
+  return S_OK;
+}
+
+// Builds a presentation descriptor containing the stream descriptor of each
+// existing track and marks every stream selected.
+IFACEMETHODIMP MFMediaSource::CreatePresentationDescriptor(
+    IMFPresentationDescriptor** aPresentationDescriptor) {
+  AssertOnMFThreadPool();
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return MF_E_SHUTDOWN;
+  }
+
+  LOG("CreatePresentationDescriptor");
+  // See steps of creating the presentation descriptor
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/writing-a-custom-media-source#creating-the-presentation-descriptor
+  ComPtr<IMFPresentationDescriptor> presentationDescriptor;
+  nsTArray<ComPtr<IMFStreamDescriptor>> streamDescriptors;
+
+  DWORD audioDescriptorId = 0, videoDescriptorId = 0;
+  if (mAudioStream) {
+    ComPtr<IMFStreamDescriptor>* descriptor = streamDescriptors.AppendElement();
+    RETURN_IF_FAILED(
+        mAudioStream->GetStreamDescriptor(descriptor->GetAddressOf()));
+    audioDescriptorId = mAudioStream->DescriptorId();
+  }
+
+  if (mVideoStream) {
+    ComPtr<IMFStreamDescriptor>* descriptor = streamDescriptors.AppendElement();
+    RETURN_IF_FAILED(
+        mVideoStream->GetStreamDescriptor(descriptor->GetAddressOf()));
+    videoDescriptorId = mVideoStream->DescriptorId();
+  }
+
+  const DWORD descCount = static_cast<DWORD>(streamDescriptors.Length());
+  MOZ_ASSERT(descCount <= 2);
+  // ComPtr<IMFStreamDescriptor> is pointer-sized, so the array of ComPtr can
+  // be reinterpreted as the raw pointer array the MF API expects.
+  RETURN_IF_FAILED(wmf::MFCreatePresentationDescriptor(
+      descCount,
+      reinterpret_cast<IMFStreamDescriptor**>(streamDescriptors.Elements()),
+      &presentationDescriptor));
+
+  // Select default streams for the presentation descriptor.
+  for (DWORD idx = 0; idx < descCount; idx++) {
+    ComPtr<IMFStreamDescriptor> streamDescriptor;
+    BOOL selected;
+    RETURN_IF_FAILED(presentationDescriptor->GetStreamDescriptorByIndex(
+        idx, &selected, &streamDescriptor));
+    if (selected) {
+      continue;
+    }
+    RETURN_IF_FAILED(presentationDescriptor->SelectStream(idx));
+    DWORD streamId;
+    // NOTE(review): the HRESULT of GetStreamIdentifier is ignored here; the
+    // id is only used for logging.
+    streamDescriptor->GetStreamIdentifier(&streamId);
+    LOG("  Select stream (id=%lu)", streamId);
+  }
+
+  LOG("Created a presentation descriptor (a=%lu,v=%lu)", audioDescriptorId,
+      videoDescriptorId);
+  *aPresentationDescriptor = presentationDescriptor.Detach();
+  return S_OK;
+}
+
+// Starts (or seeks) the source: per selected stream, queues MENewStream /
+// MEUpdatedStream and forwards Start/Seek, then queues the matching source
+// event (MESourceStarted / MESourceSeeked) and resets the ended flags.
+IFACEMETHODIMP MFMediaSource::Start(
+    IMFPresentationDescriptor* aPresentationDescriptor,
+    const GUID* aGuidTimeFormat, const PROPVARIANT* aStartPosition) {
+  AssertOnMFThreadPool();
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return MF_E_SHUTDOWN;
+  }
+
+  // See detailed steps in following documents.
+  // https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nf-mfidl-imfmediasource-start
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/writing-a-custom-media-source#starting-the-media-source
+
+  // A call to Start results in a seek if the previous state was started or
+  // paused, and the new starting position is not VT_EMPTY.
+  const bool isSeeking =
+      IsSeekable() && ((mState == State::Started || mState == State::Paused) &&
+                       aStartPosition->vt != VT_EMPTY);
+  nsAutoCString startPosition;
+  if (aStartPosition->vt == VT_I8) {
+    startPosition.AppendInt(aStartPosition->hVal.QuadPart);
+  } else if (aStartPosition->vt == VT_EMPTY) {
+    startPosition.AppendLiteral("empty");
+  }
+  LOG("Start, start position=%s, isSeeking=%d", startPosition.get(), isSeeking);
+
+  // Ask IMFMediaStream to send stream events.
+  DWORD streamDescCount = 0;
+  RETURN_IF_FAILED(
+      aPresentationDescriptor->GetStreamDescriptorCount(&streamDescCount));
+
+  // TODO : should event orders be exactly same as msdn's order?
+  for (DWORD idx = 0; idx < streamDescCount; idx++) {
+    ComPtr<IMFStreamDescriptor> streamDescriptor;
+    BOOL selected;
+    RETURN_IF_FAILED(aPresentationDescriptor->GetStreamDescriptorByIndex(
+        idx, &selected, &streamDescriptor));
+
+    DWORD streamId;
+    RETURN_IF_FAILED(streamDescriptor->GetStreamIdentifier(&streamId));
+
+    // Map the descriptor id back onto our audio/video stream.
+    ComPtr<MFMediaEngineStream> stream;
+    if (mAudioStream && mAudioStream->DescriptorId() == streamId) {
+      stream = mAudioStream;
+    } else if (mVideoStream && mVideoStream->DescriptorId() == streamId) {
+      stream = mVideoStream;
+    }
+    NS_ENSURE_TRUE(stream, MF_E_INVALIDREQUEST);
+
+    if (selected) {
+      // MENewStream for a stream seen the first time, MEUpdatedStream after.
+      RETURN_IF_FAILED(mMediaEventQueue->QueueEventParamUnk(
+          stream->IsSelected() ? MEUpdatedStream : MENewStream, GUID_NULL, S_OK,
+          stream.Get()));
+      // Need to select stream first before doing other operations.
+      stream->SetSelected(true);
+      if (isSeeking) {
+        RETURN_IF_FAILED(stream->Seek(aStartPosition));
+      } else {
+        RETURN_IF_FAILED(stream->Start(aStartPosition));
+      }
+    } else {
+      stream->SetSelected(false);
+    }
+  }
+
+  // Send source event.
+  RETURN_IF_FAILED(QueueEvent(isSeeking ? MESourceSeeked : MESourceStarted,
+                              GUID_NULL, S_OK, aStartPosition));
+  mState = State::Started;
+  mPresentationEnded = false;
+  if (mAudioStream && mAudioStream->IsSelected()) {
+    mIsAudioEnded = false;
+  }
+  if (mVideoStream && mVideoStream->IsSelected()) {
+    mIsVideoEnded = false;
+  }
+  LOG("Started media source");
+  return S_OK;
+}
+
+// Queues MESourceStopped, forwards Stop to both streams and moves the state
+// machine to Stopped.
+IFACEMETHODIMP MFMediaSource::Stop() {
+  AssertOnMFThreadPool();
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return MF_E_SHUTDOWN;
+  }
+
+  LOG("Stop");
+  RETURN_IF_FAILED(QueueEvent(MESourceStopped, GUID_NULL, S_OK, nullptr));
+  if (mAudioStream) {
+    RETURN_IF_FAILED(mAudioStream->Stop());
+  }
+  if (mVideoStream) {
+    RETURN_IF_FAILED(mVideoStream->Stop());
+  }
+
+  mState = State::Stopped;
+  LOG("Stopped media source");
+  return S_OK;
+}
+
+// Pause is only a valid transition from Started, per the IMFMediaSource
+// contract.
+IFACEMETHODIMP MFMediaSource::Pause() {
+  AssertOnMFThreadPool();
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return MF_E_SHUTDOWN;
+  }
+  if (mState != State::Started) {
+    return MF_E_INVALID_STATE_TRANSITION;
+  }
+
+  LOG("Pause");
+  RETURN_IF_FAILED(QueueEvent(MESourcePaused, GUID_NULL, S_OK, nullptr));
+  if (mAudioStream) {
+    RETURN_IF_FAILED(mAudioStream->Pause());
+  }
+  if (mVideoStream) {
+    RETURN_IF_FAILED(mVideoStream->Pause());
+  }
+
+  mState = State::Paused;
+  LOG("Paused media source");
+  return S_OK;
+}
+
+IFACEMETHODIMP MFMediaSource::Shutdown() {
+  // Could be called on either manager thread or MF thread pool.
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return MF_E_SHUTDOWN;
+  }
+
+  LOG("Shutdown");
+  // After this method is called, all IMFMediaEventQueue methods return
+  // MF_E_SHUTDOWN.
+  RETURN_IF_FAILED(mMediaEventQueue->Shutdown());
+  mState = State::Shutdowned;
+  LOG("Shutdowned media source");
+  return S_OK;
+}
+
+// Separate from Shutdown(): tears down the streams and the internal task
+// queue on the manager thread (Shutdown() only closes the MF event queue).
+void MFMediaSource::ShutdownTaskQueue() {
+  AssertOnManagerThread();
+  LOG("ShutdownTaskQueue");
+  MutexAutoLock lock(mMutex);
+  if (mAudioStream) {
+    mAudioStream->Shutdown();
+    mAudioStream = nullptr;
+    mAudioStreamEndedListener.DisconnectIfExists();
+  }
+  if (mVideoStream) {
+    mVideoStream->Shutdown();
+    mVideoStream = nullptr;
+    mVideoStreamEndedListener.DisconnectIfExists();
+  }
+  Unused << mTaskQueue->BeginShutdown();
+  mTaskQueue = nullptr;
+}
+
+// IMFMediaEventGenerator: all four methods simply forward to the MF event
+// queue created in RuntimeClassInitialize(). After Shutdown() the queue
+// itself returns MF_E_SHUTDOWN.
+IFACEMETHODIMP MFMediaSource::GetEvent(DWORD aFlags, IMFMediaEvent** aEvent) {
+  MOZ_ASSERT(mMediaEventQueue);
+  return mMediaEventQueue->GetEvent(aFlags, aEvent);
+}
+
+IFACEMETHODIMP MFMediaSource::BeginGetEvent(IMFAsyncCallback* aCallback,
+                                            IUnknown* aState) {
+  MOZ_ASSERT(mMediaEventQueue);
+  return mMediaEventQueue->BeginGetEvent(aCallback, aState);
+}
+
+IFACEMETHODIMP MFMediaSource::EndGetEvent(IMFAsyncResult* aResult,
+                                          IMFMediaEvent** aEvent) {
+  MOZ_ASSERT(mMediaEventQueue);
+  return mMediaEventQueue->EndGetEvent(aResult, aEvent);
+}
+
+// Queues a source-level event and records it in the log and profiler.
+IFACEMETHODIMP MFMediaSource::QueueEvent(MediaEventType aType,
+                                         REFGUID aExtendedType, HRESULT aStatus,
+                                         const PROPVARIANT* aValue) {
+  MOZ_ASSERT(mMediaEventQueue);
+  RETURN_IF_FAILED(mMediaEventQueue->QueueEventParamVar(aType, aExtendedType,
+                                                        aStatus, aValue));
+  LOG("Queued event %s", MediaEventTypeToStr(aType));
+  PROFILER_MARKER_TEXT("MFMediaSource::QueueEvent", MEDIA_PLAYBACK, {},
+                       nsPrintfCString("%s", MediaEventTypeToStr(aType)));
+  return S_OK;
+}
+
+bool MFMediaSource::IsSeekable() const {
+  // TODO : check seekable from info.
+  return true;
+}
+
+// Forwards end-of-stream to the matching track's stream; no-op after
+// shutdown.
+void MFMediaSource::NotifyEndOfStream(TrackInfo::TrackType aType) {
+  AssertOnManagerThread();
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return;
+  }
+  if (aType == TrackInfo::TrackType::kAudioTrack) {
+    MOZ_ASSERT(mAudioStream);
+    mAudioStream->NotifyEndOfStream();
+  } else if (aType == TrackInfo::TrackType::kVideoTrack) {
+    MOZ_ASSERT(mVideoStream);
+    mVideoStream->NotifyEndOfStream();
+  }
+}
+
+// Invoked (on the manager thread) when a track's EndedEvent fires. Tracks
+// per-type ended flags and queues MEEndOfPresentation once both tracks have
+// ended; if the presentation already ended, the event is re-queued.
+void MFMediaSource::HandleStreamEnded(TrackInfo::TrackType aType) {
+  AssertOnManagerThread();
+  MutexAutoLock lock(mMutex);
+  if (mState == State::Shutdowned) {
+    return;
+  }
+  if (mPresentationEnded) {
+    LOG("Presentation is ended already");
+    RETURN_VOID_IF_FAILED(
+        QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, nullptr));
+    return;
+  }
+
+  LOG("Handle %s stream ended", TrackTypeToStr(aType));
+  if (aType == TrackInfo::TrackType::kAudioTrack) {
+    mIsAudioEnded = true;
+  } else if (aType == TrackInfo::TrackType::kVideoTrack) {
+    mIsVideoEnded = true;
+  } else {
+    MOZ_ASSERT_UNREACHABLE("Incorrect track type!");
+  }
+  mPresentationEnded = mIsAudioEnded && mIsVideoEnded;
+  LOG("PresentationEnded=%d, audioEnded=%d, videoEnded=%d",
+      !!mPresentationEnded, mIsAudioEnded, mIsVideoEnded);
+  PROFILER_MARKER_TEXT(
+      "  MFMediaSource::HandleStreamEnded", MEDIA_PLAYBACK, {},
+      nsPrintfCString("PresentationEnded=%d, audioEnded=%d, videoEnded=%d",
+                      !!mPresentationEnded, mIsAudioEnded, mIsVideoEnded));
+  if (mPresentationEnded) {
+    RETURN_VOID_IF_FAILED(
+        QueueEvent(MEEndOfPresentation, GUID_NULL, S_OK, nullptr));
+  }
+}
+
+// Relays the dcomp surface handle to the video stream (if any).
+void MFMediaSource::SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle,
+                                          gfx::IntSize aDisplay) {
+  AssertOnManagerThread();
+  MutexAutoLock lock(mMutex);
+  if (mVideoStream) {
+    mVideoStream->AsVideoStream()->SetDCompSurfaceHandle(aDCompSurfaceHandle,
+                                                         aDisplay);
+  }
+}
+
+// IMFGetService: only the rate-control service is exposed; it resolves back
+// to this object's IMFRateControl / IMFRateSupport interfaces.
+IFACEMETHODIMP MFMediaSource::GetService(REFGUID aGuidService, REFIID aRiid,
+                                         LPVOID* aResult) {
+  if (!IsEqualGUID(aGuidService, MF_RATE_CONTROL_SERVICE)) {
+    return MF_E_UNSUPPORTED_SERVICE;
+  }
+  return QueryInterface(aRiid, aResult);
+}
+
+// IMFRateSupport: slowest forward rate is 0 (reverse playback unsupported).
+IFACEMETHODIMP MFMediaSource::GetSlowestRate(MFRATE_DIRECTION aDirection,
+                                             BOOL aSupportsThinning,
+                                             float* aRate) {
+  AssertOnMFThreadPool();
+  MOZ_ASSERT(aRate);
+  *aRate = 0.0f;
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      return MF_E_SHUTDOWN;
+    }
+  }
+  if (aDirection == MFRATE_REVERSE) {
+    return MF_E_REVERSE_UNSUPPORTED;
+  }
+  return S_OK;
+}
+
+// IMFRateSupport: fastest forward rate is hard-coded to 16x.
+IFACEMETHODIMP MFMediaSource::GetFastestRate(MFRATE_DIRECTION aDirection,
+                                             BOOL aSupportsThinning,
+                                             float* aRate) {
+  AssertOnMFThreadPool();
+  MOZ_ASSERT(aRate);
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      *aRate = 0.0f;
+      return MF_E_SHUTDOWN;
+    }
+  }
+  if (aDirection == MFRATE_REVERSE) {
+    return MF_E_REVERSE_UNSUPPORTED;
+  }
+  *aRate = 16.0f;
+  return S_OK;
+}
+
+// Validates aNewRate against [slowest, fastest]; thinning is not supported.
+// On success *aSupportedRate echoes the requested rate.
+IFACEMETHODIMP MFMediaSource::IsRateSupported(BOOL aSupportsThinning,
+                                              float aNewRate,
+                                              float* aSupportedRate) {
+  AssertOnMFThreadPool();
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      return MF_E_SHUTDOWN;
+    }
+  }
+
+  if (aSupportedRate) {
+    *aSupportedRate = 0.0f;
+  }
+
+  MFRATE_DIRECTION direction = aNewRate >= 0 ? MFRATE_FORWARD : MFRATE_REVERSE;
+  float fastestRate = 0.0f, slowestRate = 0.0f;
+  // NOTE(review): the HRESULTs of these two calls are ignored; on failure
+  // both bounds stay 0 and the comparisons below still run.
+  GetFastestRate(direction, aSupportsThinning, &fastestRate);
+  GetSlowestRate(direction, aSupportsThinning, &slowestRate);
+
+  if (aSupportsThinning) {
+    return MF_E_THINNING_UNSUPPORTED;
+  } else if (aNewRate < slowestRate) {
+    return MF_E_REVERSE_UNSUPPORTED;
+  } else if (aNewRate > fastestRate) {
+    return MF_E_UNSUPPORTED_RATE;
+  }
+
+  if (aSupportedRate) {
+    *aSupportedRate = aNewRate;
+  }
+  return S_OK;
+}
+
+// IMFRateControl: validates and stores the new rate (in mPlaybackRate via
+// IsRateSupported) and announces it with MESourceRateChanged.
+IFACEMETHODIMP MFMediaSource::SetRate(BOOL aSupportsThinning, float aRate) {
+  AssertOnMFThreadPool();
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      return MF_E_SHUTDOWN;
+    }
+  }
+
+  HRESULT hr = IsRateSupported(aSupportsThinning, aRate, &mPlaybackRate);
+  if (FAILED(hr)) {
+    LOG("Unsupported playback rate %f, error=%lX", aRate, hr);
+    return hr;
+  }
+
+  PROPVARIANT varRate;
+  varRate.vt = VT_R4;
+  varRate.fltVal = mPlaybackRate;
+  LOG("Set playback rate %f", mPlaybackRate);
+  return QueueEvent(MESourceRateChanged, GUID_NULL, S_OK, &varRate);
+}
+
+IFACEMETHODIMP MFMediaSource::GetRate(BOOL* aSupportsThinning, float* aRate) {
+  AssertOnMFThreadPool();
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      return MF_E_SHUTDOWN;
+    }
+  }
+  *aSupportsThinning = FALSE;
+  *aRate = mPlaybackRate;
+  return S_OK;
+}
+
+// IMFTrustedInput (DRM path): resolves the ITA for an encrypted stream via
+// the CDM proxy. Without MOZ_WMF_CDM this compiles to a shutdown check plus
+// S_OK.
+HRESULT MFMediaSource::GetInputTrustAuthority(DWORD aStreamId, REFIID aRiid,
+                                              IUnknown** aITAOut) {
+  // TODO : add threading assertion, not sure what thread it would be running on
+  // now.
+  {
+    MutexAutoLock lock(mMutex);
+    if (mState == State::Shutdowned) {
+      return MF_E_SHUTDOWN;
+    }
+  }
+#ifdef MOZ_WMF_CDM
+  if (!mCDMProxy) {
+    return MF_E_NOT_PROTECTED;
+  }
+
+  // TODO : verify if this aStreamId is really matching our stream id or not.
+  ComPtr<MFMediaEngineStream> stream = GetStreamByIndentifier(aStreamId);
+  if (!stream) {
+    return E_INVALIDARG;
+  }
+
+  if (!stream->IsEncrypted()) {
+    return MF_E_NOT_PROTECTED;
+  }
+
+  RETURN_IF_FAILED(
+      mCDMProxy->GetInputTrustAuthority(aStreamId, nullptr, 0, aRiid, aITAOut));
+#endif
+  return S_OK;
+}
+
+// Thread-safe accessors: each takes mMutex so they may be called from any
+// thread.
+MFMediaSource::State MFMediaSource::GetState() const {
+  MutexAutoLock lock(mMutex);
+  return mState;
+}
+
+MFMediaEngineStream* MFMediaSource::GetAudioStream() {
+  MutexAutoLock lock(mMutex);
+  return mAudioStream.Get();
+}
+MFMediaEngineStream* MFMediaSource::GetVideoStream() {
+  MutexAutoLock lock(mMutex);
+  return mVideoStream.Get();
+}
+
+// Looks up a stream by its descriptor id; nullptr if neither track matches.
+// (Name keeps the upstream spelling "Indentifier".)
+MFMediaEngineStream* MFMediaSource::GetStreamByIndentifier(
+    DWORD aStreamId) const {
+  MutexAutoLock lock(mMutex);
+  if (mAudioStream && mAudioStream->DescriptorId() == aStreamId) {
+    return mAudioStream.Get();
+  }
+  if (mVideoStream && mVideoStream->DescriptorId() == aStreamId) {
+    return mVideoStream.Get();
+  }
+  return nullptr;
+}
+
+#ifdef MOZ_WMF_CDM
+void MFMediaSource::SetCDMProxy(MFCDMProxy* aCDMProxy) {
+  // TODO : add threading assertion, not sure what thread it would be running on
+  // now.
+  mCDMProxy = aCDMProxy;
+  // TODO : ask cdm proxy to refresh trusted input
+}
+#endif
+
+// The source counts as encrypted if either track is encrypted.
+bool MFMediaSource::IsEncrypted() const {
+  MutexAutoLock lock(mMutex);
+  return (mAudioStream && mAudioStream->IsEncrypted()) ||
+         (mVideoStream && mVideoStream->IsEncrypted());
+}
+
+void MFMediaSource::AssertOnManagerThread() const {
+  MOZ_ASSERT(mManagerThread->IsOnCurrentThread());
+}
+
+void MFMediaSource::AssertOnMFThreadPool() const {
+  // We can't really assert the thread id from thread pool, because it would
+  // change any time. So we just assert this is not the manager thread, and use
+  // the explicit function name to indicate what thread we should run on.
+  MOZ_ASSERT(!mManagerThread->IsOnCurrentThread());
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFMediaSource.h b/dom/media/platforms/wmf/MFMediaSource.h
new file mode 100644
index 0000000000..735d53579e
--- /dev/null
+++ b/dom/media/platforms/wmf/MFMediaSource.h
@@ -0,0 +1,188 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MFMEDIASOURCE_H
+#define DOM_MEDIA_PLATFORM_WMF_MFMEDIASOURCE_H
+
+#include <mfidl.h>
+#include <wrl.h>
+
+#include "MediaInfo.h"
+#include "MediaEventSource.h"
+#include "MFMediaEngineExtra.h"
+#include "MFMediaEngineStream.h"
+#include "mozilla/EnumSet.h"
+#include "mozilla/TaskQueue.h"
+
+namespace mozilla {
+
+class MFCDMProxy;
+
+// An event to indicate a need for a certain type of sample.
+struct SampleRequest {
+  SampleRequest(TrackInfo::TrackType aType, bool aIsEnough)
+      : mType(aType), mIsEnough(aIsEnough) {}
+  // The track (audio/video) this request refers to.
+  TrackInfo::TrackType mType;
+  // True when the stream already has enough buffered data (stop sending);
+  // false when more samples are needed.
+  bool mIsEnough;
+};
+
+/**
+ * MFMediaSource is a custom source for the media engine, the media engine would
+ * ask the source for the characteristics and the presentation descriptor to
+ * know how to react with the source. This source is also responsible to
+ * dispatch events to the media engine to notify the status changes.
+ *
+ * https://docs.microsoft.com/en-us/windows/win32/api/mfidl/nn-mfidl-imfmediasource
+ */
+class MFMediaSource : public Microsoft::WRL::RuntimeClass<
+                          Microsoft::WRL::RuntimeClassFlags<
+                              Microsoft::WRL::RuntimeClassType::ClassicCom>,
+                          IMFMediaSource, IMFRateControl, IMFRateSupport,
+                          IMFGetService, IMFTrustedInput> {
+ public:
+  MFMediaSource();
+  ~MFMediaSource();
+
+  // Creates the audio/video streams for the given track infos (either may be
+  // Nothing) and remembers the manager thread used for the source's tasks.
+  HRESULT RuntimeClassInitialize(const Maybe<AudioInfo>& aAudio,
+                                 const Maybe<VideoInfo>& aVideo,
+                                 nsISerialEventTarget* aManagerThread);
+
+  // Methods for IMFMediaSource
+  IFACEMETHODIMP GetCharacteristics(DWORD* aCharacteristics) override;
+  IFACEMETHODIMP CreatePresentationDescriptor(
+      IMFPresentationDescriptor** aPresentationDescriptor) override;
+  IFACEMETHODIMP Start(IMFPresentationDescriptor* aPresentationDescriptor,
+                       const GUID* aGuidTimeFormat,
+                       const PROPVARIANT* aStartPosition) override;
+  IFACEMETHODIMP Stop() override;
+  IFACEMETHODIMP Pause() override;
+  IFACEMETHODIMP Shutdown() override;
+
+  // Methods for IMFMediaEventGenerator, IMFMediaSource derives from
+  // IMFMediaEventGenerator.
+  IFACEMETHODIMP GetEvent(DWORD aFlags, IMFMediaEvent** aEvent) override;
+  IFACEMETHODIMP BeginGetEvent(IMFAsyncCallback* aCallback,
+                               IUnknown* aState) override;
+  IFACEMETHODIMP EndGetEvent(IMFAsyncResult* aResult,
+                             IMFMediaEvent** aEvent) override;
+  IFACEMETHODIMP QueueEvent(MediaEventType aType, REFGUID aExtendedType,
+                            HRESULT aStatus,
+                            const PROPVARIANT* aValue) override;
+
+  // IMFGetService
+  IFACEMETHODIMP GetService(REFGUID aGuidService, REFIID aRiid,
+                            LPVOID* aResult) override;
+
+  // IMFRateSupport
+  IFACEMETHODIMP GetSlowestRate(MFRATE_DIRECTION aDirection,
+                                BOOL aSupportsThinning, float* aRate) override;
+  IFACEMETHODIMP GetFastestRate(MFRATE_DIRECTION aDirection,
+                                BOOL aSupportsThinning, float* aRate) override;
+  IFACEMETHODIMP IsRateSupported(BOOL aSupportsThinning, float aNewRate,
+                                 float* aSupportedRate) override;
+
+  // IMFRateControl
+  IFACEMETHODIMP SetRate(BOOL aSupportsThinning, float aRate) override;
+  IFACEMETHODIMP GetRate(BOOL* aSupportsThinning, float* aRate) override;
+
+  // IMFTrustedInput
+  IFACEMETHODIMP GetInputTrustAuthority(DWORD aStreamId, REFIID aRiid,
+                                        IUnknown** aITAOut) override;
+
+  // Raw stream accessors; may return nullptr when the track does not exist.
+  MFMediaEngineStream* GetAudioStream();
+  MFMediaEngineStream* GetVideoStream();
+
+  // Returns the stream whose descriptor id matches, or nullptr.
+  // (NOTE: "Indentifier" is a historical typo for "Identifier".)
+  MFMediaEngineStream* GetStreamByIndentifier(DWORD aStreamId) const;
+
+#ifdef MOZ_WMF_CDM
+  // Sets the CDM proxy used to create input trust authorities for encrypted
+  // playback (see GetInputTrustAuthority()).
+  void SetCDMProxy(MFCDMProxy* aCDMProxy);
+#endif
+
+  TaskQueue* GetTaskQueue() const { return mTaskQueue; }
+
+  MediaEventSource<SampleRequest>& RequestSampleEvent() {
+    return mRequestSampleEvent;
+  }
+
+  // Called from the content process to notify that no more encoded data in that
+  // type of track.
+  void NotifyEndOfStream(TrackInfo::TrackType aType);
+
+  // Called from the MF stream to indicate that the stream has provided last
+  // encoded sample to the media engine.
+  void HandleStreamEnded(TrackInfo::TrackType aType);
+
+  enum class State {
+    Initialized,
+    Started,
+    Stopped,
+    Paused,
+    Shutdowned,
+  };
+  // Thread-safe snapshot of the current playback state (see mState).
+  State GetState() const;
+
+  void SetDCompSurfaceHandle(HANDLE aDCompSurfaceHandle, gfx::IntSize aDisplay);
+
+  void ShutdownTaskQueue();
+
+  // True when any existing stream is encrypted.
+  bool IsEncrypted() const;
+
+ private:
+  // Debug-only threading checks; see the .cpp for why the MF thread pool
+  // cannot be asserted positively.
+  void AssertOnManagerThread() const;
+  void AssertOnMFThreadPool() const;
+
+  void NotifyEndOfStreamInternal(TrackInfo::TrackType aType);
+
+  bool IsSeekable() const;
+
+  // A thread-safe event queue.
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/media-event-generators#implementing-imfmediaeventgenerator
+  Microsoft::WRL::ComPtr<IMFMediaEventQueue> mMediaEventQueue;
+
+  // The thread used to run the engine streams' tasks.
+  RefPtr<TaskQueue> mTaskQueue;
+
+  // The thread used to run the media source's tasks.
+  RefPtr<nsISerialEventTarget> mManagerThread;
+
+  // MFMediaEngineStream will notify us when we need more sample.
+  friend class MFMediaEngineStream;
+  MediaEventProducer<SampleRequest> mRequestSampleEvent;
+
+  MediaEventListener mAudioStreamEndedListener;
+  MediaEventListener mVideoStreamEndedListener;
+
+  // This class would be run/accessed on two threads, MF thread pool and the
+  // manager thread. Following members could be used across threads so they need
+  // to be thread-safe.
+
+  mutable Mutex mMutex{"MFMediaEngineSource"};
+
+  // True if the playback is ended. Use and modify on both the manager thread
+  // and MF thread pool.
+  bool mPresentationEnded MOZ_GUARDED_BY(mMutex);
+  bool mIsAudioEnded MOZ_GUARDED_BY(mMutex);
+  bool mIsVideoEnded MOZ_GUARDED_BY(mMutex);
+
+  // Modify on MF thread pool and the manager thread, read on any threads.
+  State mState MOZ_GUARDED_BY(mMutex);
+
+  Microsoft::WRL::ComPtr<MFMediaEngineStream> mAudioStream
+      MOZ_GUARDED_BY(mMutex);
+  Microsoft::WRL::ComPtr<MFMediaEngineStream> mVideoStream
+      MOZ_GUARDED_BY(mMutex);
+
+  // Thread-safe members END
+
+  // Modify and access on MF thread pool.
+  float mPlaybackRate = 0.0f;
+
+#ifdef MOZ_WMF_CDM
+  RefPtr<MFCDMProxy> mCDMProxy;
+#endif
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MFMEDIASOURCE_H
diff --git a/dom/media/platforms/wmf/MFPMPHostWrapper.cpp b/dom/media/platforms/wmf/MFPMPHostWrapper.cpp
new file mode 100644
index 0000000000..64266f4ad5
--- /dev/null
+++ b/dom/media/platforms/wmf/MFPMPHostWrapper.cpp
@@ -0,0 +1,66 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFPMPHostWrapper.h"
+
+#include "MFMediaEngineUtils.h"
+#include "WMF.h"
+#include "mozilla/EMEUtils.h"
+
+namespace mozilla {
+
+using Microsoft::WRL::ComPtr;
+
+#define LOG(msg, ...) EME_LOG("MFPMPHostWrapper=%p, " msg, this, ##__VA_ARGS__)
+
+// Stores the real IMFPMPHost that every other method forwards to.
+// Rejects an empty ComPtr so the pass-through methods (LockProcess, etc.)
+// can't null-deref later.
+HRESULT MFPMPHostWrapper::RuntimeClassInitialize(
+    Microsoft::WRL::ComPtr<IMFPMPHost>& aHost) {
+  if (!aHost) {
+    return E_POINTER;
+  }
+  mPMPHost = aHost;
+  return S_OK;
+}
+
+// Forwards to the wrapped IMFPMPHost to pin the PMP process alive.
+STDMETHODIMP MFPMPHostWrapper::LockProcess() { return mPMPHost->LockProcess(); }
+
+// Forwards to the wrapped IMFPMPHost to release a LockProcess() pin.
+STDMETHODIMP MFPMPHostWrapper::UnlockProcess() {
+  return mPMPHost->UnlockProcess();
+}
+
+// Creates an object inside the PMP process via the wrapped IMFPMPHost. The
+// class name and (optionally) the serialized content of aStream are packed
+// into an IMFAttributes blob, which is serialized and handed to
+// CreateObjectByCLSID using CLSID_EMEStoreActivate as the activation helper.
+STDMETHODIMP MFPMPHostWrapper::ActivateClassById(LPCWSTR aId, IStream* aStream,
+                                                 REFIID aRiid,
+                                                 void** aActivatedClass) {
+  LOG("ActivateClassById, id=%ls", aId);
+  ComPtr<IMFAttributes> creationAttributes;
+  RETURN_IF_FAILED(wmf::MFCreateAttributes(&creationAttributes, 2));
+  RETURN_IF_FAILED(creationAttributes->SetString(GUID_ClassName, aId));
+
+  if (aStream) {
+    // Copy the caller's stream content into a blob attribute.
+    // NOTE(review): only cbSize.LowPart is used, so this assumes the stream
+    // is smaller than 4 GiB — confirm callers guarantee that.
+    STATSTG statstg;
+    RETURN_IF_FAILED(
+        aStream->Stat(&statstg, STATFLAG_NOOPEN | STATFLAG_NONAME));
+    nsTArray<uint8_t> streamBlob;
+    streamBlob.SetLength(statstg.cbSize.LowPart);
+    unsigned long readSize = 0;
+    RETURN_IF_FAILED(
+        aStream->Read(&streamBlob[0], streamBlob.Length(), &readSize));
+    // readSize (bytes actually read) is used for the blob length, not the
+    // requested length, so a short read is handled gracefully.
+    RETURN_IF_FAILED(creationAttributes->SetBlob(GUID_ObjectStream,
+                                                 &streamBlob[0], readSize));
+  }
+
+  // Serialize the attributes into a fresh in-memory stream and rewind it so
+  // the PMP host can read it from the beginning.
+  ComPtr<IStream> outputStream;
+  RETURN_IF_FAILED(CreateStreamOnHGlobal(nullptr, TRUE, &outputStream));
+  RETURN_IF_FAILED(wmf::MFSerializeAttributesToStream(creationAttributes.Get(),
+                                                      0, outputStream.Get()));
+  RETURN_IF_FAILED(outputStream->Seek({}, STREAM_SEEK_SET, nullptr));
+
+  ComPtr<IMFActivate> activator;
+  RETURN_IF_FAILED(mPMPHost->CreateObjectByCLSID(
+      CLSID_EMEStoreActivate, outputStream.Get(), IID_PPV_ARGS(&activator)));
+  RETURN_IF_FAILED(activator->ActivateObject(aRiid, aActivatedClass));
+  LOG("Done ActivateClassById, id=%ls", aId);
+  return S_OK;
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFPMPHostWrapper.h b/dom/media/platforms/wmf/MFPMPHostWrapper.h
new file mode 100644
index 0000000000..3b644283b7
--- /dev/null
+++ b/dom/media/platforms/wmf/MFPMPHostWrapper.h
@@ -0,0 +1,42 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_PLATFORM_WMF_MPMPHOSTWRAPPER_H
+#define DOM_MEDIA_PLATFORM_WMF_MPMPHOSTWRAPPER_H
+
+#include <wrl.h>
+#include <wrl/client.h>
+
+#include "MFCDMExtra.h"
+
+namespace mozilla {
+
+// This class is used to create and manage PMP sessions. For PlayReady CDM,
+// it needs to connect with IMFPMPHostApp first before generating any request.
+// That behavior is undocumented on MSDN; see more details in
+// https://github.com/microsoft/media-foundation/issues/37#issuecomment-1197321484
+class MFPMPHostWrapper : public Microsoft::WRL::RuntimeClass<
+                             Microsoft::WRL::RuntimeClassFlags<
+                                 Microsoft::WRL::RuntimeClassType::ClassicCom>,
+                             IMFPMPHostApp> {
+ public:
+  MFPMPHostWrapper() = default;
+  ~MFPMPHostWrapper() = default;
+
+  // Stores the real IMFPMPHost this wrapper forwards to.
+  HRESULT RuntimeClassInitialize(Microsoft::WRL::ComPtr<IMFPMPHost>& aHost);
+
+  // Pass-throughs that pin/unpin the PMP process lifetime.
+  STDMETHODIMP LockProcess() override;
+
+  STDMETHODIMP UnlockProcess() override;
+
+  // Creates an object in the PMP process from a class name plus an optional
+  // serialized payload stream.
+  STDMETHODIMP ActivateClassById(LPCWSTR aId, IStream* aStream, REFIID aRiid,
+                                 void** aActivatedClass) override;
+
+ private:
+  // The real PMP host supplied at initialization.
+  Microsoft::WRL::ComPtr<IMFPMPHost> mPMPHost;
+};
+
+} // namespace mozilla
+
+#endif // DOM_MEDIA_PLATFORM_WMF_MPMPHOSTWRAPPER_H
diff --git a/dom/media/platforms/wmf/MFTDecoder.cpp b/dom/media/platforms/wmf/MFTDecoder.cpp
new file mode 100644
index 0000000000..6b66a9e399
--- /dev/null
+++ b/dom/media/platforms/wmf/MFTDecoder.cpp
@@ -0,0 +1,430 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFTDecoder.h"
+#include "WMFUtils.h"
+#include "mozilla/Logging.h"
+#include "nsThreadUtils.h"
+#include "mozilla/mscom/COMWrappers.h"
+#include "mozilla/mscom/Utils.h"
+#include "PlatformDecoderModule.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+MFTDecoder::MFTDecoder() {
+ memset(&mInputStreamInfo, 0, sizeof(MFT_INPUT_STREAM_INFO));
+ memset(&mOutputStreamInfo, 0, sizeof(MFT_OUTPUT_STREAM_INFO));
+}
+
+// mActivate is only set by the category-based Create(); the CLSID-based
+// Create() never sets it, in which case no shutdown call is needed.
+MFTDecoder::~MFTDecoder() {
+  if (mActivate) {
+    // Releases all internal references to the created IMFTransform.
+    // https://docs.microsoft.com/en-us/windows/win32/api/mfobjects/nf-mfobjects-imfactivate-shutdownobject
+    mActivate->ShutdownObject();
+  }
+}
+
+// Instantiates the MFT directly via CoCreateInstance from its COM class id.
+// On failure, a warning is emitted and the HRESULT is returned to the caller.
+HRESULT MFTDecoder::Create(const GUID& aCLSID) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+
+  HRESULT hr = mscom::wrapped::CoCreateInstance(
+      aCLSID, nullptr, CLSCTX_INPROC_SERVER,
+      IID_PPV_ARGS(static_cast<IMFTransform**>(getter_AddRefs(mDecoder))));
+  NS_WARNING_ASSERTION(SUCCEEDED(hr), "Failed to create MFT by CLSID");
+  return hr;
+}
+
+// Enumerates registered MFTs for the given category/subtypes and activates
+// the best (first, after sort-and-filter) match into mDecoder.
+HRESULT
+MFTDecoder::Create(const GUID& aCategory, const GUID& aInSubtype,
+                   const GUID& aOutSubtype) {
+  // Note: IMFTransform is documented to only be safe on MTA threads.
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+
+  // Use video by default, but select audio if necessary.
+  const GUID major = aCategory == MFT_CATEGORY_AUDIO_DECODER
+                         ? MFMediaType_Audio
+                         : MFMediaType_Video;
+
+  // Ignore null GUIDs to allow searching for all decoders supporting
+  // just one input or output type.
+  auto createInfo = [&major](const GUID& subtype) -> MFT_REGISTER_TYPE_INFO* {
+    if (IsEqualGUID(subtype, GUID_NULL)) {
+      return nullptr;
+    }
+
+    // Heap-allocated; freed unconditionally right after MFTEnumEx below.
+    MFT_REGISTER_TYPE_INFO* info = new MFT_REGISTER_TYPE_INFO();
+    info->guidMajorType = major;
+    info->guidSubtype = subtype;
+    return info;
+  };
+  const MFT_REGISTER_TYPE_INFO* inInfo = createInfo(aInSubtype);
+  const MFT_REGISTER_TYPE_INFO* outInfo = createInfo(aOutSubtype);
+
+  // Request a decoder from the Windows API.
+  HRESULT hr;
+  IMFActivate** acts = nullptr;
+  UINT32 actsNum = 0;
+
+  hr = wmf::MFTEnumEx(aCategory, MFT_ENUM_FLAG_SORTANDFILTER, inInfo, outInfo,
+                      &acts, &actsNum);
+  delete inInfo;
+  delete outInfo;
+  if (FAILED(hr)) {
+    NS_WARNING(nsPrintfCString("MFTEnumEx failed with code %lx", hr).get());
+    return hr;
+  }
+  if (actsNum == 0) {
+    NS_WARNING("MFTEnumEx returned no IMFActivate instances");
+    return WINCODEC_ERR_COMPONENTNOTFOUND;
+  }
+  // MFTEnumEx transferred ownership of acts[] to us: release every entry we
+  // don't keep, then free the array itself.
+  auto guard = MakeScopeExit([&] {
+    // Start from index 1, acts[0] will be stored as a RefPtr to release later.
+    for (UINT32 i = 1; i < actsNum; i++) {
+      acts[i]->Release();
+    }
+    CoTaskMemFree(acts);
+  });
+
+  // Create the IMFTransform to do the decoding.
+  // Note: Ideally we would cache the IMFActivate and call
+  // IMFActivate::DetachObject, but doing so causes the MFTs to fail on
+  // MFT_MESSAGE_SET_D3D_MANAGER.
+  mActivate = RefPtr<IMFActivate>(acts[0]);
+  hr = mActivate->ActivateObject(
+      IID_PPV_ARGS(static_cast<IMFTransform**>(getter_AddRefs(mDecoder))));
+  NS_WARNING_ASSERTION(
+      SUCCEEDED(hr),
+      nsPrintfCString("IMFActivate::ActivateObject failed with code %lx", hr)
+          .get());
+  return hr;
+}
+
+// Configures the MFT's input and output types, then kicks off streaming.
+// aCallback lets the caller inspect/tweak the selected output type (e.g. set
+// extra attributes) before it is applied.
+HRESULT
+MFTDecoder::SetMediaTypes(IMFMediaType* aInputType, IMFMediaType* aOutputType,
+                          std::function<HRESULT(IMFMediaType*)>&& aCallback) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+
+  // Set the input type to the one the caller gave us...
+  HRESULT hr = mDecoder->SetInputType(0, aInputType, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // ...then pick one of the MFT's available output types by matching subtype.
+  GUID currentSubtype = {0};
+  hr = aOutputType->GetGUID(MF_MT_SUBTYPE, &currentSubtype);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = SetDecoderOutputType(currentSubtype, aOutputType, std::move(aCallback));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = mDecoder->GetInputStreamInfo(0, &mInputStreamInfo);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Notify the MFT that we're about to start feeding data.
+  hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return S_OK;
+}
+
+// Returns the MFT's global attribute store, or nullptr on failure.
+already_AddRefed<IMFAttributes> MFTDecoder::GetAttributes() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  RefPtr<IMFAttributes> attributes;
+  const HRESULT hr = mDecoder->GetAttributes(getter_AddRefs(attributes));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+  return attributes.forget();
+}
+
+// Returns the attribute store of output stream 0, or nullptr on failure.
+already_AddRefed<IMFAttributes> MFTDecoder::GetOutputStreamAttributes() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  RefPtr<IMFAttributes> attributes;
+  const HRESULT hr =
+      mDecoder->GetOutputStreamAttributes(0, getter_AddRefs(attributes));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+  return attributes.forget();
+}
+
+// Re-selects an output type matching the previously chosen subtype —
+// typically used after the MFT signals a stream change.
+HRESULT
+MFTDecoder::FindDecoderOutputType() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mOutputType, "SetDecoderTypes must have been called once");
+
+  return FindDecoderOutputTypeWithSubtype(mOutputSubType);
+}
+
+// Selects an output type with the given subtype, without customizing it
+// (no-op callback).
+HRESULT
+MFTDecoder::FindDecoderOutputTypeWithSubtype(const GUID& aSubType) {
+  return SetDecoderOutputType(aSubType, nullptr,
+                              [](IMFMediaType*) { return S_OK; });
+}
+
+// Walks the MFT's available output types and applies the first one whose
+// subtype matches aSubType, caching the resulting stream info. Returns E_FAIL
+// when no available type matches.
+HRESULT
+MFTDecoder::SetDecoderOutputType(
+    const GUID& aSubType, IMFMediaType* aTypeToUse,
+    std::function<HRESULT(IMFMediaType*)>&& aCallback) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  if (!aTypeToUse) {
+    aTypeToUse = mOutputType;
+  }
+
+  // Iterate the enumerate the output types, until we find one compatible
+  // with what we need.
+  RefPtr<IMFMediaType> outputType;
+  UINT32 typeIndex = 0;
+  while (SUCCEEDED(mDecoder->GetOutputAvailableType(
+      0, typeIndex++, getter_AddRefs(outputType)))) {
+    GUID outSubtype = {0};
+    HRESULT hr = outputType->GetGUID(MF_MT_SUBTYPE, &outSubtype);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    if (aSubType == outSubtype) {
+      // Give the caller a chance to customize the type before applying it.
+      hr = aCallback(outputType);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+      hr = mDecoder->SetOutputType(0, outputType, 0);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+      hr = mDecoder->GetOutputStreamInfo(0, &mOutputStreamInfo);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+      // Remember whether the MFT allocates its own output samples; Output()
+      // must not allocate one in that case.
+      mMFTProvidesOutputSamples = IsFlagSet(mOutputStreamInfo.dwFlags,
+                                            MFT_OUTPUT_STREAM_PROVIDES_SAMPLES);
+
+      mOutputType = outputType;
+      mOutputSubType = outSubtype;
+
+      return S_OK;
+    }
+    outputType = nullptr;
+  }
+  return E_FAIL;
+}
+
+// Sends a control/notification message to the MFT. Any success code from
+// ProcessMessage is normalized to S_OK; failures are propagated.
+HRESULT
+MFTDecoder::SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+  LOG("Send message '%s'", MFTMessageTypeToStr(aMsg));
+  HRESULT hr = mDecoder->ProcessMessage(aMsg, aData);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  return S_OK;
+}
+
+// Builds an IMFSample wrapping a copy of aData, sized/aligned per the MFT's
+// input stream requirements. Timestamp and duration are in microseconds and
+// converted to 100 ns units.
+HRESULT
+MFTDecoder::CreateInputSample(const uint8_t* aData, uint32_t aDataSize,
+                              int64_t aTimestamp, int64_t aDuration,
+                              RefPtr<IMFSample>* aOutSample) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  HRESULT hr;
+  RefPtr<IMFSample> sample;
+  hr = wmf::MFCreateSample(getter_AddRefs(sample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // The buffer must be at least the MFT's minimum input size (cbSize) and
+  // large enough for the payload; MFCreateAlignedMemoryBuffer expects the
+  // alignment value as (alignment - 1).
+  RefPtr<IMFMediaBuffer> buffer;
+  int32_t bufferSize =
+      std::max<uint32_t>(uint32_t(mInputStreamInfo.cbSize), aDataSize);
+  UINT32 alignment =
+      (mInputStreamInfo.cbAlignment > 1) ? mInputStreamInfo.cbAlignment - 1 : 0;
+  hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment,
+                                        getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  DWORD maxLength = 0;
+  DWORD currentLength = 0;
+  BYTE* dst = nullptr;
+  hr = buffer->Lock(&dst, &maxLength, &currentLength);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Copy data into sample's buffer.
+  memcpy(dst, aData, aDataSize);
+
+  hr = buffer->Unlock();
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = buffer->SetCurrentLength(aDataSize);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = sample->AddBuffer(buffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = sample->SetSampleTime(UsecsToHNs(aTimestamp));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (aDuration == 0) {
+    // If the sample duration is 0, the decoder will try and estimate the
+    // duration. In practice this can lead to some wildly incorrect durations,
+    // as in bug 1560440. The Microsoft docs seem conflicting here with
+    // `IMFSample::SetSampleDuration` stating 'The duration can also be zero.
+    // This might be valid for some types of data.' However,
+    // `IMFSample::GetSampleDuration method` states 'If the retrieved duration
+    // is zero, or if the method returns MF_E_NO_SAMPLE_DURATION, the duration
+    // is unknown. In that case, it might be possible to calculate the duration
+    // from the media type--for example, by using the video frame rate or the
+    // audio sampling rate.' The latter of those seems to be how the decoder
+    // handles 0 duration, hence why it estimates.
+    //
+    // Since our demuxing pipeline can create 0 duration samples, and since the
+    // decoder will override them to something positive anyway, setting them to
+    // have a trivial duration seems like the lesser of evils.
+    aDuration = 1;
+  }
+  hr = sample->SetSampleDuration(UsecsToHNs(aDuration));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  *aOutSample = sample.forget();
+
+  return S_OK;
+}
+
+// Allocates an empty IMFSample sized/aligned per the MFT's output stream
+// requirements; used by Output() when the MFT does not allocate its own
+// samples.
+HRESULT
+MFTDecoder::CreateOutputSample(RefPtr<IMFSample>* aOutSample) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  HRESULT hr;
+  RefPtr<IMFSample> sample;
+  hr = wmf::MFCreateSample(getter_AddRefs(sample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // MFCreateAlignedMemoryBuffer expects the alignment value as
+  // (alignment - 1).
+  RefPtr<IMFMediaBuffer> buffer;
+  int32_t bufferSize = mOutputStreamInfo.cbSize;
+  UINT32 alignment = (mOutputStreamInfo.cbAlignment > 1)
+                         ? mOutputStreamInfo.cbAlignment - 1
+                         : 0;
+  hr = wmf::MFCreateAlignedMemoryBuffer(bufferSize, alignment,
+                                        getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = sample->AddBuffer(buffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  *aOutSample = sample.forget();
+
+  return S_OK;
+}
+
+// Pulls one decoded sample from the MFT into *aOutput. The caller may supply
+// its own sample in *aOutput; otherwise one is allocated here unless the MFT
+// provides its own. Returns MF_E_TRANSFORM_STREAM_CHANGE or
+// MF_E_TRANSFORM_NEED_MORE_INPUT as expected, non-error conditions.
+HRESULT
+MFTDecoder::Output(RefPtr<IMFSample>* aOutput) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  HRESULT hr;
+
+  MFT_OUTPUT_DATA_BUFFER output = {0};
+
+  bool providedSample = false;
+  RefPtr<IMFSample> sample;
+  if (*aOutput) {
+    // Caller supplied the output sample.
+    output.pSample = *aOutput;
+    providedSample = true;
+  } else if (!mMFTProvidesOutputSamples) {
+    // We must allocate the sample ourselves.
+    hr = CreateOutputSample(&sample);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    output.pSample = sample;
+  }
+
+  DWORD status = 0;
+  hr = mDecoder->ProcessOutput(0, 1, &output, &status);
+  if (output.pEvents) {
+    // We must release this, as per the IMFTransform::ProcessOutput()
+    // MSDN documentation.
+    output.pEvents->Release();
+    output.pEvents = nullptr;
+  }
+
+  if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+    // Output format changed; the caller must renegotiate the output type.
+    return MF_E_TRANSFORM_STREAM_CHANGE;
+  }
+
+  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+    // Not enough input to produce output. This is an expected failure,
+    // so don't warn on encountering it.
+    return hr;
+  }
+  // Treat other errors as unexpected, and warn.
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (!output.pSample) {
+    return S_OK;
+  }
+
+  if (mDiscontinuity) {
+    // First sample after a flush (or creation) is tagged as a discontinuity.
+    output.pSample->SetUINT32(MFSampleExtension_Discontinuity, TRUE);
+    mDiscontinuity = false;
+  }
+
+  *aOutput = output.pSample;  // AddRefs
+  if (mMFTProvidesOutputSamples && !providedSample) {
+    // If the MFT is providing samples, we must release the sample here.
+    // Typically only the H.264 MFT provides samples when using DXVA,
+    // and it always re-uses the same sample, so if we don't release it
+    // MFT::ProcessOutput() deadlocks waiting for the sample to be released.
+    output.pSample->Release();
+    output.pSample = nullptr;
+  }
+
+  return S_OK;
+}
+
+// Wraps the raw compressed data in an IMFSample (timestamps/durations in
+// microseconds) and submits it for decoding.
+HRESULT
+MFTDecoder::Input(const uint8_t* aData, uint32_t aDataSize, int64_t aTimestamp,
+                  int64_t aDuration) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+
+  RefPtr<IMFSample> input;
+  HRESULT hr =
+      CreateInputSample(aData, aDataSize, aTimestamp, aDuration, &input);
+  NS_ENSURE_TRUE(SUCCEEDED(hr) && input != nullptr, hr);
+
+  return Input(input);
+}
+
+// Submits a prepared IMFSample to the MFT.
+//
+// Returns:
+//  - MF_E_NOTACCEPTING if the MFT already has enough data to produce output;
+//    drain via Output() and resubmit.
+//  - E_POINTER if the decoder was never created.
+//  - S_OK on success.
+HRESULT
+MFTDecoder::Input(IMFSample* aSample) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  // Guard against use before Create() succeeded; every other entry point in
+  // this class performs the same check before touching mDecoder.
+  NS_ENSURE_TRUE(mDecoder != nullptr, E_POINTER);
+  HRESULT hr = mDecoder->ProcessInput(0, aSample, 0);
+  if (hr == MF_E_NOTACCEPTING) {
+    // MFT *already* has enough data to produce a sample. Retrieve it.
+    return MF_E_NOTACCEPTING;
+  }
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return S_OK;
+}
+
+// Discards all pending input/output in the MFT (use before seeking) and marks
+// the next output sample as a discontinuity.
+HRESULT
+MFTDecoder::Flush() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  HRESULT hr = SendMFTMessage(MFT_MESSAGE_COMMAND_FLUSH, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  mDiscontinuity = true;
+
+  return S_OK;
+}
+
+// Retrieves the currently configured input media type from the MFT.
+HRESULT
+MFTDecoder::GetInputMediaType(RefPtr<IMFMediaType>& aMediaType) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder, E_POINTER);
+  return mDecoder->GetInputCurrentType(0, getter_AddRefs(aMediaType));
+}
+
+// Retrieves the currently configured output media type from the MFT.
+HRESULT
+MFTDecoder::GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  NS_ENSURE_TRUE(mDecoder, E_POINTER);
+  return mDecoder->GetOutputCurrentType(0, getter_AddRefs(aMediaType));
+}
+
+#undef LOG
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/MFTDecoder.h b/dom/media/platforms/wmf/MFTDecoder.h
new file mode 100644
index 0000000000..7af99e550d
--- /dev/null
+++ b/dom/media/platforms/wmf/MFTDecoder.h
@@ -0,0 +1,132 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(MFTDecoder_h_)
+# define MFTDecoder_h_
+
+# include "WMF.h"
+# include "mozilla/ReentrantMonitor.h"
+# include "mozilla/RefPtr.h"
+# include "nsIThread.h"
+
+namespace mozilla {
+
+// Thin wrapper around a Media Foundation transform (IMFTransform) used as a
+// decoder. All methods assert they run on an MTA thread.
+class MFTDecoder final {
+  ~MFTDecoder();
+
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFTDecoder)
+
+  MFTDecoder();
+
+  // Creates the MFT by COM class ID.
+  //
+  // Params:
+  //  - aCLSID The COM class ID of the decoder.
+  HRESULT Create(const GUID& aCLSID);
+
+  // Creates the MFT by querying a category and media subtype.
+  // First thing to do as part of setup.
+  //
+  // Params:
+  //  - aCategory the GUID of the MFT category to use.
+  //  - aInSubType the GUID of the input MFT media type to use.
+  //               GUID_NULL may be used as a wildcard.
+  //  - aOutSubType the GUID of the output MFT media type to use.
+  //                GUID_NULL may be used as a wildcard.
+  HRESULT Create(const GUID& aCategory, const GUID& aInSubtype,
+                 const GUID& aOutSubtype);
+
+  // Sets the input and output media types. Call after Init().
+  //
+  // Params:
+  //  - aInputType needs at least major and minor types set.
+  //  - aOutputType needs at least major and minor types set.
+  //    This is used to select the matching output type out
+  //    of all the available output types of the MFT.
+  HRESULT SetMediaTypes(
+      IMFMediaType* aInputType, IMFMediaType* aOutputType,
+      std::function<HRESULT(IMFMediaType*)>&& aCallback =
+          [](IMFMediaType* aOutput) { return S_OK; });
+
+  // Returns the MFT's global IMFAttributes object.
+  already_AddRefed<IMFAttributes> GetAttributes();
+
+  // Returns the MFT's IMFAttributes object for an output stream.
+  already_AddRefed<IMFAttributes> GetOutputStreamAttributes();
+
+  // Retrieves the media type being input.
+  HRESULT GetInputMediaType(RefPtr<IMFMediaType>& aMediaType);
+
+  // Retrieves the media type being output. This may not be valid until
+  // the first sample is decoded.
+  HRESULT GetOutputMediaType(RefPtr<IMFMediaType>& aMediaType);
+  const GUID& GetOutputMediaSubType() const { return mOutputSubType; }
+
+  // Submits data into the MFT for processing.
+  //
+  // Returns:
+  //  - MF_E_NOTACCEPTING if the decoder can't accept input. The data
+  //    must be resubmitted after Output() stops producing output.
+  HRESULT Input(const uint8_t* aData, uint32_t aDataSize,
+                int64_t aTimestampUsecs, int64_t aDurationUsecs);
+  HRESULT Input(IMFSample* aSample);
+
+  // Builds an input IMFSample around a copy of aData (timestamps/durations in
+  // microseconds).
+  HRESULT CreateInputSample(const uint8_t* aData, uint32_t aDataSize,
+                            int64_t aTimestampUsecs, int64_t aDurationUsecs,
+                            RefPtr<IMFSample>* aOutSample);
+
+  // Retrieves output from the MFT. Call this once Input() returns
+  // MF_E_NOTACCEPTING. Some MFTs with hardware acceleration (the H.264
+  // decoder MFT in particular) can't handle it if clients hold onto
+  // references to the output IMFSample, so don't do that.
+  //
+  // Returns:
+  //  - MF_E_TRANSFORM_STREAM_CHANGE if the underlying stream output
+  //    type changed. Retrieve the output media type and reconfig client,
+  //    else you may misinterpret the MFT's output.
+  //  - MF_E_TRANSFORM_NEED_MORE_INPUT if no output can be produced
+  //    due to lack of input.
+  //  - S_OK if an output frame is produced.
+  HRESULT Output(RefPtr<IMFSample>* aOutput);
+
+  // Sends a flush message to the MFT. This causes it to discard all
+  // input data. Use before seeking.
+  HRESULT Flush();
+
+  // Sends a message to the MFT.
+  HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
+
+  // Re-select an output type (e.g. after a stream-change notification).
+  HRESULT FindDecoderOutputTypeWithSubtype(const GUID& aSubType);
+  HRESULT FindDecoderOutputType();
+
+ private:
+  // Will search a suitable MediaType using aTypeToUse if set, if not will
+  // use the current mOutputType.
+  HRESULT SetDecoderOutputType(
+      const GUID& aSubType, IMFMediaType* aTypeToUse,
+      std::function<HRESULT(IMFMediaType*)>&& aCallback);
+  HRESULT CreateOutputSample(RefPtr<IMFSample>* aOutSample);
+
+  // Cached stream requirements (sizes/alignment) from the MFT.
+  MFT_INPUT_STREAM_INFO mInputStreamInfo;
+  MFT_OUTPUT_STREAM_INFO mOutputStreamInfo;
+
+  // Set only by the category-based Create(); its ShutdownObject() is called
+  // in the destructor.
+  RefPtr<IMFActivate> mActivate;
+  RefPtr<IMFTransform> mDecoder;
+
+  // The currently selected output type and its subtype GUID.
+  RefPtr<IMFMediaType> mOutputType;
+  GUID mOutputSubType;
+
+  // True if the IMFTransform allocates the samples that it returns.
+  bool mMFTProvidesOutputSamples = false;
+
+  // True if we need to mark the next sample as a discontinuity.
+  bool mDiscontinuity = true;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/MFTEncoder.cpp b/dom/media/platforms/wmf/MFTEncoder.cpp
new file mode 100644
index 0000000000..410da2733c
--- /dev/null
+++ b/dom/media/platforms/wmf/MFTEncoder.cpp
@@ -0,0 +1,754 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MFTEncoder.h"
+#include "mozilla/Logging.h"
+#include "mozilla/WindowsProcessMitigations.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/mscom/Utils.h"
+#include "WMFUtils.h"
+
+// Missing from MinGW.
+#ifndef CODECAPI_AVEncAdaptiveMode
+# define STATIC_CODECAPI_AVEncAdaptiveMode \
+ 0x4419b185, 0xda1f, 0x4f53, 0xbc, 0x76, 0x9, 0x7d, 0xc, 0x1e, 0xfb, 0x1e
+DEFINE_CODECAPI_GUID(AVEncAdaptiveMode, "4419b185-da1f-4f53-bc76-097d0c1efb1e",
+ 0x4419b185, 0xda1f, 0x4f53, 0xbc, 0x76, 0x9, 0x7d, 0xc,
+ 0x1e, 0xfb, 0x1e)
+# define CODECAPI_AVEncAdaptiveMode \
+ DEFINE_CODECAPI_GUIDNAMED(AVEncAdaptiveMode)
+#endif
+#ifndef MF_E_NO_EVENTS_AVAILABLE
+# define MF_E_NO_EVENTS_AVAILABLE _HRESULT_TYPEDEF_(0xC00D3E80L)
+#endif
+
+#define MFT_ENC_LOGD(arg, ...) \
+ MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Debug, \
+ ("MFTEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define MFT_ENC_LOGE(arg, ...) \
+ MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Error, \
+ ("MFTEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define MFT_ENC_SLOGD(arg, ...) \
+ MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Debug, \
+ ("MFTEncoder::%s: " arg, __func__, ##__VA_ARGS__))
+#define MFT_ENC_SLOGE(arg, ...) \
+ MOZ_LOG(mozilla::sPEMLog, mozilla::LogLevel::Error, \
+ ("MFTEncoder::%s: " arg, __func__, ##__VA_ARGS__))
+
+namespace mozilla {
+extern LazyLogModule sPEMLog;
+
+// Maps the HRESULTs we commonly log to a short human-readable name; any
+// unlisted code maps to "OTHER".
+static const char* ErrorStr(HRESULT hr) {
+  struct HrName {
+    HRESULT mValue;
+    const char* mName;
+  };
+  static const HrName kKnown[] = {
+      {S_OK, "OK"},
+      {MF_E_INVALIDMEDIATYPE, "INVALIDMEDIATYPE"},
+      {MF_E_INVALIDSTREAMNUMBER, "INVALIDSTREAMNUMBER"},
+      {MF_E_INVALIDTYPE, "INVALIDTYPE"},
+      {MF_E_TRANSFORM_CANNOT_CHANGE_MEDIATYPE_WHILE_PROCESSING,
+       "TRANSFORM_PROCESSING"},
+      {MF_E_TRANSFORM_TYPE_NOT_SET, "TRANSFORM_TYPE_NO_SET"},
+      {MF_E_UNSUPPORTED_D3D_TYPE, "UNSUPPORTED_D3D_TYPE"},
+      {E_INVALIDARG, "INVALIDARG"},
+      {MF_E_NO_SAMPLE_DURATION, "NO_SAMPLE_DURATION"},
+      {MF_E_NO_SAMPLE_TIMESTAMP, "NO_SAMPLE_TIMESTAMP"},
+      {MF_E_NOTACCEPTING, "NOTACCEPTING"},
+      {MF_E_ATTRIBUTENOTFOUND, "NOTFOUND"},
+      {MF_E_BUFFERTOOSMALL, "BUFFERTOOSMALL"},
+      {E_NOTIMPL, "NOTIMPL"},
+  };
+  for (const auto& entry : kKnown) {
+    if (entry.mValue == hr) {
+      return entry.mName;
+    }
+  }
+  return "OTHER";
+}
+
+// Returns a short label for the codec subtype GUID, for logging only.
+static const char* CodecStr(const GUID& aGUID) {
+  if (IsEqualGUID(aGUID, MFVideoFormat_H264)) {
+    return "H.264";
+  }
+  if (IsEqualGUID(aGUID, MFVideoFormat_VP80)) {
+    return "VP8";
+  }
+  if (IsEqualGUID(aGUID, MFVideoFormat_VP90)) {
+    return "VP9";
+  }
+  return "Unsupported codec";
+}
+
+// Enumerates encoder MFTs that accept NV12 video input and produce aSubtype
+// output, preferring hardware MFTs unless aUseHW is false. On a non-zero
+// return the caller owns aActivates: each element must be Release()d and the
+// array itself freed with CoTaskMemFree().
+static UINT32 EnumEncoders(const GUID& aSubtype, IMFActivate**& aActivates,
+                           const bool aUseHW = true) {
+  UINT32 num = 0;
+  MFT_REGISTER_TYPE_INFO inType = {.guidMajorType = MFMediaType_Video,
+                                   .guidSubtype = MFVideoFormat_NV12};
+  MFT_REGISTER_TYPE_INFO outType = {.guidMajorType = MFMediaType_Video,
+                                    .guidSubtype = aSubtype};
+  HRESULT hr = S_OK;
+  if (aUseHW) {
+    if (IsWin32kLockedDown()) {
+      // Some HW encoders use DXGI API and crash when locked down.
+      // TODO: move HW encoding out of content process (bug 1754531).
+      MFT_ENC_SLOGD("Don't use HW encoder when win32k locked down.");
+      return 0;
+    }
+
+    hr = wmf::MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER,
+                        MFT_ENUM_FLAG_HARDWARE | MFT_ENUM_FLAG_SORTANDFILTER,
+                        &inType, &outType, &aActivates, &num);
+    if (FAILED(hr)) {
+      MFT_ENC_SLOGE("enumerate HW encoder for %s: error=%s", CodecStr(aSubtype),
+                    ErrorStr(hr));
+      return 0;
+    }
+    // Prefer any HW match; only fall back to SW when none were found.
+    if (num > 0) {
+      return num;
+    }
+  }
+
+  // Try software MFTs.
+  hr = wmf::MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER,
+                      MFT_ENUM_FLAG_SYNCMFT | MFT_ENUM_FLAG_ASYNCMFT |
+                          MFT_ENUM_FLAG_SORTANDFILTER,
+                      &inType, &outType, &aActivates, &num);
+  if (FAILED(hr)) {
+    MFT_ENC_SLOGE("enumerate SW encoder for %s: error=%s", CodecStr(aSubtype),
+                  ErrorStr(hr));
+    return 0;
+  }
+  if (num == 0) {
+    MFT_ENC_SLOGD("cannot find encoder for %s", CodecStr(aSubtype));
+  }
+  return num;
+}
+
+// Reads the MFT_FRIENDLY_NAME_Attribute of aAttributes into aName (UTF-8),
+// falling back to "Unknown MFT" when no name is available.
+static HRESULT GetFriendlyName(IMFActivate* aAttributes, nsCString& aName) {
+  UINT32 len = 0;
+  HRESULT hr = aAttributes->GetStringLength(MFT_FRIENDLY_NAME_Attribute, &len);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  if (len > 0) {
+    ++len;  // '\0'.
+    // Heap-allocate the buffer: a variable-length array (`WCHAR name[len]`)
+    // is a compiler extension, and the attribute length is not bounded, so
+    // avoid unbounded stack usage.
+    nsTArray<WCHAR> name(len);
+    name.SetLength(len);
+    if (SUCCEEDED(aAttributes->GetString(MFT_FRIENDLY_NAME_Attribute,
+                                         name.Elements(), len, nullptr))) {
+      aName.Append(NS_ConvertUTF16toUTF8(name.Elements()));
+    }
+  }
+
+  if (aName.Length() == 0) {
+    aName.Append("Unknown MFT");
+  }
+
+  return S_OK;
+}
+
+// Appends one (subtype, friendly name) Info entry per encoder MFT that can
+// produce aSubtype.
+static void PopulateEncoderInfo(const GUID& aSubtype,
+                                nsTArray<MFTEncoder::Info>& aInfos) {
+  IMFActivate** activates = nullptr;
+  const UINT32 count = EnumEncoders(aSubtype, activates);
+  for (UINT32 idx = 0; idx < count; ++idx) {
+    MFTEncoder::Info info = {.mSubtype = aSubtype};
+    GetFriendlyName(activates[idx], info.mName);
+    aInfos.AppendElement(info);
+    MFT_ENC_SLOGD("<ENC> [%s] %s\n", CodecStr(aSubtype), info.mName.Data());
+    // EnumEncoders transferred ownership of each activate; drop it once
+    // its name has been recorded.
+    activates[idx]->Release();
+    activates[idx] = nullptr;
+  }
+  CoTaskMemFree(activates);
+}
+
+// Looks up the cached encoder info for aSubtype, if any.
+Maybe<MFTEncoder::Info> MFTEncoder::GetInfo(const GUID& aSubtype) {
+  // Iterate by const reference: Info holds an nsCString, so the previous
+  // by-value loop copied the string for every entry scanned.
+  for (const Info& info : Infos()) {
+    if (IsEqualGUID(aSubtype, info.mSubtype)) {
+      return Some(info);
+    }
+  }
+  return Nothing();
+}
+
+// Returns the friendly name of the preferred encoder for aSubtype, or "???"
+// when no encoder is known for it.
+nsCString MFTEncoder::GetFriendlyName(const GUID& aSubtype) {
+  Maybe<Info> info = GetInfo(aSubtype);
+  if (info) {
+    return info.ref().mName;
+  }
+  return "???"_ns;
+}
+
+// Called only once by Infos().
+// Builds the process-wide list of available encoder MFTs for the codecs we
+// support. Returns an empty list when Media Foundation cannot be
+// initialized.
+nsTArray<MFTEncoder::Info> MFTEncoder::Enumerate() {
+  nsTArray<Info> infos;
+
+  if (!wmf::MediaFoundationInitializer::HasInitialized()) {
+    MFT_ENC_SLOGE("cannot init Media Foundation");
+    return infos;
+  }
+
+  PopulateEncoderInfo(MFVideoFormat_H264, infos);
+  PopulateEncoderInfo(MFVideoFormat_VP90, infos);
+  PopulateEncoderInfo(MFVideoFormat_VP80, infos);
+
+  return infos;
+}
+
+// Lazily-built, process-wide encoder list. C++11 magic statics make the
+// first call thread-safe; the list is never refreshed afterwards.
+nsTArray<MFTEncoder::Info>& MFTEncoder::Infos() {
+  static nsTArray<Info> infos = Enumerate();
+  return infos;
+}
+
+// Returns the activation object of the highest-ranked encoder for aSubtype
+// (HW preferred unless disallowed), or nullptr when none is found.
+already_AddRefed<IMFActivate> MFTEncoder::CreateFactory(const GUID& aSubtype) {
+  IMFActivate** activates = nullptr;
+  UINT32 num = EnumEncoders(aSubtype, activates, !mHardwareNotAllowed);
+  if (num == 0) {
+    return nullptr;
+  }
+
+  // Keep the first and throw out others, if there is any.
+  // MFTEnumEx already handed us one reference per element, so adopt it with
+  // dont_AddRef(); assigning the raw pointer to a RefPtr would AddRef a
+  // second time and leak one reference on the kept factory.
+  RefPtr<IMFActivate> factory = dont_AddRef(activates[0]);
+  activates[0] = nullptr;
+  for (UINT32 i = 1; i < num; ++i) {
+    activates[i]->Release();
+    activates[i] = nullptr;
+  }
+  CoTaskMemFree(activates);
+
+  return factory.forget();
+}
+
+// Instantiates the best-ranked encoder MFT for aSubtype through its
+// activation object and caches the transform, the factory (needed later by
+// Destroy() for ShutdownObject()) and the ICodecAPI config interface. Must
+// run on an MTA thread; call at most once per MFTEncoder.
+HRESULT MFTEncoder::Create(const GUID& aSubtype) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(!mEncoder);
+
+  RefPtr<IMFActivate> factory = CreateFactory(aSubtype);
+  if (!factory) {
+    return E_FAIL;
+  }
+
+  // Create MFT via the activation object.
+  RefPtr<IMFTransform> encoder;
+  HRESULT hr = factory->ActivateObject(
+      IID_PPV_ARGS(static_cast<IMFTransform**>(getter_AddRefs(encoder))));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<ICodecAPI> config;
+  // Avoid IID_PPV_ARGS() here for MingGW fails to declare UUID for ICodecAPI.
+  hr = encoder->QueryInterface(IID_ICodecAPI, getter_AddRefs(config));
+  if (FAILED(hr)) {
+    // The encoder is unusable without ICodecAPI; tear the MFT down again.
+    encoder = nullptr;
+    factory->ShutdownObject();
+    return hr;
+  }
+
+  mFactory = std::move(factory);
+  mEncoder = std::move(encoder);
+  mConfig = std::move(config);
+  return S_OK;
+}
+
+// Releases the MFT and its configuration interface. Safe to call multiple
+// times (no-op once torn down); returns the ShutdownObject() result.
+HRESULT
+MFTEncoder::Destroy() {
+  if (!mEncoder) {
+    return S_OK;
+  }
+
+  mEncoder = nullptr;
+  mConfig = nullptr;
+  // Release MFT resources via activation object.
+  HRESULT hr = mFactory->ShutdownObject();
+  mFactory = nullptr;
+
+  return hr;
+}
+
+// Configures the encoder's input/output media types and starts streaming.
+// Per the MFT contract the output type must be set before the input type.
+// Also selects the (sync or async) event model used by Push/Process/Drain.
+HRESULT
+MFTEncoder::SetMediaTypes(IMFMediaType* aInputType, IMFMediaType* aOutputType) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(aInputType && aOutputType);
+
+  AsyncMFTResult asyncMFT = AttemptEnableAsync();
+  NS_ENSURE_TRUE(asyncMFT.isOk(), asyncMFT.unwrapErr());
+
+  HRESULT hr = GetStreamIDs();
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Always set encoder output type before input.
+  hr = mEncoder->SetOutputType(mOutputStreamID, aOutputType, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  NS_ENSURE_TRUE(MatchInputSubtype(aInputType) != GUID_NULL,
+                 MF_E_INVALIDMEDIATYPE);
+
+  hr = mEncoder->SetInputType(mInputStreamID, aInputType, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = mEncoder->GetInputStreamInfo(mInputStreamID, &mInputStreamInfo);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Bug fix: query the *output* stream info with the output stream ID. The
+  // previous code passed mInputStreamID here, which only happened to work
+  // for MFTs whose input and output stream IDs are both 0.
+  hr = mEncoder->GetOutputStreamInfo(mOutputStreamID, &mOutputStreamInfo);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  mOutputStreamProvidesSample =
+      IsFlagSet(mOutputStreamInfo.dwFlags, MFT_OUTPUT_STREAM_PROVIDES_SAMPLES);
+
+  hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_BEGIN_STREAMING, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = SendMFTMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  if (asyncMFT.unwrap()) {
+    // Async MFT: pull METransformNeedInput/HaveOutput events from the MFT's
+    // own event generator.
+    RefPtr<IMFMediaEventGenerator> source;
+    hr = mEncoder->QueryInterface(IID_PPV_ARGS(
+        static_cast<IMFMediaEventGenerator**>(getter_AddRefs(source))));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    mEventSource.SetAsyncEventGenerator(source.forget());
+  } else {
+    // Sync MFT: synthesize equivalent events locally.
+    mEventSource.InitSyncMFTEventQueue();
+  }
+
+  mNumNeedInput = 0;
+  return S_OK;
+}
+
+// Async MFT won't work without unlocking. See
+// https://docs.microsoft.com/en-us/windows/win32/medfound/asynchronous-mfts#unlocking-asynchronous-mfts
+MFTEncoder::AsyncMFTResult MFTEncoder::AttemptEnableAsync() {
+  RefPtr<IMFAttributes> attributes;
+  HRESULT hr = mEncoder->GetAttributes(getter_AddRefs(attributes));
+  if (FAILED(hr)) {
+    return AsyncMFTResult(hr);
+  }
+
+  // An async MFT advertises MF_TRANSFORM_ASYNC and must be explicitly
+  // unlocked before it accepts type negotiation.
+  bool async =
+      MFGetAttributeUINT32(attributes, MF_TRANSFORM_ASYNC, FALSE) == TRUE;
+  hr = async ? attributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, TRUE) : S_OK;
+
+  return SUCCEEDED(hr) ? AsyncMFTResult(async) : AsyncMFTResult(hr);
+}
+
+// Caches the first input and output stream IDs. MFTs with fixed streams
+// return E_NOTIMPL from GetStreamIDs(), in which case both IDs are 0 by
+// definition.
+HRESULT MFTEncoder::GetStreamIDs() {
+  DWORD numIns;
+  DWORD numOuts;
+  HRESULT hr = mEncoder->GetStreamCount(&numIns, &numOuts);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  if (numIns < 1 || numOuts < 1) {
+    MFT_ENC_LOGE("stream count error");
+    return MF_E_INVALIDSTREAMNUMBER;
+  }
+
+  // Heap-allocate the ID buffers: variable-length arrays are a compiler
+  // extension and the counts come from the MFT, so they are not bounded at
+  // compile time.
+  nsTArray<DWORD> inIDs(numIns);
+  inIDs.SetLength(numIns);
+  nsTArray<DWORD> outIDs(numOuts);
+  outIDs.SetLength(numOuts);
+  hr = mEncoder->GetStreamIDs(numIns, inIDs.Elements(), numOuts,
+                              outIDs.Elements());
+  if (SUCCEEDED(hr)) {
+    mInputStreamID = inIDs[0];
+    mOutputStreamID = outIDs[0];
+  } else if (hr == E_NOTIMPL) {
+    mInputStreamID = 0;
+    mOutputStreamID = 0;
+  } else {
+    MFT_ENC_LOGE("failed to get stream IDs");
+    return hr;
+  }
+  return S_OK;
+}
+
+// Checks whether the encoder's enumerated input types include aInputType's
+// subtype. Returns that subtype on a match, GUID_NULL otherwise.
+GUID MFTEncoder::MatchInputSubtype(IMFMediaType* aInputType) {
+  MOZ_ASSERT(mEncoder);
+  MOZ_ASSERT(aInputType);
+
+  GUID desired = GUID_NULL;
+  HRESULT hr = aInputType->GetGUID(MF_MT_SUBTYPE, &desired);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), GUID_NULL);
+  MOZ_ASSERT(desired != GUID_NULL);
+
+  DWORD i = 0;
+  GUID preferred = GUID_NULL;
+  while (true) {
+    // Hold each enumerated type in a RefPtr so it is released every
+    // iteration; the raw pointer in the previous version leaked one
+    // reference per enumerated type.
+    RefPtr<IMFMediaType> inputType;
+    hr = mEncoder->GetInputAvailableType(mInputStreamID, i,
+                                         getter_AddRefs(inputType));
+    if (hr == MF_E_NO_MORE_TYPES) {
+      break;
+    }
+    NS_ENSURE_TRUE(SUCCEEDED(hr), GUID_NULL);
+
+    GUID sub = GUID_NULL;
+    hr = inputType->GetGUID(MF_MT_SUBTYPE, &sub);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), GUID_NULL);
+
+    if (IsEqualGUID(desired, sub)) {
+      preferred = desired;
+      break;
+    }
+    ++i;
+  }
+
+  return IsEqualGUID(preferred, desired) ? preferred : GUID_NULL;
+}
+
+// Sends a control/notification message to the MFT. Thin wrapper over
+// IMFTransform::ProcessMessage; must run on an MTA thread.
+HRESULT
+MFTEncoder::SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+
+  return mEncoder->ProcessMessage(aMsg, aData);
+}
+
+// Applies one-time encoder configuration via ICodecAPI: CBR rate control at
+// aBitsPerSec, plus — only when the codec reports them as modifiable —
+// resolution-adaptive mode and low-latency mode.
+HRESULT MFTEncoder::SetModes(UINT32 aBitsPerSec) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mConfig);
+
+  VARIANT var;
+  var.vt = VT_UI4;
+  var.ulVal = eAVEncCommonRateControlMode_CBR;
+  HRESULT hr = mConfig->SetValue(&CODECAPI_AVEncCommonRateControlMode, &var);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  var.ulVal = aBitsPerSec;
+  hr = mConfig->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Optional: let the encoder adapt to resolution changes when supported.
+  if (SUCCEEDED(mConfig->IsModifiable(&CODECAPI_AVEncAdaptiveMode))) {
+    var.ulVal = eAVEncAdaptiveMode_Resolution;
+    hr = mConfig->SetValue(&CODECAPI_AVEncAdaptiveMode, &var);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  }
+
+  // Optional: prefer low-latency output when supported.
+  if (SUCCEEDED(mConfig->IsModifiable(&CODECAPI_AVLowLatencyMode))) {
+    var.vt = VT_BOOL;
+    var.boolVal = VARIANT_TRUE;
+    hr = mConfig->SetValue(&CODECAPI_AVLowLatencyMode, &var);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  }
+
+  return S_OK;
+}
+
+// Updates the target mean bitrate (used with CBR rate control) on the
+// running encoder.
+HRESULT
+MFTEncoder::SetBitrate(UINT32 aBitsPerSec) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mConfig);
+
+  VARIANT var;
+  var.vt = VT_UI4;
+  var.ulVal = aBitsPerSec;
+  return mConfig->SetValue(&CODECAPI_AVEncCommonMeanBitRate, &var);
+}
+
+// Creates an IMFSample backed by a single aligned memory buffer of aSize
+// bytes. aAlignment follows MFCreateAlignedMemoryBuffer's convention of
+// (alignment - 1) flag values.
+static HRESULT CreateSample(RefPtr<IMFSample>* aOutSample, DWORD aSize,
+                            DWORD aAlignment) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+
+  HRESULT hr;
+  RefPtr<IMFSample> sample;
+  hr = wmf::MFCreateSample(getter_AddRefs(sample));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  RefPtr<IMFMediaBuffer> buffer;
+  hr = wmf::MFCreateAlignedMemoryBuffer(aSize, aAlignment,
+                                        getter_AddRefs(buffer));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  hr = sample->AddBuffer(buffer);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  *aOutSample = sample.forget();
+
+  return S_OK;
+}
+
+// Allocates an empty input sample matching the encoder's input stream
+// alignment requirement (cbAlignment converted to the "alignment - 1" flag
+// form CreateSample expects).
+HRESULT
+MFTEncoder::CreateInputSample(RefPtr<IMFSample>* aSample, size_t aSize) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+
+  return CreateSample(
+      aSample, aSize,
+      mInputStreamInfo.cbAlignment > 0 ? mInputStreamInfo.cbAlignment - 1 : 0);
+}
+
+// Queues one input sample (taking ownership of aInput) and then runs as much
+// of the processing loop as currently possible.
+HRESULT
+MFTEncoder::PushInput(RefPtr<IMFSample>&& aInput) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+  MOZ_ASSERT(aInput);
+
+  mPendingInputs.Push(aInput.forget());
+  if (mEventSource.IsSync() && mNumNeedInput == 0) {
+    // A sync MFT never asks for input; kick the loop by pretending it did.
+    // To step 2 in
+    // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data
+    mNumNeedInput++;
+  }
+
+  HRESULT hr = ProcessInput();
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return ProcessEvents();
+}
+
+// Feeds one pending input to the MFT if it has requested input. For sync
+// MFTs, also synthesizes the next event (need-input/have-output) from
+// GetOutputStatus() per the synchronous processing model.
+HRESULT MFTEncoder::ProcessInput() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+
+  if (mNumNeedInput == 0 || mPendingInputs.GetSize() == 0) {
+    return S_OK;
+  }
+
+  RefPtr<IMFSample> input = mPendingInputs.PopFront();
+  HRESULT hr = mEncoder->ProcessInput(mInputStreamID, input, 0);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  --mNumNeedInput;
+
+  if (!mEventSource.IsSync()) {
+    // Async MFT emits METransformNeedInput/HaveOutput events on its own.
+    return S_OK;
+  }
+  // For sync MFT: Step 3 in
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data
+  DWORD flags = 0;
+  hr = mEncoder->GetOutputStatus(&flags);
+  MediaEventType evType = MEUnknown;
+  switch (hr) {
+    case S_OK:
+      evType = flags == MFT_OUTPUT_STATUS_SAMPLE_READY
+                   ? METransformHaveOutput  // To step 4: ProcessOutput().
+                   : METransformNeedInput;  // To step 2: ProcessInput().
+      break;
+    case E_NOTIMPL:
+      // MFT cannot report status; optimistically try ProcessOutput().
+      evType = METransformHaveOutput;  // To step 4: ProcessOutput().
+      break;
+    default:
+      MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("undefined output status");
+      return hr;
+  }
+  return mEventSource.QueueSyncMFTEvent(evType);
+}
+
+// Drains the event source, dispatching each event, until no more events are
+// available or an error occurs. MF_E_NO_EVENTS_AVAILABLE is the normal
+// loop-exit condition and is reported as success.
+HRESULT MFTEncoder::ProcessEvents() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+
+  HRESULT hr = E_FAIL;
+  while (true) {
+    Event event = mEventSource.GetEvent();
+    if (event.isErr()) {
+      // Includes the benign MF_E_NO_EVENTS_AVAILABLE; sorted out below.
+      hr = event.unwrapErr();
+      break;
+    }
+
+    MediaEventType evType = event.unwrap();
+    switch (evType) {
+      case METransformNeedInput:
+        ++mNumNeedInput;
+        hr = ProcessInput();
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+        break;
+      case METransformHaveOutput:
+        hr = ProcessOutput();
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+        break;
+      case METransformDrainComplete:
+        mDrainState = DrainState::DRAINED;
+        break;
+      default:
+        MFT_ENC_LOGE("unsupported event: %lx", evType);
+    }
+  }
+
+  switch (hr) {
+    case MF_E_NO_EVENTS_AVAILABLE:
+      return S_OK;
+    case MF_E_MULTIPLE_SUBSCRIBERS:
+    default:
+      MFT_ENC_LOGE("failed to get event: %s", ErrorStr(hr));
+      return hr;
+  }
+}
+
+// Pulls one encoded sample from the MFT into mOutputs. Handles output
+// format changes (MF_E_TRANSFORM_STREAM_CHANGE) and, for sync MFTs, turns
+// MF_E_TRANSFORM_NEED_MORE_INPUT while draining into a drain-complete event.
+HRESULT MFTEncoder::ProcessOutput() {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+
+  MFT_OUTPUT_DATA_BUFFER output = {.dwStreamID = mOutputStreamID,
+                                   .pSample = nullptr,
+                                   .dwStatus = 0,
+                                   .pEvents = nullptr};
+  RefPtr<IMFSample> sample;
+  HRESULT hr = E_FAIL;
+  if (!mOutputStreamProvidesSample) {
+    // We must supply the output sample unless the MFT allocates its own.
+    hr = CreateSample(&sample, mOutputStreamInfo.cbSize,
+                      mOutputStreamInfo.cbAlignment > 1
+                          ? mOutputStreamInfo.cbAlignment - 1
+                          : 0);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    output.pSample = sample;
+  }
+
+  DWORD status = 0;
+  hr = mEncoder->ProcessOutput(0, 1, &output, &status);
+  if (output.pEvents) {
+    // Per the ProcessOutput contract the caller must release any event
+    // collection the MFT stores here; the previous code leaked it.
+    output.pEvents->Release();
+    output.pEvents = nullptr;
+  }
+  if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+    MFT_ENC_LOGD("output stream change");
+    if (output.dwStatus & MFT_OUTPUT_DATA_BUFFER_FORMAT_CHANGE) {
+      // Follow the instructions in Microsoft doc:
+      // https://docs.microsoft.com/en-us/windows/win32/medfound/handling-stream-changes#output-type
+      // Hold the new type in a RefPtr: GetOutputAvailableType hands back a
+      // reference that the previous raw pointer never released.
+      RefPtr<IMFMediaType> outputType;
+      hr = mEncoder->GetOutputAvailableType(mOutputStreamID, 0,
+                                            getter_AddRefs(outputType));
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      hr = mEncoder->SetOutputType(mOutputStreamID, outputType, 0);
+      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+    }
+    return MF_E_TRANSFORM_STREAM_CHANGE;
+  }
+
+  // Step 8 in
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data
+  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+    MOZ_ASSERT(mEventSource.IsSync());
+    MOZ_ASSERT(mDrainState == DrainState::DRAINING);
+
+    mEventSource.QueueSyncMFTEvent(METransformDrainComplete);
+    return S_OK;
+  }
+
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  mOutputs.AppendElement(output.pSample);
+  if (mOutputStreamProvidesSample) {
+    // Balance the reference the MFT added for the sample it allocated;
+    // mOutputs (RefPtr array) now holds its own reference.
+    output.pSample->Release();
+    output.pSample = nullptr;
+  }
+
+  return S_OK;
+}
+
+// Transfers all pending encoded samples to aOutput (constant-time swap;
+// mOutputs is left empty).
+HRESULT MFTEncoder::TakeOutput(nsTArray<RefPtr<IMFSample>>& aOutput) {
+  MOZ_ASSERT(aOutput.Length() == 0);
+  aOutput.SwapElements(mOutputs);
+  return S_OK;
+}
+
+// Pushes all remaining inputs into the MFT, issues the drain command, and
+// collects every remaining output sample into aOutput. The switch cases
+// intentionally fall through: DRAINABLE -> DRAINING -> DRAINED.
+HRESULT MFTEncoder::Drain(nsTArray<RefPtr<IMFSample>>& aOutput) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+  MOZ_ASSERT(aOutput.Length() == 0);
+
+  switch (mDrainState) {
+    case DrainState::DRAINABLE:
+      // Exhaust pending inputs.
+      while (mPendingInputs.GetSize() > 0) {
+        if (mEventSource.IsSync()) {
+          // Step 5 in
+          // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data
+          mEventSource.QueueSyncMFTEvent(METransformNeedInput);
+        }
+        HRESULT hr = ProcessEvents();
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      }
+      SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0);
+      mDrainState = DrainState::DRAINING;
+      [[fallthrough]];  // To collect and return outputs.
+    case DrainState::DRAINING:
+      // Collect remaining outputs.
+      while (mOutputs.Length() == 0 && mDrainState != DrainState::DRAINED) {
+        if (mEventSource.IsSync()) {
+          // Step 8 in
+          // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data
+          mEventSource.QueueSyncMFTEvent(METransformHaveOutput);
+        }
+        HRESULT hr = ProcessEvents();
+        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+      }
+      [[fallthrough]];  // To return outputs.
+    case DrainState::DRAINED:
+      aOutput.SwapElements(mOutputs);
+      return S_OK;
+  }
+}
+
+// Copies the codec's MF_MT_MPEG_SEQUENCE_HEADER blob (e.g. H.264 SPS/PPS)
+// into aHeader. Returns S_OK with aHeader left empty when the attribute is
+// absent or empty.
+HRESULT MFTEncoder::GetMPEGSequenceHeader(nsTArray<UINT8>& aHeader) {
+  MOZ_ASSERT(mscom::IsCurrentThreadMTA());
+  MOZ_ASSERT(mEncoder);
+  MOZ_ASSERT(aHeader.Length() == 0);
+
+  RefPtr<IMFMediaType> outputType;
+  HRESULT hr = mEncoder->GetOutputCurrentType(mOutputStreamID,
+                                              getter_AddRefs(outputType));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  UINT32 length = 0;
+  hr = outputType->GetBlobSize(MF_MT_MPEG_SEQUENCE_HEADER, &length);
+  if (hr == MF_E_ATTRIBUTENOTFOUND || length == 0) {
+    return S_OK;
+  }
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // Size the array before letting GetBlob() write into it: the previous
+  // code only reserved capacity (SetCapacity) and then wrote past the
+  // array's logical length, which nsTArray does not sanction.
+  aHeader.SetLength(length);
+  hr = outputType->GetBlob(MF_MT_MPEG_SEQUENCE_HEADER, aHeader.Elements(),
+                           length, nullptr);
+  if (FAILED(hr)) {
+    aHeader.Clear();
+  }
+
+  return hr;
+}
+
+// Returns the next pending event: from the MFT's generator for async MFTs,
+// or from the locally synthesized queue for sync MFTs. Non-blocking; yields
+// MF_E_NO_EVENTS_AVAILABLE (as the error arm) when nothing is pending.
+MFTEncoder::Event MFTEncoder::EventSource::GetEvent() {
+  if (IsSync()) {
+    return GetSyncMFTEvent();
+  }
+
+  RefPtr<IMFMediaEvent> event;
+  HRESULT hr = mImpl.as<RefPtr<IMFMediaEventGenerator>>()->GetEvent(
+      MF_EVENT_FLAG_NO_WAIT, getter_AddRefs(event));
+  MediaEventType type = MEUnknown;
+  if (SUCCEEDED(hr)) {
+    hr = event->GetType(&type);
+  }
+  return SUCCEEDED(hr) ? Event{type} : Event{hr};
+}
+
+// Appends a synthesized event to the sync-MFT queue. Accessed from a single
+// thread by design (see IsOnCurrentThread), hence no locking.
+HRESULT MFTEncoder::EventSource::QueueSyncMFTEvent(MediaEventType aEventType) {
+  MOZ_ASSERT(IsSync());
+  MOZ_ASSERT(IsOnCurrentThread());
+
+  auto q = mImpl.as<UniquePtr<EventQueue>>().get();
+  q->push(aEventType);
+  return S_OK;
+}
+
+// Pops the next synthesized event, or reports MF_E_NO_EVENTS_AVAILABLE when
+// the queue is empty (the normal loop-exit condition in ProcessEvents()).
+MFTEncoder::Event MFTEncoder::EventSource::GetSyncMFTEvent() {
+  MOZ_ASSERT(IsOnCurrentThread());
+
+  auto q = mImpl.as<UniquePtr<EventQueue>>().get();
+  if (q->empty()) {
+    return Event{MF_E_NO_EVENTS_AVAILABLE};
+  }
+
+  MediaEventType type = q->front();
+  q->pop();
+  return Event{type};
+}
+
+#ifdef DEBUG
+// Debug-only helper: lazily binds to the first thread that touches the
+// event queue and asserts that all later accesses stay on that thread.
+bool MFTEncoder::EventSource::IsOnCurrentThread() {
+  if (!mThread) {
+    mThread = GetCurrentSerialEventTarget();
+  }
+  return mThread->IsOnCurrentThread();
+}
+#endif
+
+} // namespace mozilla
+
+#undef MFT_ENC_SLOGE
+#undef MFT_ENC_SLOGD
+#undef MFT_ENC_LOGE
+#undef MFT_ENC_LOGD
diff --git a/dom/media/platforms/wmf/MFTEncoder.h b/dom/media/platforms/wmf/MFTEncoder.h
new file mode 100644
index 0000000000..e2eaec3476
--- /dev/null
+++ b/dom/media/platforms/wmf/MFTEncoder.h
@@ -0,0 +1,144 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(MFTEncoder_h_)
+# define MFTEncoder_h_
+
+# include <functional>
+# include <queue>
+# include "mozilla/RefPtr.h"
+# include "mozilla/ResultVariant.h"
+# include "nsISupportsImpl.h"
+# include "nsDeque.h"
+# include "nsTArray.h"
+# include "WMF.h"
+
+namespace mozilla {
+
+// Wrapper around a Media Foundation video encoder MFT (H.264/VP8/VP9).
+// Drives both synchronous and asynchronous MFTs through the asynchronous
+// event model (see EventSource). All methods are expected to run on an MTA
+// thread (asserted in the .cpp).
+class MFTEncoder final {
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MFTEncoder)
+
+  explicit MFTEncoder(const bool aHardwareNotAllowed)
+      : mHardwareNotAllowed(aHardwareNotAllowed) {}
+
+  HRESULT Create(const GUID& aSubtype);
+  HRESULT Destroy();
+  HRESULT SetMediaTypes(IMFMediaType* aInputType, IMFMediaType* aOutputType);
+  HRESULT SetModes(UINT32 aBitsPerSec);
+  HRESULT SetBitrate(UINT32 aBitsPerSec);
+
+  HRESULT CreateInputSample(RefPtr<IMFSample>* aSample, size_t aSize);
+  HRESULT PushInput(RefPtr<IMFSample>&& aInput);
+  HRESULT TakeOutput(nsTArray<RefPtr<IMFSample>>& aOutput);
+  HRESULT Drain(nsTArray<RefPtr<IMFSample>>& aOutput);
+
+  HRESULT GetMPEGSequenceHeader(nsTArray<UINT8>& aHeader);
+
+  static nsCString GetFriendlyName(const GUID& aSubtype);
+
+  // Codec subtype plus the MFT's human-readable name, as enumerated once at
+  // startup.
+  struct Info final {
+    GUID mSubtype;
+    nsCString mName;
+  };
+
+ private:
+  // Abstractions to support sync MFTs using the same logic for async MFTs.
+  // When the MFT is async and a real event generator is available, simply
+  // forward the calls. For sync MFTs, use the synchronous processing model
+  // described in
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/basic-mft-processing-model#process-data
+  // to generate events of the asynchronous processing model.
+  using Event = Result<MediaEventType, HRESULT>;
+  using EventQueue = std::queue<MediaEventType>;
+  class EventSource final {
+   public:
+    EventSource() : mImpl(Nothing{}) {}
+
+    void SetAsyncEventGenerator(
+        already_AddRefed<IMFMediaEventGenerator>&& aAsyncEventGenerator) {
+      MOZ_ASSERT(mImpl.is<Nothing>());
+      mImpl.emplace<RefPtr<IMFMediaEventGenerator>>(aAsyncEventGenerator);
+    }
+
+    void InitSyncMFTEventQueue() {
+      MOZ_ASSERT(mImpl.is<Nothing>());
+      mImpl.emplace<UniquePtr<EventQueue>>(MakeUnique<EventQueue>());
+    }
+
+    bool IsSync() const { return mImpl.is<UniquePtr<EventQueue>>(); }
+
+    Event GetEvent();
+    // Push an event when sync MFT is used.
+    HRESULT QueueSyncMFTEvent(MediaEventType aEventType);
+
+   private:
+    // Pop an event from the queue when sync MFT is used.
+    Event GetSyncMFTEvent();
+
+    Variant<
+        // Uninitialized.
+        Nothing,
+        // For async MFT events. See
+        // https://docs.microsoft.com/en-us/windows/win32/medfound/asynchronous-mfts#events
+        RefPtr<IMFMediaEventGenerator>,
+        // Event queue for a sync MFT. Storing EventQueue directly breaks the
+        // code so a pointer is introduced.
+        UniquePtr<EventQueue>>
+        mImpl;
+# ifdef DEBUG
+    bool IsOnCurrentThread();
+    nsCOMPtr<nsISerialEventTarget> mThread;
+# endif
+  };
+
+  ~MFTEncoder() { Destroy(); }
+
+  static nsTArray<Info>& Infos();
+  static nsTArray<Info> Enumerate();
+  static Maybe<Info> GetInfo(const GUID& aSubtype);
+
+  already_AddRefed<IMFActivate> CreateFactory(const GUID& aSubtype);
+  // Return true when successfully enabled, false for MFT that doesn't support
+  // async processing model, and error otherwise.
+  using AsyncMFTResult = Result<bool, HRESULT>;
+  AsyncMFTResult AttemptEnableAsync();
+  HRESULT GetStreamIDs();
+  GUID MatchInputSubtype(IMFMediaType* aInputType);
+  HRESULT SendMFTMessage(MFT_MESSAGE_TYPE aMsg, ULONG_PTR aData);
+
+  HRESULT ProcessEvents();
+  HRESULT ProcessInput();
+  HRESULT ProcessOutput();
+
+  const bool mHardwareNotAllowed;
+  RefPtr<IMFTransform> mEncoder;
+  // For MFT object creation. See
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/activation-objects
+  RefPtr<IMFActivate> mFactory;
+  // For encoder configuration. See
+  // https://docs.microsoft.com/en-us/windows/win32/directshow/encoder-api
+  RefPtr<ICodecAPI> mConfig;
+
+  // Stream IDs and stream info only become meaningful once SetMediaTypes()
+  // succeeds; default-initialize them so earlier teardown paths never read
+  // indeterminate values.
+  DWORD mInputStreamID = 0;
+  DWORD mOutputStreamID = 0;
+  MFT_INPUT_STREAM_INFO mInputStreamInfo = {};
+  MFT_OUTPUT_STREAM_INFO mOutputStreamInfo = {};
+  // True when the MFT allocates its own output samples
+  // (MFT_OUTPUT_STREAM_PROVIDES_SAMPLES).
+  bool mOutputStreamProvidesSample = false;
+
+  // Number of outstanding METransformNeedInput requests from the MFT.
+  size_t mNumNeedInput = 0;
+  enum class DrainState { DRAINED, DRAINABLE, DRAINING };
+  DrainState mDrainState = DrainState::DRAINABLE;
+
+  // Inputs not yet fed to the MFT, and outputs not yet taken by the caller.
+  nsRefPtrDeque<IMFSample> mPendingInputs;
+  nsTArray<RefPtr<IMFSample>> mOutputs;
+
+  EventSource mEventSource;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMF.h b/dom/media/platforms/wmf/WMF.h
new file mode 100644
index 0000000000..740442ceda
--- /dev/null
+++ b/dom/media/platforms/wmf/WMF.h
@@ -0,0 +1,198 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WMF_H_
+#define WMF_H_
+
+#include <windows.h>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mfreadwrite.h>
+#include <mfobjects.h>
+#include <ks.h>
+#include <stdio.h>
+#include <mferror.h>
+#include <propvarutil.h>
+#include <wmcodecdsp.h>
+#include <d3d9.h>
+#include <dxva2api.h>
+#include <wmcodecdsp.h>
+#include <codecapi.h>
+
+#include "mozilla/Atomics.h"
+#include "mozilla/ClearOnShutdown.h"
+#include "mozilla/StaticMutex.h"
+#include "nsThreadUtils.h"
+
+// The Windows headers helpfully declare min and max macros, which don't
+// compile in the presence of std::min and std::max and unified builds.
+// So undef them here.
+#ifdef min
+# undef min
+#endif
+#ifdef max
+# undef max
+#endif
+
+// https://stackoverflow.com/questions/25759700/ms-format-tag-for-opus-codec
+#ifndef MFAudioFormat_Opus
+DEFINE_GUID(MFAudioFormat_Opus, WAVE_FORMAT_OPUS, 0x000, 0x0010, 0x80, 0x00,
+ 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
+#endif
+
+const inline GUID CLSID_CMSVPXDecMFT = {
+ 0xe3aaf548,
+ 0xc9a4,
+ 0x4c6e,
+ {0x23, 0x4d, 0x5a, 0xda, 0x37, 0x4b, 0x00, 0x00}};
+
+namespace mozilla::wmf {
+
+// A helper class for automatically starting and shuting down the Media
+// Foundation. Prior to using Media Foundation in a process, users should call
+// MediaFoundationInitializer::HasInitialized() to ensure Media Foundation is
+// initialized. Users should also check the result of this call, in case the
+// internal call to MFStartup fails. The first check to HasInitialized will
+// cause the helper to start up Media Foundation and set up a runnable to handle
+// Media Foundation shutdown at XPCOM shutdown. Calls after the first will not
+// cause any extra startups or shutdowns, so it's safe to check multiple times
+// in the same process. Users do not need to do any manual shutdown, the helper
+// will handle this internally.
+class MediaFoundationInitializer final {
+ public:
+  ~MediaFoundationInitializer() {
+    // Only balance a successful MFStartup().
+    if (mHasInitialized) {
+      if (FAILED(MFShutdown())) {
+        NS_WARNING("MFShutdown failed");
+      }
+    }
+  }
+  static bool HasInitialized() {
+    // After XPCOM shutdown the singleton is gone; report failure rather
+    // than resurrecting it.
+    if (sIsShutdown) {
+      return false;
+    }
+    return Get()->mHasInitialized;
+  }
+
+ private:
+  // Lazily creates the singleton (thread-safe via sCreateMutex) and, on
+  // first creation, registers its destruction at XPCOM shutdown from the
+  // main thread. The lambdas only touch static members, so their reference
+  // captures cannot dangle.
+  static MediaFoundationInitializer* Get() {
+    {
+      StaticMutexAutoLock lock(sCreateMutex);
+      if (!sInitializer) {
+        sInitializer.reset(new MediaFoundationInitializer());
+        GetMainThreadSerialEventTarget()->Dispatch(
+            NS_NewRunnableFunction("MediaFoundationInitializer::Get", [&] {
+              // Need to run this before MTA thread gets destroyed.
+              RunOnShutdown(
+                  [&] {
+                    sInitializer.reset();
+                    sIsShutdown = true;
+                  },
+                  ShutdownPhase::XPCOMShutdown);
+            }));
+      }
+    }
+    return sInitializer.get();
+  }
+
+  MediaFoundationInitializer() : mHasInitialized(SUCCEEDED(MFStartup())) {
+    if (!mHasInitialized) {
+      NS_WARNING("MFStartup failed");
+    }
+  }
+
+  // If successful, loads all required WMF DLLs and calls the WMF MFStartup()
+  // function. This delegates the WMF MFStartup() call to the MTA thread if
+  // the current thread is not MTA. This is to ensure we always interact with
+  // WMF from threads with the same COM compartment model.
+  HRESULT MFStartup();
+
+  // Calls the WMF MFShutdown() function. Call this once for every time
+  // wmf::MFStartup() succeeds. Note: does not unload the WMF DLLs loaded by
+  // MFStartup(); leaves them in memory to save I/O at next MFStartup() call.
+  // This delegates the WMF MFShutdown() call to the MTA thread if the current
+  // thread is not MTA. This is to ensure we always interact with
+  // WMF from threads with the same COM compartment model.
+  HRESULT MFShutdown();
+
+  static inline UniquePtr<MediaFoundationInitializer> sInitializer;
+  static inline StaticMutex sCreateMutex;
+  // Flipped exactly once at XPCOM shutdown; checked before touching
+  // sInitializer.
+  static inline Atomic<bool> sIsShutdown{false};
+  const bool mHasInitialized;
+};
+
+// All functions below are wrappers around the corresponding WMF function,
+// and automatically locate and call the corresponding function in the WMF DLLs.
+
+HRESULT MFCreateMediaType(IMFMediaType** aOutMFType);
+
+HRESULT MFGetStrideForBitmapInfoHeader(DWORD aFormat, DWORD aWidth,
+ LONG* aOutStride);
+
+HRESULT MFGetService(IUnknown* punkObject, REFGUID guidService, REFIID riid,
+ LPVOID* ppvObject);
+
+HRESULT DXVA2CreateDirect3DDeviceManager9(
+ UINT* pResetToken, IDirect3DDeviceManager9** ppDXVAManager);
+
+HRESULT MFCreateDXGIDeviceManager(UINT* pResetToken,
+ IMFDXGIDeviceManager** ppDXVAManager);
+
+HRESULT MFCreateSample(IMFSample** ppIMFSample);
+
+HRESULT MFCreateAlignedMemoryBuffer(DWORD cbMaxLength, DWORD fAlignmentFlags,
+ IMFMediaBuffer** ppBuffer);
+
+HRESULT MFCreateDXGISurfaceBuffer(REFIID riid, IUnknown* punkSurface,
+ UINT uSubresourceIndex,
+ BOOL fButtomUpWhenLinear,
+ IMFMediaBuffer** ppBuffer);
+
+HRESULT MFTEnumEx(GUID guidCategory, UINT32 Flags,
+ const MFT_REGISTER_TYPE_INFO* pInputType,
+ const MFT_REGISTER_TYPE_INFO* pOutputType,
+ IMFActivate*** pppMFTActivate, UINT32* pnumMFTActivate);
+
+HRESULT MFTGetInfo(CLSID clsidMFT, LPWSTR* pszName,
+ MFT_REGISTER_TYPE_INFO** ppInputTypes, UINT32* pcInputTypes,
+ MFT_REGISTER_TYPE_INFO** ppOutputTypes,
+ UINT32* pcOutputTypes, IMFAttributes** ppAttributes);
+
+HRESULT MFCreateAttributes(IMFAttributes** ppMFAttributes, UINT32 cInitialSize);
+
+HRESULT MFCreateEventQueue(IMFMediaEventQueue** ppMediaEventQueue);
+
+HRESULT MFCreateStreamDescriptor(DWORD dwStreamIdentifier, DWORD cMediaTypes,
+ IMFMediaType** apMediaTypes,
+ IMFStreamDescriptor** ppDescriptor);
+
+HRESULT MFCreateAsyncResult(IUnknown* punkObject, IMFAsyncCallback* pCallback,
+ IUnknown* punkState,
+ IMFAsyncResult** ppAsyncResult);
+
+HRESULT MFCreatePresentationDescriptor(
+ DWORD cStreamDescriptors, IMFStreamDescriptor** apStreamDescriptors,
+ IMFPresentationDescriptor** ppPresentationDescriptor);
+
+HRESULT MFCreateMemoryBuffer(DWORD cbMaxLength, IMFMediaBuffer** ppBuffer);
+
+HRESULT MFLockDXGIDeviceManager(UINT* pResetToken,
+ IMFDXGIDeviceManager** ppManager);
+
+HRESULT MFUnlockDXGIDeviceManager();
+
+HRESULT MFPutWorkItem(DWORD dwQueue, IMFAsyncCallback* pCallback,
+ IUnknown* pState);
+
+HRESULT MFSerializeAttributesToStream(IMFAttributes* pAttr, DWORD dwOptions,
+ IStream* pStm);
+
+HRESULT MFWrapMediaType(IMFMediaType* pOrig, REFGUID MajorType, REFGUID SubType,
+ IMFMediaType** ppWrap);
+
+} // namespace mozilla::wmf
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.cpp b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
new file mode 100644
index 0000000000..6ebcf9a80a
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.cpp
@@ -0,0 +1,315 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFAudioMFTManager.h"
+#include "MediaInfo.h"
+#include "TimeUnits.h"
+#include "VideoUtils.h"
+#include "WMFUtils.h"
+#include "mozilla/AbstractThread.h"
+#include "mozilla/Logging.h"
+#include "mozilla/Telemetry.h"
+#include "nsTArray.h"
+#include "BufferReader.h"
+#include "mozilla/ScopeExit.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+
+using media::TimeUnit;
+
+WMFAudioMFTManager::WMFAudioMFTManager(const AudioInfo& aConfig)
+ : mAudioChannels(aConfig.mChannels),
+ mChannelsMap(AudioConfig::ChannelLayout::UNKNOWN_MAP),
+ mAudioRate(aConfig.mRate),
+ mStreamType(GetStreamTypeFromMimeType(aConfig.mMimeType)) {
+ MOZ_COUNT_CTOR(WMFAudioMFTManager);
+
+ if (mStreamType == WMFStreamType::AAC) {
+ const uint8_t* audioSpecConfig;
+ uint32_t configLength;
+ if (aConfig.mCodecSpecificConfig.is<AacCodecSpecificData>()) {
+ const AacCodecSpecificData& aacCodecSpecificData =
+ aConfig.mCodecSpecificConfig.as<AacCodecSpecificData>();
+ audioSpecConfig =
+ aacCodecSpecificData.mDecoderConfigDescriptorBinaryBlob->Elements();
+ configLength =
+ aacCodecSpecificData.mDecoderConfigDescriptorBinaryBlob->Length();
+
+ mRemainingEncoderDelay = mEncoderDelay =
+ aacCodecSpecificData.mEncoderDelayFrames;
+ mTotalMediaFrames = aacCodecSpecificData.mMediaFrameCount;
+ LOG("AudioMFT decoder: Found AAC decoder delay (%" PRIu32
+ "frames) and total media frames (%" PRIu64 " frames)\n",
+ mEncoderDelay, mTotalMediaFrames);
+ } else {
+      // Gracefully handle failure to cover all codec specific cases above.
+      // Once we're confident there is no fall through from those cases, we
+      // should remove this code.
+ RefPtr<MediaByteBuffer> audioCodecSpecificBinaryBlob =
+ GetAudioCodecSpecificBlob(aConfig.mCodecSpecificConfig);
+ audioSpecConfig = audioCodecSpecificBinaryBlob->Elements();
+ configLength = audioCodecSpecificBinaryBlob->Length();
+ }
+ AACAudioSpecificConfigToUserData(aConfig.mExtendedProfile, audioSpecConfig,
+ configLength, mUserData);
+ }
+}
+
+WMFAudioMFTManager::~WMFAudioMFTManager() {
+ MOZ_COUNT_DTOR(WMFAudioMFTManager);
+}
+
+const GUID& WMFAudioMFTManager::GetMediaSubtypeGUID() {
+ MOZ_ASSERT(StreamTypeIsAudio(mStreamType));
+ switch (mStreamType) {
+ case WMFStreamType::AAC:
+ return MFAudioFormat_AAC;
+ case WMFStreamType::MP3:
+ return MFAudioFormat_MP3;
+ default:
+ return GUID_NULL;
+ };
+}
+
+bool WMFAudioMFTManager::Init() {
+ NS_ENSURE_TRUE(StreamTypeIsAudio(mStreamType), false);
+
+ RefPtr<MFTDecoder> decoder(new MFTDecoder());
+ // Note: MP3 MFT isn't registered as supporting Float output, but it works.
+ // Find PCM output MFTs as this is the common type.
+ HRESULT hr = WMFDecoderModule::CreateMFTDecoder(mStreamType, decoder);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ // Setup input/output media types
+ RefPtr<IMFMediaType> inputType;
+
+ hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = inputType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, mAudioRate);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = inputType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, mAudioChannels);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ if (mStreamType == WMFStreamType::AAC) {
+ hr = inputType->SetUINT32(MF_MT_AAC_PAYLOAD_TYPE, 0x0); // Raw AAC packet
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = inputType->SetBlob(MF_MT_USER_DATA, mUserData.Elements(),
+ mUserData.Length());
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+ }
+
+ RefPtr<IMFMediaType> outputType;
+ hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = outputType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_Float);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = outputType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 32);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ hr = decoder->SetMediaTypes(inputType, outputType);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), false);
+
+ mDecoder = decoder;
+
+ return true;
+}
+
+HRESULT
+WMFAudioMFTManager::Input(MediaRawData* aSample) {
+ mLastInputTime = aSample->mTime;
+ return mDecoder->Input(aSample->Data(), uint32_t(aSample->Size()),
+ aSample->mTime.ToMicroseconds(),
+ aSample->mDuration.ToMicroseconds());
+}
+
+nsCString WMFAudioMFTManager::GetCodecName() const {
+ if (mStreamType == WMFStreamType::AAC) {
+ return "aac"_ns;
+ } else if (mStreamType == WMFStreamType::MP3) {
+ return "mp3"_ns;
+ }
+ return "unknown"_ns;
+}
+
+HRESULT
+WMFAudioMFTManager::UpdateOutputType() {
+ HRESULT hr;
+
+ RefPtr<IMFMediaType> type;
+ hr = mDecoder->GetOutputMediaType(type);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = type->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &mAudioRate);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = type->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &mAudioChannels);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ uint32_t channelsMap;
+ hr = type->GetUINT32(MF_MT_AUDIO_CHANNEL_MASK, &channelsMap);
+ if (SUCCEEDED(hr)) {
+ mChannelsMap = channelsMap;
+ } else {
+ LOG("Unable to retrieve channel layout. Ignoring");
+ mChannelsMap = AudioConfig::ChannelLayout::UNKNOWN_MAP;
+ }
+
+ return S_OK;
+}
+
+HRESULT
+WMFAudioMFTManager::Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutData) {
+ aOutData = nullptr;
+ RefPtr<IMFSample> sample;
+ HRESULT hr;
+ int typeChangeCount = 0;
+ const auto oldAudioRate = mAudioRate;
+ while (true) {
+ hr = mDecoder->Output(&sample);
+ if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+ return hr;
+ }
+ if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+ hr = mDecoder->FindDecoderOutputType();
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ hr = UpdateOutputType();
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ // Catch infinite loops, but some decoders perform at least 2 stream
+ // changes on consecutive calls, so be permissive.
+ // 100 is arbitrarily > 2.
+ NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE);
+ ++typeChangeCount;
+ continue;
+ }
+ break;
+ }
+
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ if (!sample) {
+ LOG("Audio MFTDecoder returned success but null output.");
+ return E_FAIL;
+ }
+
+ UINT32 discontinuity = false;
+ sample->GetUINT32(MFSampleExtension_Discontinuity, &discontinuity);
+ if (mFirstFrame || discontinuity) {
+ // Update the output type, in case this segment has a different
+ // rate. This also triggers on the first sample, which can have a
+ // different rate than is advertised in the container, and sometimes we
+ // don't get a MF_E_TRANSFORM_STREAM_CHANGE when the rate changes.
+ hr = UpdateOutputType();
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ mFirstFrame = false;
+ }
+
+ LONGLONG hns;
+ hr = sample->GetSampleTime(&hns);
+ if (FAILED(hr)) {
+ return E_FAIL;
+ }
+ TimeUnit pts = TimeUnit::FromHns(hns, mAudioRate);
+ NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
+
+ RefPtr<IMFMediaBuffer> buffer;
+ hr = sample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ BYTE* data = nullptr; // Note: *data will be owned by the IMFMediaBuffer, we
+ // don't need to free it.
+ DWORD maxLength = 0, currentLength = 0;
+ hr = buffer->Lock(&data, &maxLength, &currentLength);
+ ScopeExit exit([buffer] { buffer->Unlock(); });
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ // Output is made of floats.
+ int32_t numSamples = currentLength / sizeof(float);
+ int32_t numFrames = numSamples / mAudioChannels;
+ MOZ_ASSERT(numFrames >= 0);
+ MOZ_ASSERT(numSamples >= 0);
+ if (numFrames == 0) {
+ // All data from this chunk stripped, loop back and try to output the next
+ // frame, if possible.
+ return S_OK;
+ }
+
+ if (oldAudioRate != mAudioRate) {
+ LOG("Audio rate changed from %" PRIu32 " to %" PRIu32, oldAudioRate,
+ mAudioRate);
+ }
+
+ AlignedAudioBuffer audioData(numSamples);
+ if (!audioData) {
+ return E_OUTOFMEMORY;
+ }
+
+ float* floatData = reinterpret_cast<float*>(data);
+ PodCopy(audioData.Data(), floatData, numSamples);
+
+ TimeUnit duration(numFrames, mAudioRate);
+ NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
+
+ const bool isAudioRateChangedToHigher = oldAudioRate < mAudioRate;
+ if (IsPartialOutput(duration, isAudioRateChangedToHigher)) {
+ LOG("Encounter a partial frame?! duration shrinks from %s to %s",
+ mLastOutputDuration.ToString().get(), duration.ToString().get());
+ return MF_E_TRANSFORM_NEED_MORE_INPUT;
+ }
+
+ aOutData = new AudioData(aStreamOffset, pts, std::move(audioData),
+ mAudioChannels, mAudioRate, mChannelsMap);
+ MOZ_DIAGNOSTIC_ASSERT(duration == aOutData->mDuration, "must be equal");
+ mLastOutputDuration = aOutData->mDuration;
+
+#ifdef LOG_SAMPLE_DECODE
+ LOG("Decoded audio sample! timestamp=%lld duration=%lld currentLength=%u",
+ pts.ToMicroseconds(), duration.ToMicroseconds(), currentLength);
+#endif
+
+ return S_OK;
+}
+
+bool WMFAudioMFTManager::IsPartialOutput(
+ const media::TimeUnit& aNewOutputDuration,
+ const bool aIsRateChangedToHigher) const {
+  // This issue was found on Windows 11, where the AAC MFT decoder would
+  // incorrectly output partial samples to us, even though MS's documentation
+  // says this won't happen [1]. More details are in bug 1731430 comment 26.
+  // If the audio rate hasn't been changed to a higher value (which by itself
+  // would shorten the duration), yet the new output duration is still shorter
+  // than the last one, the new output is possibly an incorrect partial output.
+  // [1]
+  // https://docs.microsoft.com/en-us/windows/win32/medfound/mft-message-command-drain
+ if (mStreamType != WMFStreamType::AAC) {
+ return false;
+ }
+ if (mLastOutputDuration > aNewOutputDuration && !aIsRateChangedToHigher) {
+ return true;
+ }
+ return false;
+}
+
+void WMFAudioMFTManager::Shutdown() { mDecoder = nullptr; }
+
+} // namespace mozilla
+
+#undef LOG
diff --git a/dom/media/platforms/wmf/WMFAudioMFTManager.h b/dom/media/platforms/wmf/WMFAudioMFTManager.h
new file mode 100644
index 0000000000..b5dc379396
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFAudioMFTManager.h
@@ -0,0 +1,69 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFAudioOutputSource_h_)
+# define WMFAudioOutputSource_h_
+
+# include "MFTDecoder.h"
+# include "WMF.h"
+# include "WMFDecoderModule.h"
+# include "WMFMediaDataDecoder.h"
+# include "mozilla/RefPtr.h"
+
+namespace mozilla {
+
+class WMFAudioMFTManager : public MFTManager {
+ public:
+ explicit WMFAudioMFTManager(const AudioInfo& aConfig);
+ ~WMFAudioMFTManager();
+
+ bool Init();
+
+ HRESULT Input(MediaRawData* aSample) override;
+
+  // Note WMF's AAC decoder sometimes outputs negatively timestamped samples,
+  // presumably the preroll samples, and we strip them. We may return a null
+  // aOutput in this case.
+ HRESULT Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutput) override;
+
+ void Shutdown() override;
+
+ TrackInfo::TrackType GetType() override { return TrackInfo::kAudioTrack; }
+
+ nsCString GetDescriptionName() const override {
+ return "wmf audio decoder"_ns;
+ }
+
+ nsCString GetCodecName() const override;
+
+ private:
+ HRESULT UpdateOutputType();
+
+ bool IsPartialOutput(const media::TimeUnit& aNewOutputDuration,
+ const bool aIsRateChangedToHigher) const;
+
+ uint32_t mAudioChannels;
+ AudioConfig::ChannelLayout::ChannelMap mChannelsMap;
+ uint32_t mAudioRate;
+ nsTArray<BYTE> mUserData;
+
+ WMFStreamType mStreamType;
+
+ const GUID& GetMediaSubtypeGUID();
+
+ media::TimeUnit mLastInputTime = media::TimeUnit::Zero();
+ media::TimeUnit mLastOutputDuration = media::TimeUnit::Zero();
+
+ bool mFirstFrame = true;
+
+ uint64_t mTotalMediaFrames = 0;
+ uint32_t mEncoderDelay = 0;
+ uint32_t mRemainingEncoderDelay = 0;
+};
+
+} // namespace mozilla
+
+#endif // WMFAudioOutputSource_h_
diff --git a/dom/media/platforms/wmf/WMFDataEncoderUtils.h b/dom/media/platforms/wmf/WMFDataEncoderUtils.h
new file mode 100644
index 0000000000..49221f7ae3
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFDataEncoderUtils.h
@@ -0,0 +1,165 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFMediaDataEncoder.h"
+
+#include "AnnexB.h"
+#include "H264.h"
+#include "libyuv.h"
+#include "mozilla/Logging.h"
+#include "mozilla/mscom/EnsureMTA.h"
+
+#define WMF_ENC_LOGD(arg, ...) \
+ MOZ_LOG( \
+ mozilla::sPEMLog, mozilla::LogLevel::Debug, \
+ ("WMFMediaDataEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+#define WMF_ENC_LOGE(arg, ...) \
+ MOZ_LOG( \
+ mozilla::sPEMLog, mozilla::LogLevel::Error, \
+ ("WMFMediaDataEncoder(0x%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
+
+namespace mozilla {
+
+extern LazyLogModule sPEMLog;
+
+static const GUID CodecToSubtype(MediaDataEncoder::CodecType aCodec) {
+ switch (aCodec) {
+ case MediaDataEncoder::CodecType::H264:
+ return MFVideoFormat_H264;
+ case MediaDataEncoder::CodecType::VP8:
+ return MFVideoFormat_VP80;
+ case MediaDataEncoder::CodecType::VP9:
+ return MFVideoFormat_VP90;
+ default:
+ MOZ_ASSERT(false, "Unsupported codec");
+ return GUID_NULL;
+ }
+}
+
+bool CanCreateWMFEncoder(MediaDataEncoder::CodecType aCodec) {
+ bool canCreate = false;
+ mscom::EnsureMTA([&]() {
+ if (!wmf::MediaFoundationInitializer::HasInitialized()) {
+ return;
+ }
+ // Try HW encoder first.
+ auto enc = MakeRefPtr<MFTEncoder>(false /* HW not allowed */);
+ canCreate = SUCCEEDED(enc->Create(CodecToSubtype(aCodec)));
+ if (!canCreate) {
+ // Try SW encoder.
+ enc = MakeRefPtr<MFTEncoder>(true /* HW not allowed */);
+ canCreate = SUCCEEDED(enc->Create(CodecToSubtype(aCodec)));
+ }
+ });
+ return canCreate;
+}
+
+static already_AddRefed<MediaByteBuffer> ParseH264Parameters(
+ nsTArray<uint8_t>& aHeader, const bool aAsAnnexB) {
+ size_t length = aHeader.Length();
+ auto annexB = MakeRefPtr<MediaByteBuffer>(length);
+ PodCopy(annexB->Elements(), aHeader.Elements(), length);
+ annexB->SetLength(length);
+ if (aAsAnnexB) {
+ return annexB.forget();
+ }
+
+ // Convert to avcC.
+ nsTArray<AnnexB::NALEntry> paramSets;
+ AnnexB::ParseNALEntries(
+ Span<const uint8_t>(annexB->Elements(), annexB->Length()), paramSets);
+
+ auto avcc = MakeRefPtr<MediaByteBuffer>();
+ AnnexB::NALEntry& sps = paramSets.ElementAt(0);
+ AnnexB::NALEntry& pps = paramSets.ElementAt(1);
+ const uint8_t* spsPtr = annexB->Elements() + sps.mOffset;
+ H264::WriteExtraData(
+ avcc, spsPtr[1], spsPtr[2], spsPtr[3],
+ Span<const uint8_t>(spsPtr, sps.mSize),
+ Span<const uint8_t>(annexB->Elements() + pps.mOffset, pps.mSize));
+ return avcc.forget();
+}
+
+static uint32_t GetProfile(
+ MediaDataEncoder::H264Specific::ProfileLevel aProfileLevel) {
+ switch (aProfileLevel) {
+ case MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel:
+ return eAVEncH264VProfile_Base;
+ case MediaDataEncoder::H264Specific::ProfileLevel::MainAutoLevel:
+ return eAVEncH264VProfile_Main;
+ default:
+ return eAVEncH264VProfile_unknown;
+ }
+}
+
+template <typename Config>
+HRESULT SetMediaTypes(RefPtr<MFTEncoder>& aEncoder, Config& aConfig) {
+ RefPtr<IMFMediaType> inputType = CreateInputType(aConfig);
+ if (!inputType) {
+ return E_FAIL;
+ }
+
+ RefPtr<IMFMediaType> outputType = CreateOutputType(aConfig);
+ if (!outputType) {
+ return E_FAIL;
+ }
+
+ return aEncoder->SetMediaTypes(inputType, outputType);
+}
+
+template <typename Config>
+already_AddRefed<IMFMediaType> CreateInputType(Config& aConfig) {
+ RefPtr<IMFMediaType> type;
+ return SUCCEEDED(wmf::MFCreateMediaType(getter_AddRefs(type))) &&
+ SUCCEEDED(
+ type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) &&
+ SUCCEEDED(type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_NV12)) &&
+ SUCCEEDED(type->SetUINT32(MF_MT_INTERLACE_MODE,
+ MFVideoInterlace_Progressive)) &&
+ SUCCEEDED(MFSetAttributeRatio(type, MF_MT_FRAME_RATE,
+ aConfig.mFramerate, 1)) &&
+ SUCCEEDED(MFSetAttributeSize(type, MF_MT_FRAME_SIZE,
+ aConfig.mSize.width,
+ aConfig.mSize.height))
+ ? type.forget()
+ : nullptr;
+}
+
+template <typename Config>
+already_AddRefed<IMFMediaType> CreateOutputType(Config& aConfig) {
+ RefPtr<IMFMediaType> type;
+ if (FAILED(wmf::MFCreateMediaType(getter_AddRefs(type))) ||
+ FAILED(type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) ||
+ FAILED(
+ type->SetGUID(MF_MT_SUBTYPE, CodecToSubtype(aConfig.mCodecType))) ||
+ FAILED(type->SetUINT32(MF_MT_AVG_BITRATE, aConfig.mBitsPerSec)) ||
+ FAILED(type->SetUINT32(MF_MT_INTERLACE_MODE,
+ MFVideoInterlace_Progressive)) ||
+ FAILED(
+ MFSetAttributeRatio(type, MF_MT_FRAME_RATE, aConfig.mFramerate, 1)) ||
+ FAILED(MFSetAttributeSize(type, MF_MT_FRAME_SIZE, aConfig.mSize.width,
+ aConfig.mSize.height))) {
+ return nullptr;
+ }
+ if (aConfig.mCodecSpecific &&
+ FAILED(SetCodecSpecific(type, aConfig.mCodecSpecific.ref()))) {
+ return nullptr;
+ }
+
+ return type.forget();
+}
+
+template <typename T>
+HRESULT SetCodecSpecific(IMFMediaType* aOutputType, const T& aSpecific) {
+ return S_OK;
+}
+
+template <>
+HRESULT SetCodecSpecific(IMFMediaType* aOutputType,
+ const MediaDataEncoder::H264Specific& aSpecific) {
+ return aOutputType->SetUINT32(MF_MT_MPEG2_PROFILE,
+ GetProfile(aSpecific.mProfileLevel));
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFDecoderModule.cpp b/dom/media/platforms/wmf/WMFDecoderModule.cpp
new file mode 100644
index 0000000000..5366071840
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFDecoderModule.cpp
@@ -0,0 +1,454 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFDecoderModule.h"
+
+#include <algorithm>
+#include <vector>
+
+#include "DriverCrashGuard.h"
+#include "GfxDriverInfo.h"
+#include "MFTDecoder.h"
+#include "MP4Decoder.h"
+#include "MediaInfo.h"
+#include "PDMFactory.h"
+#include "VPXDecoder.h"
+#include "WMFAudioMFTManager.h"
+#include "WMFMediaDataDecoder.h"
+#include "WMFVideoMFTManager.h"
+#include "mozilla/DebugOnly.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/StaticMutex.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/SyncRunnable.h"
+#include "mozilla/WindowsVersion.h"
+#include "mozilla/gfx/gfxVars.h"
+#include "mozilla/mscom/EnsureMTA.h"
+#include "mozilla/ProfilerMarkers.h"
+#include "nsComponentManagerUtils.h"
+#include "nsIXULRuntime.h"
+#include "nsIXULRuntime.h" // for BrowserTabsRemoteAutostart
+#include "nsServiceManagerUtils.h"
+#include "nsWindowsHelpers.h"
+#include "prsystem.h"
+
+#ifdef MOZ_AV1
+# include "AOMDecoder.h"
+#endif
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+
+// Helper function to add a profile marker and log at the same time.
+static void MOZ_FORMAT_PRINTF(2, 3)
+ WmfDecoderModuleMarkerAndLog(const ProfilerString8View& aMarkerTag,
+ const char* aFormat, ...) {
+ va_list ap;
+ va_start(ap, aFormat);
+ const nsVprintfCString markerString(aFormat, ap);
+ va_end(ap);
+ PROFILER_MARKER_TEXT(aMarkerTag, MEDIA_PLAYBACK, {}, markerString);
+ LOG("%s", markerString.get());
+}
+
+static const GUID CLSID_CMSAACDecMFT = {
+ 0x32D186A7,
+ 0x218F,
+ 0x4C75,
+ {0x88, 0x76, 0xDD, 0x77, 0x27, 0x3A, 0x89, 0x99}};
+
+static Atomic<bool> sDXVAEnabled(false);
+
+/* static */
+already_AddRefed<PlatformDecoderModule> WMFDecoderModule::Create() {
+ RefPtr<WMFDecoderModule> wmf = new WMFDecoderModule();
+ return wmf.forget();
+}
+
+static bool IsRemoteAcceleratedCompositor(
+ layers::KnowsCompositor* aKnowsCompositor) {
+ if (!aKnowsCompositor) {
+ return false;
+ }
+
+ if (aKnowsCompositor->UsingSoftwareWebRenderD3D11()) {
+ return true;
+ }
+
+ layers::TextureFactoryIdentifier ident =
+ aKnowsCompositor->GetTextureFactoryIdentifier();
+ return !aKnowsCompositor->UsingSoftwareWebRender() &&
+ ident.mParentProcessType == GeckoProcessType_GPU;
+}
+
+static Atomic<bool> sSupportedTypesInitialized(false);
+static EnumSet<WMFStreamType> sSupportedTypes;
+
+/* static */
+void WMFDecoderModule::Init() {
+ MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
+ if (XRE_IsContentProcess()) {
+ // If we're in the content process and the UseGPUDecoder pref is set, it
+ // means that we've given up on the GPU process (it's been crashing) so we
+ // should disable DXVA
+ sDXVAEnabled = !StaticPrefs::media_gpu_process_decoder();
+ } else if (XRE_IsGPUProcess()) {
+ // Always allow DXVA in the GPU process.
+ sDXVAEnabled = true;
+ } else if (XRE_IsRDDProcess()) {
+ // Hardware accelerated decoding is explicitly only done in the GPU process
+ // to avoid copying textures whenever possible. Previously, detecting
+ // whether the video bridge was set up could be done with the following:
+ // sDXVAEnabled = !!DeviceManagerDx::Get()->GetImageDevice();
+ // The video bridge was previously broken due to initialization order
+ // issues. For more information see Bug 1763880.
+ sDXVAEnabled = false;
+ } else {
+ // Only allow DXVA in the UI process if we aren't in e10s Firefox
+ sDXVAEnabled = !mozilla::BrowserTabsRemoteAutostart();
+ }
+
+ // We have heavy logging below to help diagnose issue around hardware
+ // decoding failures. Due to these failures often relating to driver level
+ // problems they're hard to nail down, so we want lots of info. We may be
+ // able to relax this in future if we're not seeing such problems (see bug
+ // 1673007 for references to the bugs motivating this).
+ bool hwVideo = gfx::gfxVars::GetCanUseHardwareVideoDecodingOrDefault();
+ WmfDecoderModuleMarkerAndLog(
+ "WMFInit DXVA Status",
+ "sDXVAEnabled: %s, CanUseHardwareVideoDecoding: %s",
+ sDXVAEnabled ? "true" : "false", hwVideo ? "true" : "false");
+ sDXVAEnabled = sDXVAEnabled && hwVideo;
+
+ mozilla::mscom::EnsureMTA([&]() {
+ // Store the supported MFT decoders.
+ sSupportedTypes.clear();
+ // i = 1 to skip Unknown.
+ for (uint32_t i = 1; i < static_cast<uint32_t>(WMFStreamType::SENTINEL);
+ i++) {
+ WMFStreamType type = static_cast<WMFStreamType>(i);
+ RefPtr<MFTDecoder> decoder = new MFTDecoder();
+ HRESULT hr = CreateMFTDecoder(type, decoder);
+ if (SUCCEEDED(hr)) {
+ sSupportedTypes += type;
+ WmfDecoderModuleMarkerAndLog("WMFInit Decoder Supported",
+ "%s is enabled", StreamTypeToString(type));
+ } else if (hr != E_FAIL) {
+ // E_FAIL should be logged by CreateMFTDecoder. Skipping those codes
+ // will help to keep the logs readable.
+ WmfDecoderModuleMarkerAndLog("WMFInit Decoder Failed",
+ "%s failed with code 0x%lx",
+ StreamTypeToString(type), hr);
+ }
+ }
+ });
+
+ sSupportedTypesInitialized = true;
+
+ WmfDecoderModuleMarkerAndLog("WMFInit Result",
+ "WMFDecoderModule::Init finishing");
+}
+
+/* static */
+int WMFDecoderModule::GetNumDecoderThreads() {
+ int32_t numCores = PR_GetNumberOfProcessors();
+
+ // If we have more than 4 cores, let the decoder decide how many threads.
+ // On an 8 core machine, WMF chooses 4 decoder threads.
+ static const int WMF_DECODER_DEFAULT = -1;
+ if (numCores > 4) {
+ return WMF_DECODER_DEFAULT;
+ }
+ return std::max(numCores - 1, 1);
+}
+
+/* static */
+HRESULT WMFDecoderModule::CreateMFTDecoder(const WMFStreamType& aType,
+ RefPtr<MFTDecoder>& aDecoder) {
+ // Do not expose any video decoder on utility process which is only for audio
+ // decoding.
+ if (XRE_IsUtilityProcess()) {
+ switch (aType) {
+ case WMFStreamType::H264:
+ case WMFStreamType::VP8:
+ case WMFStreamType::VP9:
+ case WMFStreamType::AV1:
+ return E_FAIL;
+ default:
+ break;
+ }
+ }
+
+ switch (aType) {
+ case WMFStreamType::H264:
+ return aDecoder->Create(CLSID_CMSH264DecoderMFT);
+ case WMFStreamType::VP8:
+ static const uint32_t VP8_USABLE_BUILD = 16287;
+ if (!IsWindowsBuildOrLater(VP8_USABLE_BUILD)) {
+ WmfDecoderModuleMarkerAndLog("CreateMFTDecoder, VP8 Failure",
+ "VP8 MFT requires Windows build %" PRId32
+ " or later",
+ VP8_USABLE_BUILD);
+ return E_FAIL;
+ }
+ if (!gfx::gfxVars::UseVP8HwDecode()) {
+ WmfDecoderModuleMarkerAndLog("CreateMFTDecoder, VP8 Failure",
+ "Gfx VP8 blocklist");
+ return E_FAIL;
+ }
+ [[fallthrough]];
+ case WMFStreamType::VP9:
+ if (!sDXVAEnabled) {
+ WmfDecoderModuleMarkerAndLog("CreateMFTDecoder, VPx Disabled",
+ "%s MFT requires DXVA",
+ StreamTypeToString(aType));
+ return E_FAIL;
+ }
+
+ {
+ gfx::WMFVPXVideoCrashGuard guard;
+ if (guard.Crashed()) {
+ WmfDecoderModuleMarkerAndLog(
+ "CreateMFTDecoder, VPx Failure",
+ "Will not use VPx MFT due to crash guard reporting a crash");
+ return E_FAIL;
+ }
+ return aDecoder->Create(CLSID_CMSVPXDecMFT);
+ }
+#ifdef MOZ_AV1
+ case WMFStreamType::AV1:
+      // If this process cannot use DXVA, the AV1 decoder will not be used.
+      // Also, upon startup, Init() will be called both before and after
+      // layers acceleration is set up. This prevents creating the AV1 decoder
+      // twice.
+ if (!sDXVAEnabled) {
+ WmfDecoderModuleMarkerAndLog("CreateMFTDecoder AV1 Disabled",
+ "AV1 MFT requires DXVA");
+ return E_FAIL;
+ }
+ // TODO: MFTEnumEx is slower than creating by CLSID, it may be worth
+ // investigating other ways to instantiate the AV1 decoder.
+ return aDecoder->Create(MFT_CATEGORY_VIDEO_DECODER, MFVideoFormat_AV1,
+ MFVideoFormat_NV12);
+#endif
+ case WMFStreamType::MP3:
+ return aDecoder->Create(CLSID_CMP3DecMediaObject);
+ case WMFStreamType::AAC:
+ return aDecoder->Create(CLSID_CMSAACDecMFT);
+ default:
+ return E_FAIL;
+ }
+}
+
+/* static */
+bool WMFDecoderModule::CanCreateMFTDecoder(const WMFStreamType& aType) {
+ MOZ_ASSERT(WMFStreamType::Unknown < aType && aType < WMFStreamType::SENTINEL);
+ if (!sSupportedTypesInitialized) {
+ if (NS_IsMainThread()) {
+ Init();
+ } else {
+ nsCOMPtr<nsIRunnable> runnable =
+ NS_NewRunnableFunction("WMFDecoderModule::Init", [&]() { Init(); });
+ SyncRunnable::DispatchToThread(GetMainThreadSerialEventTarget(),
+ runnable);
+ }
+ }
+
+ // Check prefs here rather than CreateMFTDecoder so that prefs aren't baked
+ // into sSupportedTypes
+ switch (aType) {
+ case WMFStreamType::VP8:
+ case WMFStreamType::VP9:
+ if (!StaticPrefs::media_wmf_vp9_enabled()) {
+ return false;
+ }
+ break;
+#ifdef MOZ_AV1
+ case WMFStreamType::AV1:
+ if (!StaticPrefs::media_av1_enabled() ||
+ !StaticPrefs::media_wmf_av1_enabled()) {
+ return false;
+ }
+ break;
+#endif
+ case WMFStreamType::MP3:
+ // Prefer ffvpx mp3 decoder over WMF.
+ if (StaticPrefs::media_ffvpx_mp3_enabled()) {
+ return false;
+ }
+ break;
+ default:
+ break;
+ }
+
+ // Do not expose any video decoder on utility process which is only for audio
+ // decoding.
+ if (XRE_IsUtilityProcess()) {
+ switch (aType) {
+ case WMFStreamType::H264:
+ case WMFStreamType::VP8:
+ case WMFStreamType::VP9:
+ case WMFStreamType::AV1:
+ return false;
+ default:
+ break;
+ }
+ }
+
+ return sSupportedTypes.contains(aType);
+}
+
+bool WMFDecoderModule::SupportsColorDepth(
+ gfx::ColorDepth aColorDepth, DecoderDoctorDiagnostics* aDiagnostics) const {
+ // Color depth support can be determined by creating DX decoders.
+ return true;
+}
+
+media::DecodeSupportSet WMFDecoderModule::Supports(
+ const SupportDecoderParams& aParams,
+ DecoderDoctorDiagnostics* aDiagnostics) const {
+ // This should only be supported by MFMediaEngineDecoderModule.
+ if (aParams.mMediaEngineId) {
+ return media::DecodeSupport::Unsupported;
+ }
+ // In GPU process, only support decoding if video. This only gives a hint of
+ // what the GPU decoder *may* support. The actual check will occur in
+ // CreateVideoDecoder.
+ const auto& trackInfo = aParams.mConfig;
+ if (XRE_IsGPUProcess() && !trackInfo.GetAsVideoInfo()) {
+ return media::DecodeSupport::Unsupported;
+ }
+
+ const auto* videoInfo = trackInfo.GetAsVideoInfo();
+ // Temporary - forces use of VPXDecoder when alpha is present.
+ // Bug 1263836 will handle alpha scenario once implemented. It will shift
+ // the check for alpha to PDMFactory but not itself remove the need for a
+ // check.
+ if (videoInfo && (!SupportsColorDepth(videoInfo->mColorDepth, aDiagnostics) ||
+ videoInfo->HasAlpha())) {
+ return media::DecodeSupport::Unsupported;
+ }
+
+ WMFStreamType type = GetStreamTypeFromMimeType(aParams.MimeType());
+ if (type == WMFStreamType::Unknown) {
+ return media::DecodeSupport::Unsupported;
+ }
+
+ if (CanCreateMFTDecoder(type)) {
+ if (StreamTypeIsVideo(type)) {
+ return sDXVAEnabled ? media::DecodeSupport::HardwareDecode
+ : media::DecodeSupport::SoftwareDecode;
+ } else {
+ // Audio only supports software decode
+ return media::DecodeSupport::SoftwareDecode;
+ }
+ }
+
+ return media::DecodeSupport::Unsupported;
+}
+
+nsresult WMFDecoderModule::Startup() {
+ return wmf::MediaFoundationInitializer::HasInitialized() ? NS_OK
+ : NS_ERROR_FAILURE;
+}
+
+already_AddRefed<MediaDataDecoder> WMFDecoderModule::CreateVideoDecoder(
+ const CreateDecoderParams& aParams) {
+ // In GPU process, only support decoding if an accelerated compositor is
+ // known.
+ if (XRE_IsGPUProcess() &&
+ !IsRemoteAcceleratedCompositor(aParams.mKnowsCompositor)) {
+ return nullptr;
+ }
+
+ UniquePtr<WMFVideoMFTManager> manager(new WMFVideoMFTManager(
+ aParams.VideoConfig(), aParams.mKnowsCompositor, aParams.mImageContainer,
+ aParams.mRate.mValue, aParams.mOptions, sDXVAEnabled,
+ aParams.mTrackingId));
+
+ MediaResult result = manager->Init();
+ if (NS_FAILED(result)) {
+ if (aParams.mError) {
+ *aParams.mError = result;
+ }
+ WmfDecoderModuleMarkerAndLog(
+ "WMFVDecoderCreation Failure",
+ "WMFDecoderModule::CreateVideoDecoder failed for manager with "
+ "description %s with result: %s",
+ manager->GetDescriptionName().get(), result.Description().get());
+ return nullptr;
+ }
+
+ nsAutoCString hwFailure;
+ if (!manager->IsHardwareAccelerated(hwFailure)) {
+ // The decoder description includes whether it is using software or
+ // hardware, but no information about how the hardware acceleration failed.
+ WmfDecoderModuleMarkerAndLog(
+ "WMFVDecoderCreation Success",
+ "WMFDecoderModule::CreateVideoDecoder success for manager with "
+ "description %s - DXVA failure: %s",
+ manager->GetDescriptionName().get(), hwFailure.get());
+ } else {
+ WmfDecoderModuleMarkerAndLog(
+ "WMFVDecoderCreation Success",
+ "WMFDecoderModule::CreateVideoDecoder success for manager with "
+ "description %s",
+ manager->GetDescriptionName().get());
+ }
+
+ RefPtr<MediaDataDecoder> decoder = new WMFMediaDataDecoder(manager.release());
+ return decoder.forget();
+}
+
+already_AddRefed<MediaDataDecoder> WMFDecoderModule::CreateAudioDecoder(
+ const CreateDecoderParams& aParams) {
+ if (XRE_IsGPUProcess()) {
+ // Only allow video in the GPU process.
+ return nullptr;
+ }
+
+ UniquePtr<WMFAudioMFTManager> manager(
+ new WMFAudioMFTManager(aParams.AudioConfig()));
+
+ if (!manager->Init()) {
+ WmfDecoderModuleMarkerAndLog(
+ "WMFADecoderCreation Failure",
+ "WMFDecoderModule::CreateAudioDecoder failed for manager with "
+ "description %s",
+ manager->GetDescriptionName().get());
+ return nullptr;
+ }
+
+ WmfDecoderModuleMarkerAndLog(
+ "WMFADecoderCreation Success",
+ "WMFDecoderModule::CreateAudioDecoder success for manager with "
+ "description %s",
+ manager->GetDescriptionName().get());
+
+ RefPtr<MediaDataDecoder> decoder = new WMFMediaDataDecoder(manager.release());
+ return decoder.forget();
+}
+
+media::DecodeSupportSet WMFDecoderModule::SupportsMimeType(
+ const nsACString& aMimeType, DecoderDoctorDiagnostics* aDiagnostics) const {
+ UniquePtr<TrackInfo> trackInfo = CreateTrackInfoWithMIMEType(aMimeType);
+ if (!trackInfo) {
+ return media::DecodeSupport::Unsupported;
+ }
+ auto supports = Supports(SupportDecoderParams(*trackInfo), aDiagnostics);
+ MOZ_LOG(
+ sPDMLog, LogLevel::Debug,
+ ("WMF decoder %s requested type '%s'",
+ supports != media::DecodeSupport::Unsupported ? "supports" : "rejects",
+ aMimeType.BeginReading()));
+ return supports;
+}
+
+} // namespace mozilla
+
+#undef WFM_DECODER_MODULE_STATUS_MARKER
+#undef LOG
diff --git a/dom/media/platforms/wmf/WMFDecoderModule.h b/dom/media/platforms/wmf/WMFDecoderModule.h
new file mode 100644
index 0000000000..3198860511
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFDecoderModule.h
@@ -0,0 +1,58 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFPlatformDecoderModule_h_)
+# define WMFPlatformDecoderModule_h_
+
+# include "PlatformDecoderModule.h"
+# include "WMF.h"
+# include "WMFUtils.h"
+
+namespace mozilla {
+
+class MFTDecoder;
+
+// PlatformDecoderModule implementation backed by Windows Media Foundation
+// MFT decoders. Audio decoder creation is rejected in the GPU process (only
+// video is allowed there; see CreateAudioDecoder in the .cpp).
+class WMFDecoderModule : public PlatformDecoderModule {
+ public:
+  // Factory entry point used by the PDM registry.
+  static already_AddRefed<PlatformDecoderModule> Create();
+
+  // Initializes the module, loads required dynamic libraries, etc.
+  nsresult Startup() override;
+
+  already_AddRefed<MediaDataDecoder> CreateVideoDecoder(
+      const CreateDecoderParams& aParams) override;
+
+  already_AddRefed<MediaDataDecoder> CreateAudioDecoder(
+      const CreateDecoderParams& aParams) override;
+
+  bool SupportsColorDepth(
+      gfx::ColorDepth aColorDepth,
+      DecoderDoctorDiagnostics* aDiagnostics) const override;
+  // MIME-string convenience wrapper around Supports().
+  media::DecodeSupportSet SupportsMimeType(
+      const nsACString& aMimeType,
+      DecoderDoctorDiagnostics* aDiagnostics) const override;
+  media::DecodeSupportSet Supports(
+      const SupportDecoderParams& aParams,
+      DecoderDoctorDiagnostics* aDiagnostics) const override;
+
+  // Called on main thread.
+  static void Init();
+
+  // Called from any thread, must call init first
+  static int GetNumDecoderThreads();
+
+  // Creates (or probes creation of) an MFT decoder for the given stream
+  // type. Static so callers outside an instance can query capability.
+  static HRESULT CreateMFTDecoder(const WMFStreamType& aType,
+                                  RefPtr<MFTDecoder>& aDecoder);
+  static bool CanCreateMFTDecoder(const WMFStreamType& aType);
+
+ private:
+  // Instances are created only through Create().
+  WMFDecoderModule() = default;
+  virtual ~WMFDecoderModule() = default;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFEncoderModule.cpp b/dom/media/platforms/wmf/WMFEncoderModule.cpp
new file mode 100644
index 0000000000..0f8e432390
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFEncoderModule.cpp
@@ -0,0 +1,43 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFEncoderModule.h"
+
+#include "WMFMediaDataEncoder.h"
+
+namespace mozilla {
+extern LazyLogModule sPEMLog;
+
+// A MIME type is supported iff an MFT encoder can be created for the codec
+// it maps to.
+bool WMFEncoderModule::SupportsMimeType(const nsACString& aMimeType) const {
+  return CanCreateWMFEncoder(CreateEncoderParams::CodecTypeForMime(aMimeType));
+}
+
+// Creates a WMF video encoder for H264/VP8/VP9; returns nullptr (the
+// untouched RefPtr) for any other codec type.
+already_AddRefed<MediaDataEncoder> WMFEncoderModule::CreateVideoEncoder(
+    const CreateEncoderParams& aParams, const bool aHardwareNotAllowed) const {
+  MediaDataEncoder::CodecType codec =
+      CreateEncoderParams::CodecTypeForMime(aParams.mConfig.mMimeType);
+  RefPtr<MediaDataEncoder> encoder;
+  switch (codec) {
+    case MediaDataEncoder::CodecType::H264:
+      encoder = new WMFMediaDataEncoder<MediaDataEncoder::H264Config>(
+          aParams.ToH264Config(), aParams.mTaskQueue, aHardwareNotAllowed);
+      break;
+    case MediaDataEncoder::CodecType::VP8:
+      encoder = new WMFMediaDataEncoder<MediaDataEncoder::VP8Config>(
+          aParams.ToVP8Config(), aParams.mTaskQueue, aHardwareNotAllowed);
+      break;
+    case MediaDataEncoder::CodecType::VP9:
+      encoder = new WMFMediaDataEncoder<MediaDataEncoder::VP9Config>(
+          aParams.ToVP9Config(), aParams.mTaskQueue, aHardwareNotAllowed);
+      break;
+    default:
+      // Do nothing.
+      break;
+  }
+  return encoder.forget();
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFEncoderModule.h b/dom/media/platforms/wmf/WMFEncoderModule.h
new file mode 100644
index 0000000000..6d02a3af96
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFEncoderModule.h
@@ -0,0 +1,24 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WMFEncoderModule_h_
+#define WMFEncoderModule_h_
+
+#include "PlatformEncoderModule.h"
+
+namespace mozilla {
+// PlatformEncoderModule backed by Windows Media Foundation MFT encoders.
+// Only video encoding is provided (see CreateVideoEncoder in the .cpp).
+class WMFEncoderModule final : public PlatformEncoderModule {
+ public:
+  bool SupportsMimeType(const nsACString& aMimeType) const override;
+
+  already_AddRefed<MediaDataEncoder> CreateVideoEncoder(
+      const CreateEncoderParams& aParams,
+      const bool aHardwareNotAllowed) const override;
+};
+
+} // namespace mozilla
+
+#endif /* WMFEncoderModule_h_ */
diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp b/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp
new file mode 100644
index 0000000000..73589d02c2
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.cpp
@@ -0,0 +1,279 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFMediaDataDecoder.h"
+
+#include "VideoUtils.h"
+#include "WMFUtils.h"
+#include "mozilla/Logging.h"
+#include "mozilla/ProfilerMarkers.h"
+#include "mozilla/SyncRunnable.h"
+#include "mozilla/TaskQueue.h"
+#include "mozilla/Telemetry.h"
+#include "mozilla/WindowsVersion.h"
+#include "nsTArray.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+namespace mozilla {
+
+// Takes ownership of aMFTManager and creates a dedicated task queue on the
+// platform-decoder thread pool; all decoding work is serialized on it.
+WMFMediaDataDecoder::WMFMediaDataDecoder(MFTManager* aMFTManager)
+    : mTaskQueue(TaskQueue::Create(
+          GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER),
+          "WMFMediaDataDecoder")),
+      mMFTManager(aMFTManager) {}
+
+WMFMediaDataDecoder::~WMFMediaDataDecoder() {}
+
+// Resolves immediately with the track type: the MFT manager was already
+// initialized by the decoder module before this decoder was constructed.
+RefPtr<MediaDataDecoder::InitPromise> WMFMediaDataDecoder::Init() {
+  MOZ_ASSERT(!mIsShutDown);
+  return InitPromise::CreateAndResolve(mMFTManager->GetType(), __func__);
+}
+
+// Marks the decoder shut down, then asynchronously tears down the MFT
+// manager on the task queue before shutting the queue itself down.
+// Must not be called twice (diagnostic assert).
+RefPtr<ShutdownPromise> WMFMediaDataDecoder::Shutdown() {
+  MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+  mIsShutDown = true;
+
+  return InvokeAsync(mTaskQueue, __func__, [self = RefPtr{this}, this] {
+    if (mMFTManager) {
+      mMFTManager->Shutdown();
+      mMFTManager = nullptr;
+    }
+    return mTaskQueue->BeginShutdown();
+  });
+}
+
+// Inserts data into the decoder's pipeline.
+// The actual work is dispatched to the task queue (ProcessDecode).
+RefPtr<MediaDataDecoder::DecodePromise> WMFMediaDataDecoder::Decode(
+    MediaRawData* aSample) {
+  MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+  return InvokeAsync<MediaRawData*>(
+      mTaskQueue, this, __func__, &WMFMediaDataDecoder::ProcessDecode, aSample);
+}
+
+// Central failure path for decode/drain: logs, emits a profiler marker, and
+// rejects with NS_ERROR_DOM_MEDIA_DECODE_ERR carrying aReason and the HRESULT.
+RefPtr<MediaDataDecoder::DecodePromise> WMFMediaDataDecoder::ProcessError(
+    HRESULT aError, const char* aReason) {
+  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+
+  nsPrintfCString markerString(
+      "WMFMediaDataDecoder::ProcessError for decoder with description %s with "
+      "reason: %s",
+      GetDescriptionName().get(), aReason);
+  LOG("%s", markerString.get());
+  PROFILER_MARKER_TEXT("WMFDecoder Error", MEDIA_PLAYBACK, {}, markerString);
+
+  // TODO: For the error DXGI_ERROR_DEVICE_RESET, we could return
+  // NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER to get the latest device. Maybe retry
+  // up to 3 times.
+  return DecodePromise::CreateAndReject(
+      MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
+                  RESULT_DETAIL("%s:%lx", aReason, aError)),
+      __func__);
+}
+
+// Task-queue side of Decode(): feeds one compressed sample to the MFT
+// manager and resolves with whatever decoded output is currently available.
+RefPtr<MediaDataDecoder::DecodePromise> WMFMediaDataDecoder::ProcessDecode(
+    MediaRawData* aSample) {
+  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+  DecodedData results;
+  LOG("ProcessDecode, type=%s, sample=%" PRId64,
+      TrackTypeToStr(mMFTManager->GetType()), aSample->mTime.ToMicroseconds());
+  HRESULT hr = mMFTManager->Input(aSample);
+  if (hr == MF_E_NOTACCEPTING) {
+    // The MFT won't accept more input until we pull its pending output;
+    // drain output once and retry the same sample.
+    hr = ProcessOutput(results);
+    if (FAILED(hr) && hr != MF_E_TRANSFORM_NEED_MORE_INPUT) {
+      return ProcessError(hr, "MFTManager::Output(1)");
+    }
+    hr = mMFTManager->Input(aSample);
+  }
+
+  if (FAILED(hr)) {
+    NS_WARNING("MFTManager rejected sample");
+    return ProcessError(hr, "MFTManager::Input");
+  }
+
+  if (mOutputsCount == 0) {
+    // Until the first output arrives, record the input timestamps so that the
+    // first output can be validated (see
+    // ShouldGuardAgaintIncorrectFirstSample).
+    mInputTimesSet.insert(aSample->mTime.ToMicroseconds());
+  }
+
+  if (!mLastTime || aSample->mTime > *mLastTime) {
+    // Track the latest input time/duration; used by ProcessDrain to patch up
+    // the timing of the final output sample.
+    mLastTime = Some(aSample->mTime);
+    mLastDuration = aSample->mDuration;
+  }
+
+  mSamplesCount++;
+  mDrainStatus = DrainStatus::DRAINABLE;
+  mLastStreamOffset = aSample->mOffset;
+
+  hr = ProcessOutput(results);
+  if (SUCCEEDED(hr) || hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+    // NEED_MORE_INPUT just means no output is ready yet; still a success.
+    return DecodePromise::CreateAndResolve(std::move(results), __func__);
+  }
+  return ProcessError(hr, "MFTManager::Output(2)");
+}
+
+// Returns true if aOutput looks like the spurious zero-timestamp first frame
+// the Windows 10 video MFT sometimes emits, in which case the caller drops
+// it. (Name carries a pre-existing typo, "Againt"; kept to match the header.)
+bool WMFMediaDataDecoder::ShouldGuardAgaintIncorrectFirstSample(
+    MediaData* aOutput) const {
+  // Incorrect first samples have only been observed in video tracks, so only
+  // guard video tracks.
+  if (mMFTManager->GetType() != TrackInfo::kVideoTrack) {
+    return false;
+  }
+
+  // By observation so far this issue only happens on Windows 10 so we don't
+  // need to enable this on other versions.
+  if (!IsWin10OrLater()) {
+    return false;
+  }
+
+  // This is not the first output sample so we don't need to guard it.
+  if (mOutputsCount != 0) {
+    return false;
+  }
+
+  // Output isn't in the map which contains the inputs we gave to the decoder.
+  // This is probably the invalid first sample. MFT decoder sometime will return
+  // incorrect first output to us, which always has 0 timestamp, even if the
+  // input we gave to MFT has timestamp that is way later than 0.
+  MOZ_ASSERT(!mInputTimesSet.empty());
+  return mInputTimesSet.find(aOutput->mTime.ToMicroseconds()) ==
+             mInputTimesSet.end() &&
+         aOutput->mTime.ToMicroseconds() == 0;
+}
+
+// Pulls all currently available decoded output from the MFT manager into
+// aResults. Returns S_OK only while output keeps coming; the normal
+// exhaustion result is MF_E_TRANSFORM_NEED_MORE_INPUT.
+HRESULT
+WMFMediaDataDecoder::ProcessOutput(DecodedData& aResults) {
+  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+  RefPtr<MediaData> output;
+  HRESULT hr = S_OK;
+  while (SUCCEEDED(hr = mMFTManager->Output(mLastStreamOffset, output))) {
+    MOZ_ASSERT(output.get(), "Upon success, we must receive an output");
+    if (ShouldGuardAgaintIncorrectFirstSample(output)) {
+      LOG("Discarding sample with time %" PRId64
+          " because of ShouldGuardAgaintIncorrectFirstSample check",
+          output->mTime.ToMicroseconds());
+      continue;
+    }
+    if (++mOutputsCount == 1) {
+      // Got first valid sample, don't need to guard following sample anymore.
+      mInputTimesSet.clear();
+    }
+    aResults.AppendElement(std::move(output));
+    if (mDrainStatus == DrainStatus::DRAINING) {
+      // While draining, stop after each sample; ProcessDrain resumes pulling.
+      break;
+    }
+  }
+  return hr;
+}
+
+// Task-queue side of Flush(): flushes the MFT and resets all per-stream
+// bookkeeping so the next Decode starts from a clean state.
+RefPtr<MediaDataDecoder::FlushPromise> WMFMediaDataDecoder::ProcessFlush() {
+  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+  if (mMFTManager) {
+    mMFTManager->Flush();
+  }
+  // NOTE(review): this LOG dereferences mMFTManager unconditionally even
+  // though the call above is null-guarded — confirm mMFTManager can never be
+  // null here (Flush asserts !mIsShutDown), or move the LOG into the guard.
+  LOG("ProcessFlush, type=%s", TrackTypeToStr(mMFTManager->GetType()));
+  mDrainStatus = DrainStatus::DRAINED;
+  mSamplesCount = 0;
+  mOutputsCount = 0;
+  mLastTime.reset();
+  mInputTimesSet.clear();
+  return FlushPromise::CreateAndResolve(true, __func__);
+}
+
+// Dispatches the flush to the task queue (ProcessFlush).
+RefPtr<MediaDataDecoder::FlushPromise> WMFMediaDataDecoder::Flush() {
+  MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+  return InvokeAsync(mTaskQueue, this, __func__,
+                     &WMFMediaDataDecoder::ProcessFlush);
+}
+
+// Task-queue side of Drain(): tells the MFT to drain (once), extracts all
+// remaining output, and patches up the timing quirks WMF exhibits on the
+// final sample of a stream.
+RefPtr<MediaDataDecoder::DecodePromise> WMFMediaDataDecoder::ProcessDrain() {
+  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
+  if (!mMFTManager || mDrainStatus == DrainStatus::DRAINED) {
+    // Nothing to drain; resolve with an empty batch.
+    return DecodePromise::CreateAndResolve(DecodedData(), __func__);
+  }
+
+  if (mDrainStatus != DrainStatus::DRAINING) {
+    // Order the decoder to drain...
+    mMFTManager->Drain();
+    mDrainStatus = DrainStatus::DRAINING;
+  }
+
+  // Then extract all available output.
+  DecodedData results;
+  HRESULT hr = ProcessOutput(results);
+  if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+    // The MFT has emitted everything it had.
+    mDrainStatus = DrainStatus::DRAINED;
+  }
+  if (SUCCEEDED(hr) || hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
+    if (results.Length() > 0 &&
+        results.LastElement()->mType == MediaData::Type::VIDEO_DATA) {
+      const RefPtr<MediaData>& data = results.LastElement();
+      if (mSamplesCount == 1 && data->mTime == media::TimeUnit::Zero()) {
+        // WMF is unable to calculate a duration if only a single sample
+        // was parsed. Additionally, the pts always comes out at 0 under those
+        // circumstances.
+        // Seeing that we've only fed the decoder a single frame, the pts
+        // and duration are known, it's of the last sample.
+        data->mTime = *mLastTime;
+      }
+      if (data->mTime == *mLastTime) {
+        // The WMF Video decoder is sometimes unable to provide a valid duration
+        // on the last sample even if it has been first set through
+        // SetSampleTime (appears to always happen on Windows 7). So we force
+        // set the duration of the last sample as it was input.
+        data->mDuration = mLastDuration;
+      }
+    } else if (results.Length() == 1 &&
+               results.LastElement()->mType == MediaData::Type::AUDIO_DATA) {
+      // When we drain the audio decoder and one frame was queued (such as with
+      // AAC) the MFT will re-calculate the starting time rather than use the
+      // value set on the IMF Sample.
+      // This is normally an okay thing to do; however when dealing with poorly
+      // muxed content that has incorrect start time, it could lead to broken
+      // A/V sync. So we ensure that we use the compressed sample's time
+      // instead. Additionally, this is what all other audio decoders are doing
+      // anyway.
+      MOZ_ASSERT(mLastTime,
+                 "We must have attempted to decode at least one frame to get "
+                 "one decoded output");
+      results.LastElement()->As<AudioData>()->SetOriginalStartTime(*mLastTime);
+    }
+    return DecodePromise::CreateAndResolve(std::move(results), __func__);
+  }
+  return ProcessError(hr, "MFTManager::Output");
+}
+
+// Dispatches the drain to the task queue (ProcessDrain).
+RefPtr<MediaDataDecoder::DecodePromise> WMFMediaDataDecoder::Drain() {
+  MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+  return InvokeAsync(mTaskQueue, this, __func__,
+                     &WMFMediaDataDecoder::ProcessDrain);
+}
+
+// Delegates to the MFT manager; false when the manager is already torn down.
+bool WMFMediaDataDecoder::IsHardwareAccelerated(
+    nsACString& aFailureReason) const {
+  MOZ_ASSERT(!mIsShutDown);
+
+  return mMFTManager && mMFTManager->IsHardwareAccelerated(aFailureReason);
+}
+
+// Forwards the seek threshold to the MFT manager, hopping to the task queue
+// first so manager state is only touched on its owning thread.
+void WMFMediaDataDecoder::SetSeekThreshold(const media::TimeUnit& aTime) {
+  MOZ_DIAGNOSTIC_ASSERT(!mIsShutDown);
+
+  RefPtr<WMFMediaDataDecoder> self = this;
+  nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
+      "WMFMediaDataDecoder::SetSeekThreshold", [self, aTime]() {
+        MOZ_ASSERT(self->mTaskQueue->IsCurrentThreadIn());
+        media::TimeUnit threshold = aTime;
+        self->mMFTManager->SetSeekThreshold(threshold);
+      });
+  nsresult rv = mTaskQueue->Dispatch(runnable.forget());
+  MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
+  Unused << rv;
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFMediaDataDecoder.h b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
new file mode 100644
index 0000000000..b344ba7b65
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFMediaDataDecoder.h
@@ -0,0 +1,182 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFMediaDataDecoder_h_)
+# define WMFMediaDataDecoder_h_
+
+# include <set>
+
+# include "MFTDecoder.h"
+# include "PlatformDecoderModule.h"
+# include "WMF.h"
+# include "mozilla/RefPtr.h"
+
+namespace mozilla {
+
+// Encapsulates the initialization of the MFTDecoder appropriate for decoding
+// a given stream, and the process of converting the IMFSample produced
+// by the MFT into a MediaData object.
+class MFTManager {
+ public:
+  virtual ~MFTManager() {}
+
+  // Submit a compressed sample for decoding.
+  // This should forward to the MFTDecoder after performing
+  // any required sample formatting.
+  virtual HRESULT Input(MediaRawData* aSample) = 0;
+
+  // Produces decoded output, if possible. Blocks until output can be produced,
+  // or until no more is able to be produced.
+  // Returns S_OK on success, or MF_E_TRANSFORM_NEED_MORE_INPUT if there's not
+  // enough data to produce more output. If this returns a failure code other
+  // than MF_E_TRANSFORM_NEED_MORE_INPUT, an error will be reported to the
+  // MP4Reader.
+  virtual HRESULT Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutput) = 0;
+
+  // Flushes pending samples from the underlying MFT and clears any pending
+  // seek threshold.
+  virtual void Flush() {
+    mDecoder->Flush();
+    mSeekTargetThreshold.reset();
+  }
+
+  // Asks the MFT to emit all remaining output. A failure here is only
+  // warned about; the caller still attempts to pull output afterwards.
+  void Drain() {
+    if (FAILED(mDecoder->SendMFTMessage(MFT_MESSAGE_COMMAND_DRAIN, 0))) {
+      NS_WARNING("Failed to send DRAIN command to MFT");
+    }
+  }
+
+  // Destroys all resources.
+  virtual void Shutdown() = 0;
+
+  virtual bool IsHardwareAccelerated(nsACString& aFailureReason) const {
+    return false;
+  }
+
+  virtual TrackInfo::TrackType GetType() = 0;
+
+  virtual nsCString GetDescriptionName() const = 0;
+
+  virtual nsCString GetCodecName() const = 0;
+
+  // Records (or clears, when aTime is invalid) the seek target; subclasses
+  // may use it to skip decoding frames before the threshold.
+  virtual void SetSeekThreshold(const media::TimeUnit& aTime) {
+    if (aTime.IsValid()) {
+      mSeekTargetThreshold = Some(aTime);
+    } else {
+      mSeekTargetThreshold.reset();
+    }
+  }
+
+  virtual bool HasSeekThreshold() const {
+    return mSeekTargetThreshold.isSome();
+  }
+
+  virtual MediaDataDecoder::ConversionRequired NeedsConversion() const {
+    return MediaDataDecoder::ConversionRequired::kNeedNone;
+  }
+
+ protected:
+  // IMFTransform wrapper that performs the decoding.
+  RefPtr<MFTDecoder> mDecoder;
+
+  Maybe<media::TimeUnit> mSeekTargetThreshold;
+};
+
+DDLoggedTypeDeclNameAndBase(WMFMediaDataDecoder, MediaDataDecoder);
+
+// Decodes audio and video using Windows Media Foundation. Samples are decoded
+// using the MFTDecoder created by the MFTManager. This class implements
+// the higher-level logic that drives mapping the MFT to the async
+// MediaDataDecoder interface. The specifics of decoding the exact stream
+// type are handled by MFTManager and the MFTDecoder it creates.
+class WMFMediaDataDecoder final
+    : public MediaDataDecoder,
+      public DecoderDoctorLifeLogger<WMFMediaDataDecoder> {
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WMFMediaDataDecoder, final);
+
+  // Takes ownership of aOutputSource (the MFTManager driving the MFT).
+  explicit WMFMediaDataDecoder(MFTManager* aOutputSource);
+
+  RefPtr<MediaDataDecoder::InitPromise> Init() override;
+
+  RefPtr<DecodePromise> Decode(MediaRawData* aSample) override;
+
+  RefPtr<DecodePromise> Drain() override;
+
+  RefPtr<FlushPromise> Flush() override;
+
+  RefPtr<ShutdownPromise> Shutdown() override;
+
+  bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
+
+  nsCString GetDescriptionName() const override {
+    return mMFTManager ? mMFTManager->GetDescriptionName() : "unknown"_ns;
+  }
+
+  nsCString GetCodecName() const override {
+    return mMFTManager ? mMFTManager->GetCodecName() : ""_ns;
+  }
+
+  ConversionRequired NeedsConversion() const override {
+    MOZ_ASSERT(mMFTManager);
+    return mMFTManager->NeedsConversion();
+  }
+
+  virtual void SetSeekThreshold(const media::TimeUnit& aTime) override;
+
+ private:
+  ~WMFMediaDataDecoder();
+
+  // Logs/marks the failure and rejects with a decode error built from
+  // aError/aReason.
+  RefPtr<DecodePromise> ProcessError(HRESULT aError, const char* aReason);
+
+  // Called on the task queue. Inserts the sample into the decoder, and
+  // extracts output if available.
+  RefPtr<DecodePromise> ProcessDecode(MediaRawData* aSample);
+
+  // Called on the task queue. Extracts output if available, and delivers
+  // it to the reader. Called after ProcessDecode() and ProcessDrain().
+  HRESULT ProcessOutput(DecodedData& aResults);
+
+  // Called on the task queue. Orders the MFT to flush. There is no output to
+  // extract.
+  RefPtr<FlushPromise> ProcessFlush();
+
+  // Called on the task queue. Orders the MFT to drain, and then extracts
+  // all available output.
+  RefPtr<DecodePromise> ProcessDrain();
+
+  // Checks if `aOutput` should be discarded (guarded against) because it's a
+  // potentially invalid output from the decoder. This is done because the
+  // Windows decoder appears to produce invalid outputs under certain
+  // conditions. (Name carries a pre-existing typo, "Againt".)
+  bool ShouldGuardAgaintIncorrectFirstSample(MediaData* aOutput) const;
+
+  // Serializes all decode/flush/drain/shutdown work.
+  const RefPtr<TaskQueue> mTaskQueue;
+
+  // Owned; nulled out during Shutdown() on the task queue.
+  UniquePtr<MFTManager> mMFTManager;
+
+  // The last offset into the media resource that was passed into Input().
+  // This is used to approximate the decoder's position in the media resource.
+  int64_t mLastStreamOffset;
+  // Latest input time/duration seen; used to repair last-sample timing on
+  // drain.
+  Maybe<media::TimeUnit> mLastTime;
+  media::TimeUnit mLastDuration;
+  // Before we get the first sample, this records the times of all samples we
+  // send to the decoder which is used to validate if the first sample is valid.
+  std::set<int64_t> mInputTimesSet;
+  // Inputs fed / outputs produced since the last flush.
+  int64_t mSamplesCount = 0;
+  int64_t mOutputsCount = 0;
+
+  bool mIsShutDown = false;
+
+  enum class DrainStatus {
+    DRAINED,
+    DRAINABLE,
+    DRAINING,
+  };
+  DrainStatus mDrainStatus = DrainStatus::DRAINED;
+};
+
+} // namespace mozilla
+
+#endif // WMFMediaDataDecoder_h_
diff --git a/dom/media/platforms/wmf/WMFMediaDataEncoder.h b/dom/media/platforms/wmf/WMFMediaDataEncoder.h
new file mode 100644
index 0000000000..a0cc1dd1a8
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFMediaDataEncoder.h
@@ -0,0 +1,337 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WMFMediaDataEncoder_h_
+#define WMFMediaDataEncoder_h_
+
+#include "ImageContainer.h"
+#include "MFTEncoder.h"
+#include "PlatformEncoderModule.h"
+#include "TimeUnits.h"
+#include "WMFDataEncoderUtils.h"
+#include "WMFUtils.h"
+
+namespace mozilla {
+
+// Windows Media Foundation video encoder (H264/VP8/VP9, selected by
+// ConfigType). All work runs on mTaskQueue; the MFT itself is created inside
+// an MTA via mscom::EnsureMTA. Input frames are converted from I420 to NV12
+// before being pushed to the MFT.
+template <typename ConfigType>
+class WMFMediaDataEncoder final : public MediaDataEncoder {
+ public:
+  WMFMediaDataEncoder(const ConfigType& aConfig, RefPtr<TaskQueue> aTaskQueue,
+                      const bool aHardwareNotAllowed)
+      : mConfig(aConfig),
+        mTaskQueue(aTaskQueue),
+        mHardwareNotAllowed(aHardwareNotAllowed) {
+    MOZ_ASSERT(mTaskQueue);
+  }
+
+  RefPtr<InitPromise> Init() override {
+    return InvokeAsync(mTaskQueue, this, __func__,
+                       &WMFMediaDataEncoder<ConfigType>::ProcessInit);
+  }
+  RefPtr<EncodePromise> Encode(const MediaData* aSample) override {
+    MOZ_ASSERT(aSample);
+
+    RefPtr<const VideoData> sample(aSample->As<const VideoData>());
+
+    return InvokeAsync<RefPtr<const VideoData>>(
+        mTaskQueue, this, __func__, &WMFMediaDataEncoder::ProcessEncode,
+        std::move(sample));
+  }
+  RefPtr<EncodePromise> Drain() override {
+    return InvokeAsync(
+        mTaskQueue, __func__, [self = RefPtr<WMFMediaDataEncoder>(this)]() {
+          nsTArray<RefPtr<IMFSample>> outputs;
+          return SUCCEEDED(self->mEncoder->Drain(outputs))
+                     ? self->ProcessOutputSamples(outputs)
+                     : EncodePromise::CreateAndReject(
+                           NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
+        });
+  }
+  RefPtr<ShutdownPromise> Shutdown() override {
+    return InvokeAsync(
+        mTaskQueue, __func__, [self = RefPtr<WMFMediaDataEncoder>(this)]() {
+          if (self->mEncoder) {
+            self->mEncoder->Destroy();
+            self->mEncoder = nullptr;
+          }
+          return ShutdownPromise::CreateAndResolve(true, __func__);
+        });
+  }
+  RefPtr<GenericPromise> SetBitrate(Rate aBitsPerSec) override {
+    return InvokeAsync(
+        mTaskQueue, __func__,
+        [self = RefPtr<WMFMediaDataEncoder>(this), aBitsPerSec]() {
+          MOZ_ASSERT(self->mEncoder);
+          return SUCCEEDED(self->mEncoder->SetBitrate(aBitsPerSec))
+                     ? GenericPromise::CreateAndResolve(true, __func__)
+                     : GenericPromise::CreateAndReject(
+                           NS_ERROR_DOM_MEDIA_NOT_SUPPORTED_ERR, __func__);
+        });
+  }
+
+  nsCString GetDescriptionName() const override {
+    return MFTEncoder::GetFriendlyName(CodecToSubtype(mConfig.mCodecType));
+  }
+
+ private:
+  // Automatically lock/unlock IMFMediaBuffer.
+  class LockBuffer final {
+   public:
+    explicit LockBuffer(RefPtr<IMFMediaBuffer>& aBuffer) : mBuffer(aBuffer) {
+      mResult = mBuffer->Lock(&mBytes, &mCapacity, &mLength);
+    }
+
+    ~LockBuffer() {
+      // Only unlock if the Lock in the constructor succeeded.
+      if (SUCCEEDED(mResult)) {
+        mBuffer->Unlock();
+      }
+    }
+
+    BYTE* Data() { return mBytes; }
+    DWORD Capacity() { return mCapacity; }
+    DWORD Length() { return mLength; }
+    HRESULT Result() { return mResult; }
+
+   private:
+    RefPtr<IMFMediaBuffer> mBuffer;
+    BYTE* mBytes;
+    DWORD mCapacity;
+    DWORD mLength;
+    HRESULT mResult;
+  };
+
+  // Task-queue side of Init(): creates and configures the MFT encoder and
+  // caches the codec config data (SPS/PPS or avcC for H264).
+  RefPtr<InitPromise> ProcessInit() {
+    AssertOnTaskQueue();
+
+    MOZ_ASSERT(!mEncoder,
+               "Should not initialize encoder again without shutting down");
+
+    if (!wmf::MediaFoundationInitializer::HasInitialized()) {
+      return InitPromise::CreateAndReject(
+          MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                      RESULT_DETAIL("Can't create the MFT encoder.")),
+          __func__);
+    }
+
+    RefPtr<MFTEncoder> encoder = new MFTEncoder(mHardwareNotAllowed);
+    HRESULT hr;
+    // MFT creation must happen on an MTA thread.
+    mscom::EnsureMTA([&]() { hr = InitMFTEncoder(encoder); });
+
+    if (FAILED(hr)) {
+      WMF_ENC_LOGE("init MFTEncoder: error = 0x%lX", hr);
+      return InitPromise::CreateAndReject(
+          MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                      RESULT_DETAIL("Can't create the MFT encoder.")),
+          __func__);
+    }
+
+    mEncoder = std::move(encoder);
+    FillConfigData();
+    return InitPromise::CreateAndResolve(TrackInfo::TrackType::kVideoTrack,
+                                         __func__);
+  }
+
+  // Creates the MFT for the configured codec, sets media types and encoder
+  // modes (bitrate). Runs inside EnsureMTA.
+  HRESULT InitMFTEncoder(RefPtr<MFTEncoder>& aEncoder) {
+    HRESULT hr = aEncoder->Create(CodecToSubtype(mConfig.mCodecType));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = SetMediaTypes(aEncoder, mConfig);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    hr = aEncoder->SetModes(mConfig.mBitsPerSec);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+    return S_OK;
+  }
+
+  // Caches H264 parameter sets from the encoder's sequence header:
+  // annex-B SPS/PPS for realtime usage, avcC otherwise (see mConfigData).
+  void FillConfigData() {
+    nsTArray<UINT8> header;
+    NS_ENSURE_TRUE_VOID(SUCCEEDED(mEncoder->GetMPEGSequenceHeader(header)));
+
+    mConfigData =
+        header.Length() > 0
+            ? ParseH264Parameters(header, mConfig.mUsage == Usage::Realtime)
+            : nullptr;
+  }
+
+  // Task-queue side of Encode(): converts to NV12, pushes into the MFT, and
+  // resolves with any output that is already available.
+  RefPtr<EncodePromise> ProcessEncode(RefPtr<const VideoData>&& aSample) {
+    AssertOnTaskQueue();
+    MOZ_ASSERT(mEncoder);
+    MOZ_ASSERT(aSample);
+
+    RefPtr<IMFSample> nv12 = ConvertToNV12InputSample(std::move(aSample));
+    if (!nv12 || FAILED(mEncoder->PushInput(std::move(nv12)))) {
+      WMF_ENC_LOGE("failed to process input sample");
+      return EncodePromise::CreateAndReject(
+          MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                      RESULT_DETAIL("Failed to process input.")),
+          __func__);
+    }
+
+    nsTArray<RefPtr<IMFSample>> outputs;
+    HRESULT hr = mEncoder->TakeOutput(outputs);
+    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
+      // Output format changed; refresh the cached codec config data.
+      FillConfigData();
+    } else if (FAILED(hr)) {
+      WMF_ENC_LOGE("failed to process output");
+      return EncodePromise::CreateAndReject(
+          MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                      RESULT_DETAIL("Failed to process output.")),
+          __func__);
+    }
+
+    return ProcessOutputSamples(outputs);
+  }
+
+  // Copies the planar I420 VideoData into an NV12 IMFSample sized for the
+  // MFT, carrying over sample time and duration. Returns nullptr on failure.
+  already_AddRefed<IMFSample> ConvertToNV12InputSample(
+      RefPtr<const VideoData>&& aData) {
+    AssertOnTaskQueue();
+    MOZ_ASSERT(mEncoder);
+
+    const layers::PlanarYCbCrImage* image = aData->mImage->AsPlanarYCbCrImage();
+    MOZ_ASSERT(image);
+    const layers::PlanarYCbCrData* yuv = image->GetData();
+    auto ySize = yuv->YDataSize();
+    auto cbcrSize = yuv->CbCrDataSize();
+    size_t yLength = yuv->mYStride * ySize.height;
+    size_t length = yLength + (yuv->mCbCrStride * cbcrSize.height * 2);
+
+    RefPtr<IMFSample> input;
+    HRESULT hr = mEncoder->CreateInputSample(&input, length);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    RefPtr<IMFMediaBuffer> buffer;
+    hr = input->GetBufferByIndex(0, getter_AddRefs(buffer));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    hr = buffer->SetCurrentLength(length);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    LockBuffer lockBuffer(buffer);
+    NS_ENSURE_TRUE(SUCCEEDED(lockBuffer.Result()), nullptr);
+
+    // NV12 layout: Y plane (stride mYStride) followed by interleaved UV
+    // (stride mCbCrStride * 2).
+    bool ok = libyuv::I420ToNV12(
+                  yuv->mYChannel, yuv->mYStride, yuv->mCbChannel,
+                  yuv->mCbCrStride, yuv->mCrChannel, yuv->mCbCrStride,
+                  lockBuffer.Data(), yuv->mYStride, lockBuffer.Data() + yLength,
+                  yuv->mCbCrStride * 2, ySize.width, ySize.height) == 0;
+    NS_ENSURE_TRUE(ok, nullptr);
+
+    hr = input->SetSampleTime(UsecsToHNs(aData->mTime.ToMicroseconds()));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    hr =
+        input->SetSampleDuration(UsecsToHNs(aData->mDuration.ToMicroseconds()));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    return input.forget();
+  }
+
+  // Converts each IMFSample into a MediaRawData frame; samples that fail to
+  // convert are logged and skipped, the rest resolve the promise.
+  RefPtr<EncodePromise> ProcessOutputSamples(
+      nsTArray<RefPtr<IMFSample>>& aSamples) {
+    EncodedData frames;
+    for (auto sample : aSamples) {
+      RefPtr<MediaRawData> frame = IMFSampleToMediaData(sample);
+      if (frame) {
+        frames.AppendElement(std::move(frame));
+      } else {
+        WMF_ENC_LOGE("failed to convert output frame");
+      }
+    }
+    aSamples.Clear();
+    return EncodePromise::CreateAndResolve(std::move(frames), __func__);
+  }
+
+  // Extracts the encoded payload, time, duration, and keyframe flag
+  // (MFSampleExtension_CleanPoint) from an output IMFSample.
+  already_AddRefed<MediaRawData> IMFSampleToMediaData(
+      RefPtr<IMFSample>& aSample) {
+    AssertOnTaskQueue();
+    MOZ_ASSERT(aSample);
+
+    RefPtr<IMFMediaBuffer> buffer;
+    HRESULT hr = aSample->GetBufferByIndex(0, getter_AddRefs(buffer));
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    LockBuffer lockBuffer(buffer);
+    NS_ENSURE_TRUE(SUCCEEDED(lockBuffer.Result()), nullptr);
+
+    LONGLONG time = 0;
+    hr = aSample->GetSampleTime(&time);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    LONGLONG duration = 0;
+    hr = aSample->GetSampleDuration(&duration);
+    NS_ENSURE_TRUE(SUCCEEDED(hr), nullptr);
+
+    bool isKeyframe =
+        MFGetAttributeUINT32(aSample, MFSampleExtension_CleanPoint, false);
+
+    auto frame = MakeRefPtr<MediaRawData>();
+    if (!WriteFrameData(frame, lockBuffer, isKeyframe)) {
+      return nullptr;
+    }
+
+    frame->mTime = media::TimeUnit::FromMicroseconds(HNsToUsecs(time));
+    frame->mDuration = media::TimeUnit::FromMicroseconds(HNsToUsecs(duration));
+    frame->mKeyframe = isKeyframe;
+
+    return frame.forget();
+  }
+
+  // Copies encoded bytes into aDest. For H264: realtime usage prepends
+  // SPS/PPS to keyframes and stays annex-B; otherwise the sample is converted
+  // to AVCC with the cached avcC header. Other codecs are copied verbatim.
+  bool WriteFrameData(RefPtr<MediaRawData>& aDest, LockBuffer& aSrc,
+                      bool aIsKeyframe) {
+    if (std::is_same_v<ConfigType, MediaDataEncoder::H264Config>) {
+      size_t prependLength = 0;
+      RefPtr<MediaByteBuffer> avccHeader;
+      if (aIsKeyframe && mConfigData) {
+        if (mConfig.mUsage == Usage::Realtime) {
+          prependLength = mConfigData->Length();
+        } else {
+          avccHeader = mConfigData;
+        }
+      }
+
+      UniquePtr<MediaRawDataWriter> writer(aDest->CreateWriter());
+      if (!writer->SetSize(prependLength + aSrc.Length())) {
+        WMF_ENC_LOGE("fail to allocate output buffer");
+        return false;
+      }
+
+      if (prependLength > 0) {
+        PodCopy(writer->Data(), mConfigData->Elements(), prependLength);
+      }
+      PodCopy(writer->Data() + prependLength, aSrc.Data(), aSrc.Length());
+
+      if (mConfig.mUsage != Usage::Realtime &&
+          !AnnexB::ConvertSampleToAVCC(aDest, avccHeader)) {
+        WMF_ENC_LOGE("fail to convert annex-b sample to AVCC");
+        return false;
+      }
+
+      return true;
+    }
+    UniquePtr<MediaRawDataWriter> writer(aDest->CreateWriter());
+    if (!writer->SetSize(aSrc.Length())) {
+      WMF_ENC_LOGE("fail to allocate output buffer");
+      return false;
+    }
+
+    PodCopy(writer->Data(), aSrc.Data(), aSrc.Length());
+    return true;
+  }
+
+  void AssertOnTaskQueue() { MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn()); }
+
+  const ConfigType mConfig;
+  const RefPtr<TaskQueue> mTaskQueue;
+  const bool mHardwareNotAllowed;
+  RefPtr<MFTEncoder> mEncoder;
+  // SPS/PPS NALUs for realtime usage, avcC otherwise.
+  RefPtr<MediaByteBuffer> mConfigData;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFUtils.cpp b/dom/media/platforms/wmf/WMFUtils.cpp
new file mode 100644
index 0000000000..75888c12c3
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFUtils.cpp
@@ -0,0 +1,632 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFUtils.h"
+
+#include <mfidl.h>
+#include <shlobj.h>
+#include <shlwapi.h>
+#include <initguid.h>
+#include <stdint.h>
+
+#ifdef MOZ_AV1
+# include "AOMDecoder.h"
+#endif
+#include "MP4Decoder.h"
+#include "OpusDecoder.h"
+#include "VideoUtils.h"
+#include "VorbisDecoder.h"
+#include "VPXDecoder.h"
+#include "mozilla/ArrayUtils.h"
+#include "mozilla/CheckedInt.h"
+#include "mozilla/Logging.h"
+#include "mozilla/RefPtr.h"
+#include "nsTArray.h"
+#include "nsThreadUtils.h"
+#include "nsWindowsHelpers.h"
+#include "prenv.h"
+#include "mozilla/mscom/EnsureMTA.h"
+#include "mozilla/WindowsVersion.h"
+
+#ifndef WAVE_FORMAT_OPUS
+# define WAVE_FORMAT_OPUS 0x704F
+#endif
+DEFINE_GUID(MEDIASUBTYPE_OPUS, WAVE_FORMAT_OPUS, 0x000, 0x0010, 0x80, 0x00,
+ 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71);
+
+namespace mozilla {
+
+using media::TimeUnit;
+
+// True for the compressed video formats this WMF backend handles
+// (H264/VP8/VP9/AV1); everything else, including Unknown, is not video.
+bool StreamTypeIsVideo(const WMFStreamType& aType) {
+  switch (aType) {
+    case WMFStreamType::H264:
+    case WMFStreamType::VP8:
+    case WMFStreamType::VP9:
+    case WMFStreamType::AV1:
+      return true;
+    default:
+      return false;
+  }
+}
+
+// True for the audio formats this WMF backend handles
+// (MP3/AAC/Opus/Vorbis); everything else, including Unknown, is not audio.
+bool StreamTypeIsAudio(const WMFStreamType& aType) {
+  switch (aType) {
+    case WMFStreamType::MP3:
+    case WMFStreamType::AAC:
+    case WMFStreamType::OPUS:
+    case WMFStreamType::VORBIS:
+      return true;
+    default:
+      return false;
+  }
+}
+
+// Get a string representation of the stream type. Useful for logging.
+const char* StreamTypeToString(WMFStreamType aStreamType) {
+  switch (aStreamType) {
+    case WMFStreamType::H264:
+      return "H264";
+    case WMFStreamType::VP8:
+      return "VP8";
+    case WMFStreamType::VP9:
+      return "VP9";
+    case WMFStreamType::AV1:
+      return "AV1";
+    case WMFStreamType::MP3:
+      return "MP3";
+    case WMFStreamType::AAC:
+      return "AAC";
+    case WMFStreamType::OPUS:
+      return "OPUS";
+    case WMFStreamType::VORBIS:
+      return "VORBIS";
+    default:
+      // Debug builds assert that only Unknown (or SENTINEL misuse) reaches
+      // here; release builds just report "Unknown".
+      MOZ_ASSERT(aStreamType == WMFStreamType::Unknown);
+      return "Unknown";
+  }
+}
+
+// Maps a MIME type string onto the WMFStreamType enum. Checks are
+// first-match; unrecognized types map to Unknown. AV1 is only recognized
+// when built with MOZ_AV1.
+WMFStreamType GetStreamTypeFromMimeType(const nsCString& aMimeType) {
+  if (MP4Decoder::IsH264(aMimeType)) {
+    return WMFStreamType::H264;
+  }
+  if (VPXDecoder::IsVP8(aMimeType)) {
+    return WMFStreamType::VP8;
+  }
+  if (VPXDecoder::IsVP9(aMimeType)) {
+    return WMFStreamType::VP9;
+  }
+#ifdef MOZ_AV1
+  if (AOMDecoder::IsAV1(aMimeType)) {
+    return WMFStreamType::AV1;
+  }
+#endif
+  if (aMimeType.EqualsLiteral("audio/mp4a-latm") ||
+      aMimeType.EqualsLiteral("audio/mp4")) {
+    return WMFStreamType::AAC;
+  }
+  if (aMimeType.EqualsLiteral("audio/mpeg")) {
+    return WMFStreamType::MP3;
+  }
+  if (OpusDataDecoder::IsOpus(aMimeType)) {
+    return WMFStreamType::OPUS;
+  }
+  if (VorbisDataDecoder::IsVorbis(aMimeType)) {
+    return WMFStreamType::VORBIS;
+  }
+  return WMFStreamType::Unknown;
+}
+
+// Converts a duration in hundred-nanosecond units (WMF's time base) into a
+// count of audio frames at sample rate aRate. Uses checked arithmetic;
+// returns E_FAIL on overflow, in which case *aOutFrames is not written.
+HRESULT
+HNsToFrames(int64_t aHNs, uint32_t aRate, int64_t* aOutFrames) {
+  MOZ_ASSERT(aOutFrames);
+  const int64_t HNS_PER_S = USECS_PER_S * 10;
+  CheckedInt<int64_t> i = aHNs;
+  i *= aRate;
+  i /= HNS_PER_S;
+  NS_ENSURE_TRUE(i.isValid(), E_FAIL);
+  *aOutFrames = i.value();
+  return S_OK;
+}
+
+// Returns the image stride (bytes per row) for aType in *aOutStride:
+// prefers the explicit MF_MT_DEFAULT_STRIDE attribute, otherwise derives
+// it from the subtype FOURCC and aWidth via MFGetStrideForBitmapInfoHeader.
+HRESULT
+GetDefaultStride(IMFMediaType* aType, uint32_t aWidth, uint32_t* aOutStride) {
+  // Try to get the default stride from the media type.
+  HRESULT hr = aType->GetUINT32(MF_MT_DEFAULT_STRIDE, aOutStride);
+  if (SUCCEEDED(hr)) {
+    return S_OK;
+  }
+
+  // Stride attribute not set, calculate it.
+  GUID subtype = GUID_NULL;
+
+  hr = aType->GetGUID(MF_MT_SUBTYPE, &subtype);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  // subtype.Data1 holds the FOURCC for video subtypes.
+  hr = wmf::MFGetStrideForBitmapInfoHeader(subtype.Data1, aWidth,
+                                           (LONG*)(aOutStride));
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+  return hr;
+}
+
+// Maps the MF_MT_YUV_MATRIX attribute of aType onto a gfx::YUVColorSpace.
+// Returns Nothing() when the attribute is absent or (in release builds)
+// carries a matrix value we do not handle.
+Maybe<gfx::YUVColorSpace> GetYUVColorSpace(IMFMediaType* aType) {
+  UINT32 yuvColorMatrix;
+  HRESULT hr = aType->GetUINT32(MF_MT_YUV_MATRIX, &yuvColorMatrix);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), {});
+
+  switch (yuvColorMatrix) {
+    case MFVideoTransferMatrix_BT2020_10:
+    case MFVideoTransferMatrix_BT2020_12:
+      return Some(gfx::YUVColorSpace::BT2020);
+    case MFVideoTransferMatrix_BT709:
+      return Some(gfx::YUVColorSpace::BT709);
+    case MFVideoTransferMatrix_BT601:
+      return Some(gfx::YUVColorSpace::BT601);
+    default:
+      // Debug builds assert so new matrix values get noticed; release
+      // builds just report "no color space".
+      MOZ_ASSERT_UNREACHABLE("Unhandled MFVideoTransferMatrix_?");
+      return {};
+  }
+}
+
+// Collapses an MFOffset (integer part + 16-bit fractional part in units of
+// 1/65536) into a plain pixel offset, truncating toward zero.
+int32_t MFOffsetToInt32(const MFOffset& aOffset) {
+  return int32_t(aOffset.value + (aOffset.fract / 65536.0f));
+}
+
+// Returns aSample's duration converted from 100ns units to TimeUnit, or
+// TimeUnit::Invalid() when aSample is null or the query fails.
+TimeUnit GetSampleDuration(IMFSample* aSample) {
+  NS_ENSURE_TRUE(aSample, TimeUnit::Invalid());
+  int64_t duration = 0;
+  HRESULT hr = aSample->GetSampleDuration(&duration);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), TimeUnit::Invalid());
+  return TimeUnit::FromMicroseconds(HNsToUsecs(duration));
+}
+
+// Returns aSample's presentation timestamp converted from 100ns units to
+// TimeUnit, or TimeUnit::Invalid() when aSample is null or the query fails.
+TimeUnit GetSampleTime(IMFSample* aSample) {
+  NS_ENSURE_TRUE(aSample, TimeUnit::Invalid());
+  LONGLONG timestampHns = 0;
+  HRESULT hr = aSample->GetSampleTime(&timestampHns);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), TimeUnit::Invalid());
+  return TimeUnit::FromMicroseconds(HNsToUsecs(timestampHns));
+}
+
+// Gets the sub-region of the video frame that should be displayed.
+// Tries, in order: the pan-and-scan aperture (only when pan-and-scan is
+// enabled), the minimum display aperture, then the geometric aperture;
+// falls back to the full MF_MT_FRAME_SIZE, clamped to our maximums.
+// See:
+// http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx
+HRESULT
+GetPictureRegion(IMFMediaType* aMediaType, gfx::IntRect& aOutPictureRegion) {
+  // Determine if "pan and scan" is enabled for this media. If it is, we
+  // only display a region of the video frame, not the entire frame.
+  BOOL panScan =
+      MFGetAttributeUINT32(aMediaType, MF_MT_PAN_SCAN_ENABLED, FALSE);
+
+  // If pan and scan mode is enabled. Try to get the display region.
+  HRESULT hr = E_FAIL;
+  MFVideoArea videoArea;
+  memset(&videoArea, 0, sizeof(MFVideoArea));
+  if (panScan) {
+    hr = aMediaType->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&videoArea,
+                             sizeof(MFVideoArea), nullptr);
+  }
+
+  // If we're not in pan-and-scan mode, or the pan-and-scan region is not set,
+  // check for a minimum display aperture.
+  if (!panScan || hr == MF_E_ATTRIBUTENOTFOUND) {
+    hr = aMediaType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&videoArea,
+                             sizeof(MFVideoArea), nullptr);
+  }
+
+  if (hr == MF_E_ATTRIBUTENOTFOUND) {
+    // Minimum display aperture is not set, for "backward compatibility with
+    // some components", check for a geometric aperture.
+    hr = aMediaType->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&videoArea,
+                             sizeof(MFVideoArea), nullptr);
+  }
+
+  if (SUCCEEDED(hr)) {
+    // The media specified a picture region, return it.
+    // Offsets are MFOffset fixed-point values; sizes are plain integers.
+    aOutPictureRegion = gfx::IntRect(MFOffsetToInt32(videoArea.OffsetX),
+                                     MFOffsetToInt32(videoArea.OffsetY),
+                                     videoArea.Area.cx, videoArea.Area.cy);
+    return S_OK;
+  }
+
+  // No picture region defined, fall back to using the entire video area.
+  UINT32 width = 0, height = 0;
+  hr = MFGetAttributeSize(aMediaType, MF_MT_FRAME_SIZE, &width, &height);
+  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+  NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
+  NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
+
+  aOutPictureRegion = gfx::IntRect(0, 0, width, height);
+  return S_OK;
+}
+
+// Returns the 64-bit Program Files directory: %ProgramW6432% when set,
+// falling back to %ProgramFiles%, then a hard-coded "C:\Program Files".
+nsString GetProgramW6432Path() {
+  char* programPath = PR_GetEnvSecure("ProgramW6432");
+  if (!programPath) {
+    programPath = PR_GetEnvSecure("ProgramFiles");
+  }
+
+  if (!programPath) {
+    return u"C:\\Program Files"_ns;
+  }
+  return NS_ConvertUTF8toUTF16(programPath);
+}
+
+// Debug helper: stringify an MFT_MESSAGE_TYPE for logging.
+const char* MFTMessageTypeToStr(MFT_MESSAGE_TYPE aMsg) {
+  switch (aMsg) {
+    case MFT_MESSAGE_COMMAND_FLUSH:
+      return "MFT_MESSAGE_COMMAND_FLUSH";
+    case MFT_MESSAGE_COMMAND_DRAIN:
+      return "MFT_MESSAGE_COMMAND_DRAIN";
+    case MFT_MESSAGE_COMMAND_MARKER:
+      return "MFT_MESSAGE_COMMAND_MARKER";
+    case MFT_MESSAGE_SET_D3D_MANAGER:
+      return "MFT_MESSAGE_SET_D3D_MANAGER";
+    case MFT_MESSAGE_NOTIFY_BEGIN_STREAMING:
+      return "MFT_MESSAGE_NOTIFY_BEGIN_STREAMING";
+    case MFT_MESSAGE_NOTIFY_END_STREAMING:
+      return "MFT_MESSAGE_NOTIFY_END_STREAMING";
+    case MFT_MESSAGE_NOTIFY_END_OF_STREAM:
+      return "MFT_MESSAGE_NOTIFY_END_OF_STREAM";
+    case MFT_MESSAGE_NOTIFY_START_OF_STREAM:
+      return "MFT_MESSAGE_NOTIFY_START_OF_STREAM";
+    case MFT_MESSAGE_DROP_SAMPLES:
+      return "MFT_MESSAGE_DROP_SAMPLES";
+    case MFT_MESSAGE_COMMAND_TICK:
+      return "MFT_MESSAGE_COMMAND_TICK";
+    case MFT_MESSAGE_NOTIFY_RELEASE_RESOURCES:
+      return "MFT_MESSAGE_NOTIFY_RELEASE_RESOURCES";
+    case MFT_MESSAGE_NOTIFY_REACQUIRE_RESOURCES:
+      return "MFT_MESSAGE_NOTIFY_REACQUIRE_RESOURCES";
+    case MFT_MESSAGE_NOTIFY_EVENT:
+      return "MFT_MESSAGE_NOTIFY_EVENT";
+    case MFT_MESSAGE_COMMAND_SET_OUTPUT_STREAM_STATE:
+      return "MFT_MESSAGE_COMMAND_SET_OUTPUT_STREAM_STATE";
+    case MFT_MESSAGE_COMMAND_FLUSH_OUTPUT_STREAM:
+      return "MFT_MESSAGE_COMMAND_FLUSH_OUTPUT_STREAM";
+    default:
+      return "Invalid message?";
+  }
+}
+
+// Maps an audio MIME type onto the matching Media Foundation subtype GUID.
+// Returns GUID_NULL (and warns) for unsupported types; callers must check.
+GUID AudioMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType) {
+  if (aMimeType.EqualsLiteral("audio/mpeg")) {
+    return MFAudioFormat_MP3;
+  } else if (MP4Decoder::IsAAC(aMimeType)) {
+    return MFAudioFormat_AAC;
+  } else if (VorbisDataDecoder::IsVorbis(aMimeType)) {
+    return MFAudioFormat_Vorbis;
+  } else if (OpusDataDecoder::IsOpus(aMimeType)) {
+    return MFAudioFormat_Opus;
+  }
+  NS_WARNING("Unsupport audio mimetype");
+  return GUID_NULL;
+}
+
+// Maps a video MIME type onto the matching Media Foundation subtype GUID.
+// AV1 is only recognized when built with MOZ_AV1. Returns GUID_NULL (and
+// warns) for unsupported types; callers must check.
+GUID VideoMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType) {
+  if (MP4Decoder::IsH264(aMimeType)) {
+    return MFVideoFormat_H264;
+  } else if (VPXDecoder::IsVP8(aMimeType)) {
+    return MFVideoFormat_VP80;
+  } else if (VPXDecoder::IsVP9(aMimeType)) {
+    return MFVideoFormat_VP90;
+  }
+#ifdef MOZ_AV1
+  else if (AOMDecoder::IsAV1(aMimeType)) {
+    return MFVideoFormat_AV1;
+  }
+#endif
+  NS_WARNING("Unsupport video mimetype");
+  return GUID_NULL;
+}
+
+// Builds the MF_MT_USER_DATA blob for MFAudioFormat_AAC: the tail of a
+// HEAACWAVEINFO (everything after the wfx member) followed by the
+// AudioSpecificConfig bytes, possibly trimmed (see below).
+void AACAudioSpecificConfigToUserData(uint8_t aAACProfileLevelIndication,
+                                      const uint8_t* aAudioSpecConfig,
+                                      uint32_t aConfigLength,
+                                      nsTArray<BYTE>& aOutUserData) {
+  MOZ_ASSERT(aOutUserData.IsEmpty());
+
+  // The MF_MT_USER_DATA for AAC is defined here:
+  // http://msdn.microsoft.com/en-us/library/windows/desktop/dd742784%28v=vs.85%29.aspx
+  //
+  // For MFAudioFormat_AAC, MF_MT_USER_DATA contains the portion of
+  // the HEAACWAVEINFO structure that appears after the WAVEFORMATEX
+  // structure (that is, after the wfx member). This is followed by
+  // the AudioSpecificConfig() data, as defined by ISO/IEC 14496-3.
+  // [...]
+  // The length of the AudioSpecificConfig() data is 2 bytes for AAC-LC
+  // or HE-AAC with implicit signaling of SBR/PS. It is more than 2 bytes
+  // for HE-AAC with explicit signaling of SBR/PS.
+  //
+  // The value of audioObjectType as defined in AudioSpecificConfig()
+  // must be 2, indicating AAC-LC. The value of extensionAudioObjectType
+  // must be 5 for SBR or 29 for PS.
+  //
+  // HEAACWAVEINFO structure:
+  //    typedef struct heaacwaveinfo_tag {
+  //      WAVEFORMATEX wfx;
+  //      WORD wPayloadType;
+  //      WORD wAudioProfileLevelIndication;
+  //      WORD wStructType;
+  //      WORD wReserved1;
+  //      DWORD dwReserved2;
+  //    }
+  const UINT32 heeInfoLen = 4 * sizeof(WORD) + sizeof(DWORD);
+
+  // The HEAACWAVEINFO must have payload and profile set,
+  // the rest can be all 0x00.
+  BYTE heeInfo[heeInfoLen] = {0};
+  WORD* w = (WORD*)heeInfo;
+  w[0] = 0x0;  // Payload type raw AAC packet
+  w[1] = aAACProfileLevelIndication;
+
+  aOutUserData.AppendElements(heeInfo, heeInfoLen);
+
+  // NOTE(review): the guard below tests aAACProfileLevelIndication == 2,
+  // while the documentation quoted above says it is audioObjectType that
+  // must be 2 — confirm the intended field before relying on this path.
+  if (aAACProfileLevelIndication == 2 && aConfigLength > 2) {
+    // The AudioSpecificConfig is TTTTTFFF|FCCCCGGG
+    // (T=ObjectType, F=Frequency, C=Channel, G=GASpecificConfig)
+    // If frequency = 0xf, then the frequency is explicitly defined on 24 bits.
+    int8_t frequency =
+        (aAudioSpecConfig[0] & 0x7) << 1 | (aAudioSpecConfig[1] & 0x80) >> 7;
+    int8_t channels = (aAudioSpecConfig[1] & 0x78) >> 3;
+    int8_t gasc = aAudioSpecConfig[1] & 0x7;
+    if (frequency != 0xf && channels && !gasc) {
+      // We enter this condition if the AudioSpecificConfig should
+      // theoretically be 2 bytes long but it's not.
+      // The WMF AAC decoder will error if unknown extensions are found,
+      // so remove them.
+      aConfigLength = 2;
+    }
+  }
+  aOutUserData.AppendElements(aAudioSpecConfig, aConfigLength);
+}
+
+namespace wmf {
+
+static const wchar_t* sDLLs[] = {
+ L"mfplat.dll",
+ L"mf.dll",
+ L"dxva2.dll",
+ L"evr.dll",
+};
+
+// Loads every WMF-related DLL listed in sDLLs. The outcome is latched in
+// function statics, so later calls return the first result without
+// retrying; on partial failure, already-loaded DLLs are unloaded again.
+// NOTE(review): the two latch flags are written without synchronization —
+// presumably this is only reached from one thread; confirm before calling
+// from multiple threads.
+HRESULT
+LoadDLLs() {
+  static bool sDLLsLoaded = false;
+  static bool sFailedToLoadDlls = false;
+
+  if (sDLLsLoaded) {
+    return S_OK;
+  }
+  if (sFailedToLoadDlls) {
+    return E_FAIL;
+  }
+
+  // Try to load all the required DLLs. If we fail to load any dll,
+  // unload the dlls we succeeded in loading.
+  nsTArray<const wchar_t*> loadedDlls;
+  for (const wchar_t* dll : sDLLs) {
+    if (!LoadLibrarySystem32(dll)) {
+      NS_WARNING("Failed to load WMF DLLs");
+      for (const wchar_t* loadedDll : loadedDlls) {
+        FreeLibrary(GetModuleHandleW(loadedDll));
+      }
+      sFailedToLoadDlls = true;
+      return E_FAIL;
+    }
+    loadedDlls.AppendElement(dll);
+  }
+  sDLLsLoaded = true;
+
+  return S_OK;
+}
+
+#define ENSURE_FUNCTION_PTR_HELPER(FunctionType, FunctionName, DLL) \
+ static FunctionType FunctionName##Ptr = nullptr; \
+ if (!FunctionName##Ptr) { \
+ FunctionName##Ptr = (FunctionType)GetProcAddress( \
+ GetModuleHandleW(L## #DLL), #FunctionName); \
+ if (!FunctionName##Ptr) { \
+ NS_WARNING("Failed to get GetProcAddress of " #FunctionName \
+ " from " #DLL); \
+ return E_FAIL; \
+ } \
+ }
+
+#define ENSURE_FUNCTION_PTR(FunctionName, DLL) \
+ ENSURE_FUNCTION_PTR_HELPER(decltype(::FunctionName)*, FunctionName, DLL)
+
+#define ENSURE_FUNCTION_PTR_(FunctionName, DLL) \
+ ENSURE_FUNCTION_PTR_HELPER(FunctionName##Ptr_t, FunctionName, DLL)
+
+#define DECL_FUNCTION_PTR(FunctionName, ...) \
+ typedef HRESULT(STDMETHODCALLTYPE* FunctionName##Ptr_t)(__VA_ARGS__)
+
+// Loads the WMF DLLs, then resolves and calls MFStartup (with the Win7-era
+// API version) from inside an MTA via EnsureMTA. Bails out early on Win 7
+// running in pre-Win2000 compatibility mode (see comment below).
+HRESULT
+MediaFoundationInitializer::MFStartup() {
+  if (IsWin7AndPre2000Compatible()) {
+    /*
+     * Specifically exclude the usage of WMF on Win 7 with compatibility mode
+     * prior to Win 2000 as we may crash while trying to startup WMF.
+     * Using GetVersionEx API which takes compatibility mode into account.
+     * See Bug 1279171.
+     */
+    return E_FAIL;
+  }
+
+  HRESULT hr = LoadDLLs();
+  if (FAILED(hr)) {
+    return hr;
+  }
+
+  const int MF_WIN7_VERSION = (0x0002 << 16 | MF_API_VERSION);
+
+  // decltype is unusable for functions having default parameters
+  DECL_FUNCTION_PTR(MFStartup, ULONG, DWORD);
+  ENSURE_FUNCTION_PTR_(MFStartup, Mfplat.dll)
+
+  hr = E_FAIL;
+  mozilla::mscom::EnsureMTA(
+      [&]() -> void { hr = MFStartupPtr(MF_WIN7_VERSION, MFSTARTUP_FULL); });
+  return hr;
+}
+
+// --- Dynamically-bound Media Foundation / DXVA forwarders ----------------
+// Each wrapper below resolves its target function with GetProcAddress on
+// first use (via ENSURE_FUNCTION_PTR*) and returns E_FAIL if the symbol is
+// unavailable, so this code never links directly against the WMF DLLs.
+HRESULT
+MediaFoundationInitializer::MFShutdown() {
+  ENSURE_FUNCTION_PTR(MFShutdown, Mfplat.dll)
+  HRESULT hr = E_FAIL;
+  mozilla::mscom::EnsureMTA([&]() -> void { hr = (MFShutdownPtr)(); });
+  return hr;
+}
+
+HRESULT
+MFCreateMediaType(IMFMediaType** aOutMFType) {
+  ENSURE_FUNCTION_PTR(MFCreateMediaType, Mfplat.dll)
+  return (MFCreateMediaTypePtr)(aOutMFType);
+}
+
+HRESULT
+MFGetStrideForBitmapInfoHeader(DWORD aFormat, DWORD aWidth, LONG* aOutStride) {
+  ENSURE_FUNCTION_PTR(MFGetStrideForBitmapInfoHeader, evr.dll)
+  return (MFGetStrideForBitmapInfoHeaderPtr)(aFormat, aWidth, aOutStride);
+}
+
+HRESULT MFGetService(IUnknown* punkObject, REFGUID guidService, REFIID riid,
+                     LPVOID* ppvObject) {
+  ENSURE_FUNCTION_PTR(MFGetService, mf.dll)
+  return (MFGetServicePtr)(punkObject, guidService, riid, ppvObject);
+}
+
+HRESULT
+DXVA2CreateDirect3DDeviceManager9(UINT* pResetToken,
+                                  IDirect3DDeviceManager9** ppDXVAManager) {
+  ENSURE_FUNCTION_PTR(DXVA2CreateDirect3DDeviceManager9, dxva2.dll)
+  return (DXVA2CreateDirect3DDeviceManager9Ptr)(pResetToken, ppDXVAManager);
+}
+
+HRESULT
+MFCreateSample(IMFSample** ppIMFSample) {
+  ENSURE_FUNCTION_PTR(MFCreateSample, mfplat.dll)
+  return (MFCreateSamplePtr)(ppIMFSample);
+}
+
+HRESULT
+MFCreateAlignedMemoryBuffer(DWORD cbMaxLength, DWORD fAlignmentFlags,
+                            IMFMediaBuffer** ppBuffer) {
+  ENSURE_FUNCTION_PTR(MFCreateAlignedMemoryBuffer, mfplat.dll)
+  return (MFCreateAlignedMemoryBufferPtr)(cbMaxLength, fAlignmentFlags,
+                                          ppBuffer);
+}
+
+HRESULT
+MFCreateDXGIDeviceManager(UINT* pResetToken,
+                          IMFDXGIDeviceManager** ppDXVAManager) {
+  ENSURE_FUNCTION_PTR(MFCreateDXGIDeviceManager, mfplat.dll)
+  return (MFCreateDXGIDeviceManagerPtr)(pResetToken, ppDXVAManager);
+}
+
+HRESULT
+MFCreateDXGISurfaceBuffer(REFIID riid, IUnknown* punkSurface,
+                          UINT uSubresourceIndex, BOOL fButtomUpWhenLinear,
+                          IMFMediaBuffer** ppBuffer) {
+  ENSURE_FUNCTION_PTR(MFCreateDXGISurfaceBuffer, mfplat.dll)
+  return (MFCreateDXGISurfaceBufferPtr)(riid, punkSurface, uSubresourceIndex,
+                                        fButtomUpWhenLinear, ppBuffer);
+}
+
+HRESULT
+MFTEnumEx(GUID guidCategory, UINT32 Flags,
+          const MFT_REGISTER_TYPE_INFO* pInputType,
+          const MFT_REGISTER_TYPE_INFO* pOutputType,
+          IMFActivate*** pppMFTActivate, UINT32* pnumMFTActivate) {
+  ENSURE_FUNCTION_PTR(MFTEnumEx, mfplat.dll)
+  return (MFTEnumExPtr)(guidCategory, Flags, pInputType, pOutputType,
+                        pppMFTActivate, pnumMFTActivate);
+}
+
+HRESULT MFTGetInfo(CLSID clsidMFT, LPWSTR* pszName,
+                   MFT_REGISTER_TYPE_INFO** ppInputTypes, UINT32* pcInputTypes,
+                   MFT_REGISTER_TYPE_INFO** ppOutputTypes,
+                   UINT32* pcOutputTypes, IMFAttributes** ppAttributes) {
+  ENSURE_FUNCTION_PTR(MFTGetInfo, mfplat.dll)
+  return (MFTGetInfoPtr)(clsidMFT, pszName, ppInputTypes, pcInputTypes,
+                         ppOutputTypes, pcOutputTypes, ppAttributes);
+}
+
+HRESULT
+MFCreateAttributes(IMFAttributes** ppMFAttributes, UINT32 cInitialSize) {
+  ENSURE_FUNCTION_PTR(MFCreateAttributes, mfplat.dll)
+  return (MFCreateAttributesPtr)(ppMFAttributes, cInitialSize);
+}
+
+HRESULT MFCreateEventQueue(IMFMediaEventQueue** ppMediaEventQueue) {
+  ENSURE_FUNCTION_PTR(MFCreateEventQueue, mfplat.dll)
+  return (MFCreateEventQueuePtr)(ppMediaEventQueue);
+}
+
+HRESULT MFCreateStreamDescriptor(DWORD dwStreamIdentifier, DWORD cMediaTypes,
+                                 IMFMediaType** apMediaTypes,
+                                 IMFStreamDescriptor** ppDescriptor) {
+  ENSURE_FUNCTION_PTR(MFCreateStreamDescriptor, mfplat.dll)
+  return (MFCreateStreamDescriptorPtr)(dwStreamIdentifier, cMediaTypes,
+                                       apMediaTypes, ppDescriptor);
+}
+
+HRESULT MFCreateAsyncResult(IUnknown* punkObject, IMFAsyncCallback* pCallback,
+                            IUnknown* punkState,
+                            IMFAsyncResult** ppAsyncResult) {
+  ENSURE_FUNCTION_PTR(MFCreateAsyncResult, mfplat.dll)
+  return (MFCreateAsyncResultPtr)(punkObject, pCallback, punkState,
+                                  ppAsyncResult);
+}
+
+HRESULT MFCreatePresentationDescriptor(
+    DWORD cStreamDescriptors, IMFStreamDescriptor** apStreamDescriptors,
+    IMFPresentationDescriptor** ppPresentationDescriptor) {
+  ENSURE_FUNCTION_PTR(MFCreatePresentationDescriptor, mfplat.dll)
+  return (MFCreatePresentationDescriptorPtr)(cStreamDescriptors,
+                                             apStreamDescriptors,
+                                             ppPresentationDescriptor);
+}
+
+HRESULT MFCreateMemoryBuffer(DWORD cbMaxLength, IMFMediaBuffer** ppBuffer) {
+  ENSURE_FUNCTION_PTR(MFCreateMemoryBuffer, mfplat.dll);
+  return (MFCreateMemoryBufferPtr)(cbMaxLength, ppBuffer);
+}
+
+HRESULT MFLockDXGIDeviceManager(UINT* pResetToken,
+                                IMFDXGIDeviceManager** ppManager) {
+  ENSURE_FUNCTION_PTR(MFLockDXGIDeviceManager, mfplat.dll);
+  return (MFLockDXGIDeviceManagerPtr)(pResetToken, ppManager);
+}
+
+HRESULT MFUnlockDXGIDeviceManager() {
+  ENSURE_FUNCTION_PTR(MFUnlockDXGIDeviceManager, mfplat.dll);
+  return (MFUnlockDXGIDeviceManagerPtr)();
+}
+
+HRESULT MFPutWorkItem(DWORD dwQueue, IMFAsyncCallback* pCallback,
+                      IUnknown* pState) {
+  ENSURE_FUNCTION_PTR(MFPutWorkItem, mfplat.dll);
+  return (MFPutWorkItemPtr)(dwQueue, pCallback, pState);
+}
+
+HRESULT MFSerializeAttributesToStream(IMFAttributes* pAttr, DWORD dwOptions,
+                                      IStream* pStm) {
+  ENSURE_FUNCTION_PTR(MFSerializeAttributesToStream, mfplat.dll);
+  return (MFSerializeAttributesToStreamPtr)(pAttr, dwOptions, pStm);
+}
+
+HRESULT MFWrapMediaType(IMFMediaType* pOrig, REFGUID MajorType, REFGUID SubType,
+                        IMFMediaType** ppWrap) {
+  ENSURE_FUNCTION_PTR(MFWrapMediaType, mfplat.dll);
+  return (MFWrapMediaTypePtr)(pOrig, MajorType, SubType, ppWrap);
+}
+} // end namespace wmf
+} // end namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFUtils.h b/dom/media/platforms/wmf/WMFUtils.h
new file mode 100644
index 0000000000..316c3dd78c
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFUtils.h
@@ -0,0 +1,104 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WMFUtils_h
+#define WMFUtils_h
+
+#include "ImageTypes.h"
+#include "TimeUnits.h"
+#include "VideoUtils.h"
+#include "WMF.h"
+#include "mozilla/gfx/Rect.h"
+#include "nsString.h"
+
+// Various utilities shared by WMF backend files.
+
+namespace mozilla {
+
+static const GUID CLSID_MSOpusDecoder = {
+ 0x63e17c10,
+ 0x2d43,
+ 0x4c42,
+ {0x8f, 0xe3, 0x8d, 0x8b, 0x63, 0xe4, 0x6a, 0x6a}};
+
+// Media types supported by Media Foundation.
+enum class WMFStreamType {
+  Unknown,
+  H264,
+  VP8,
+  VP9,
+  AV1,
+  MP3,
+  AAC,
+  OPUS,
+  VORBIS,
+  // Not a real stream type: marks the end of the enum for bounds checks
+  // and iteration.
+  SENTINEL
+};
+
+bool StreamTypeIsVideo(const WMFStreamType& aType);
+
+bool StreamTypeIsAudio(const WMFStreamType& aType);
+
+// Get a string representation of the stream type. Useful for logging.
+const char* StreamTypeToString(WMFStreamType aStreamType);
+
+WMFStreamType GetStreamTypeFromMimeType(const nsCString& aMimeType);
+
+// Converts from microseconds to hundreds of nanoseconds.
+// We use microseconds for our timestamps, whereas WMF uses
+// hundreds of nanoseconds.
+inline int64_t UsecsToHNs(int64_t aUsecs) { return aUsecs * 10; }
+
+// Converts from hundreds of nanoseconds to microseconds.
+// We use microseconds for our timestamps, whereas WMF uses
+// hundreds of nanoseconds.
+inline int64_t HNsToUsecs(int64_t hNanoSecs) { return hNanoSecs / 10; }
+
+HRESULT HNsToFrames(int64_t aHNs, uint32_t aRate, int64_t* aOutFrames);
+
+HRESULT
+GetDefaultStride(IMFMediaType* aType, uint32_t aWidth, uint32_t* aOutStride);
+
+Maybe<gfx::YUVColorSpace> GetYUVColorSpace(IMFMediaType* aType);
+
+int32_t MFOffsetToInt32(const MFOffset& aOffset);
+
+// Gets the sub-region of the video frame that should be displayed.
+// See:
+// http://msdn.microsoft.com/en-us/library/windows/desktop/bb530115(v=vs.85).aspx
+HRESULT
+GetPictureRegion(IMFMediaType* aMediaType, gfx::IntRect& aOutPictureRegion);
+
+// Returns the duration of a IMFSample in TimeUnit.
+// Returns media::TimeUnit::Invalid() on failure.
+media::TimeUnit GetSampleDuration(IMFSample* aSample);
+
+// Returns the presentation time of a IMFSample in TimeUnit.
+// Returns media::TimeUnit::Invalid() on failure.
+media::TimeUnit GetSampleTime(IMFSample* aSample);
+
+inline bool IsFlagSet(DWORD flags, DWORD pattern) {
+ return (flags & pattern) == pattern;
+}
+
+// Will return %ProgramW6432% value as per:
+// https://msdn.microsoft.com/library/windows/desktop/aa384274.aspx
+nsString GetProgramW6432Path();
+
+const char* MFTMessageTypeToStr(MFT_MESSAGE_TYPE aMsg);
+
+GUID AudioMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType);
+
+GUID VideoMimeTypeToMediaFoundationSubtype(const nsACString& aMimeType);
+
+void AACAudioSpecificConfigToUserData(uint8_t aAACProfileLevelIndication,
+ const uint8_t* aAudioSpecConfig,
+ uint32_t aConfigLength,
+ nsTArray<BYTE>& aOutUserData);
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.cpp b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
new file mode 100644
index 0000000000..79cfd1cc0b
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.cpp
@@ -0,0 +1,1096 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WMFVideoMFTManager.h"
+
+#include <psapi.h>
+#include <winsdkver.h>
+#include <algorithm>
+#include "DXVA2Manager.h"
+#include "GMPUtils.h" // For SplitAt. TODO: Move SplitAt to a central place.
+#include "IMFYCbCrImage.h"
+#include "ImageContainer.h"
+#include "MediaInfo.h"
+#include "MediaTelemetryConstants.h"
+#include "VideoUtils.h"
+#include "WMFDecoderModule.h"
+#include "WMFUtils.h"
+#include "gfx2DGlue.h"
+#include "gfxWindowsPlatform.h"
+#include "mozilla/AbstractThread.h"
+#include "mozilla/ClearOnShutdown.h"
+#include "mozilla/Logging.h"
+#include "mozilla/SchedulerGroup.h"
+#include "mozilla/StaticPrefs_gfx.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/SyncRunnable.h"
+#include "mozilla/Telemetry.h"
+#include "mozilla/WindowsVersion.h"
+#include "mozilla/gfx/DeviceManagerDx.h"
+#include "mozilla/gfx/gfxVars.h"
+#include "mozilla/layers/LayersTypes.h"
+#include "nsPrintfCString.h"
+#include "nsThreadUtils.h"
+#include "nsWindowsHelpers.h"
+
+#define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
+
+using mozilla::layers::Image;
+using mozilla::layers::IMFYCbCrImage;
+using mozilla::layers::LayerManager;
+using mozilla::layers::LayersBackend;
+using mozilla::media::TimeUnit;
+
+#if WINVER_MAXVER < 0x0A00
+// Windows 10+ SDK has VP80 and VP90 defines
+const GUID MFVideoFormat_VP80 = {
+ 0x30385056,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
+
+const GUID MFVideoFormat_VP90 = {
+ 0x30395056,
+ 0x0000,
+ 0x0010,
+ {0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71}};
+#endif
+
+#if !defined(__MINGW32__) && _WIN32_WINNT < _WIN32_WINNT_WIN8
+const GUID MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT = {
+ 0x851745d5,
+ 0xc3d6,
+ 0x476d,
+ {0x95, 0x27, 0x49, 0x8e, 0xf2, 0xd1, 0xd, 0x18}};
+const GUID MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT_PROGRESSIVE = {
+ 0xf5523a5,
+ 0x1cb2,
+ 0x47c5,
+ {0xa5, 0x50, 0x2e, 0xeb, 0x84, 0xb4, 0xd1, 0x4a}};
+const GUID MF_SA_D3D11_BINDFLAGS = {
+ 0xeacf97ad,
+ 0x065c,
+ 0x4408,
+ {0xbe, 0xe3, 0xfd, 0xcb, 0xfd, 0x12, 0x8b, 0xe2}};
+const GUID MF_SA_D3D11_SHARED_WITHOUT_MUTEX = {
+ 0x39dbd44d,
+ 0x2e44,
+ 0x4931,
+ {0xa4, 0xc8, 0x35, 0x2d, 0x3d, 0xc4, 0x21, 0x15}};
+#endif
+
+namespace mozilla {
+
+// Checks the file version of msmpeg2vdec.dll (the system WMF H.264
+// decoder): 12.0.9200.16426 and later allow resolutions above 1920x1088.
+// Any failure to locate the DLL or read its version is treated as "not
+// 4K capable".
+static bool IsWin7H264Decoder4KCapable() {
+  WCHAR systemPath[MAX_PATH + 1];
+  if (!ConstructSystem32Path(L"msmpeg2vdec.dll", systemPath, MAX_PATH + 1)) {
+    // Cannot build path -> Assume it's the old DLL or it's missing.
+    return false;
+  }
+
+  DWORD zero;
+  DWORD infoSize = GetFileVersionInfoSizeW(systemPath, &zero);
+  if (infoSize == 0) {
+    // Can't get file info -> Assume it's the old DLL or it's missing.
+    return false;
+  }
+  auto infoData = MakeUnique<unsigned char[]>(infoSize);
+  VS_FIXEDFILEINFO* vInfo;
+  UINT vInfoLen;
+  if (GetFileVersionInfoW(systemPath, 0, infoSize, infoData.get()) &&
+      VerQueryValueW(infoData.get(), L"\\", (LPVOID*)&vInfo, &vInfoLen)) {
+    uint64_t version = uint64_t(vInfo->dwFileVersionMS) << 32 |
+                       uint64_t(vInfo->dwFileVersionLS);
+    // 12.0.9200.16426 & later allow for >1920x1088 resolutions.
+    // The minimum packs the four 16-bit version fields (12.0.9200.16426)
+    // into one 64-bit value for a single comparison.
+    const uint64_t minimum =
+        (uint64_t(12) << 48) | (uint64_t(9200) << 16) | uint64_t(16426);
+    return version >= minimum;
+  }
+  // Can't get file version -> Assume it's the old DLL.
+  return false;
+}
+
+// Returns the compositor's layers backend, or LAYERS_NONE when no
+// KnowsCompositor is available.
+LayersBackend GetCompositorBackendType(
+    layers::KnowsCompositor* aKnowsCompositor) {
+  if (aKnowsCompositor) {
+    return aKnowsCompositor->GetCompositorBackendType();
+  }
+  return LayersBackend::LAYERS_NONE;
+}
+
+// Captures the stream configuration and decides up front whether DXVA
+// (hardware decoding) may be attempted; the actual MFT/DXVA setup is
+// deferred to Init().
+WMFVideoMFTManager::WMFVideoMFTManager(
+    const VideoInfo& aConfig, layers::KnowsCompositor* aKnowsCompositor,
+    layers::ImageContainer* aImageContainer, float aFramerate,
+    const CreateDecoderParams::OptionSet& aOptions, bool aDXVAEnabled,
+    Maybe<TrackingId> aTrackingId)
+    : mVideoInfo(aConfig),
+      mImageSize(aConfig.mImage),
+      mStreamType(GetStreamTypeFromMimeType(aConfig.mMimeType)),
+      mSoftwareImageSize(aConfig.mImage),
+      mSoftwarePictureSize(aConfig.mImage),
+      mVideoStride(0),
+      mColorSpace(aConfig.mColorSpace),
+      mColorRange(aConfig.mColorRange),
+      mImageContainer(aImageContainer),
+      mKnowsCompositor(aKnowsCompositor),
+      mDXVAEnabled(aDXVAEnabled &&
+                   !aOptions.contains(
+                       CreateDecoderParams::Option::HardwareDecoderNotAllowed)),
+      mZeroCopyNV12Texture(false),
+      mFramerate(aFramerate),
+      mLowLatency(aOptions.contains(CreateDecoderParams::Option::LowLatency)),
+      mTrackingId(std::move(aTrackingId))
+// mVideoStride, mVideoWidth, mVideoHeight, mUseHwAccel are initialized in
+// Init().
+{
+  MOZ_COUNT_CTOR(WMFVideoMFTManager);
+
+  // The V and U planes are stored 16-row-aligned, so we need to add padding
+  // to the row heights to ensure the Y'CbCr planes are referenced properly.
+  // This value is only used with software decoder.
+  if (mSoftwareImageSize.height % 16 != 0) {
+    mSoftwareImageSize.height += 16 - (mSoftwareImageSize.height % 16);
+  }
+}
+
+// Only participates in leak counting (pairs with MOZ_COUNT_CTOR above).
+WMFVideoMFTManager::~WMFVideoMFTManager() {
+  MOZ_COUNT_DTOR(WMFVideoMFTManager);
+}
+
+// Maps mStreamType to the corresponding MFVideoFormat_* subtype GUID
+// (GUID_NULL for non-video/unknown types).
+// NOTE(review): this carried a "/* static */" annotation, but the body
+// reads the mStreamType member, so it cannot be a static member function;
+// the annotation looks stale — confirm against the header declaration.
+const GUID& WMFVideoMFTManager::GetMediaSubtypeGUID() {
+  MOZ_ASSERT(StreamTypeIsVideo(mStreamType));
+  switch (mStreamType) {
+    case WMFStreamType::H264:
+      return MFVideoFormat_H264;
+    case WMFStreamType::VP8:
+      return MFVideoFormat_VP80;
+    case WMFStreamType::VP9:
+      return MFVideoFormat_VP90;
+    case WMFStreamType::AV1:
+      return MFVideoFormat_AV1;
+    default:
+      return GUID_NULL;
+  };
+}
+
+// Attempts to create a DXVA2Manager for hardware decoding: D3D11 first
+// (pref- and OS-gated), then D3D9 as a fallback. Only allowed in the RDD
+// or GPU process. On failure returns false and leaves a human-readable
+// explanation in mDXVAFailureReason (both attempts' messages are kept).
+bool WMFVideoMFTManager::InitializeDXVA() {
+  // If we use DXVA but aren't running with a D3D layer manager then the
+  // readback of decoded video frames from GPU to CPU memory grinds painting
+  // to a halt, and makes playback performance *worse*.
+  if (!mDXVAEnabled) {
+    mDXVAFailureReason.AssignLiteral(
+        "Hardware video decoding disabled or blacklisted");
+    return false;
+  }
+  MOZ_ASSERT(!mDXVA2Manager);
+  if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11()) {
+    mDXVAFailureReason.AssignLiteral("Unsupported layers backend");
+    return false;
+  }
+
+  if (!XRE_IsRDDProcess() && !XRE_IsGPUProcess()) {
+    mDXVAFailureReason.AssignLiteral(
+        "DXVA only supported in RDD or GPU process");
+    return false;
+  }
+
+  bool d3d11 = true;
+  if (!StaticPrefs::media_wmf_dxva_d3d11_enabled()) {
+    mDXVAFailureReason = nsPrintfCString(
+        "D3D11: %s is false",
+        StaticPrefs::GetPrefName_media_wmf_dxva_d3d11_enabled());
+    d3d11 = false;
+  }
+  if (!IsWin8OrLater()) {
+    mDXVAFailureReason.AssignLiteral("D3D11: Requires Windows 8 or later");
+    d3d11 = false;
+  }
+
+  if (d3d11) {
+    mDXVAFailureReason.AppendLiteral("D3D11: ");
+    mDXVA2Manager.reset(
+        DXVA2Manager::CreateD3D11DXVA(mKnowsCompositor, mDXVAFailureReason));
+    if (mDXVA2Manager) {
+      return true;
+    }
+  }
+
+  // Try again with d3d9, but record the failure reason
+  // into a new var to avoid overwriting the d3d11 failure.
+  nsAutoCString d3d9Failure;
+  mDXVA2Manager.reset(
+      DXVA2Manager::CreateD3D9DXVA(mKnowsCompositor, d3d9Failure));
+  // Make sure we include the messages from both attempts (if applicable).
+  if (!d3d9Failure.IsEmpty()) {
+    mDXVAFailureReason.AppendLiteral("; D3D9: ");
+    mDXVAFailureReason.Append(d3d9Failure);
+  }
+
+  return mDXVA2Manager != nullptr;
+}
+
+// Rejects configurations WMF cannot decode. Currently only enforces the
+// H.264 maximum pixel-count limit (4096x2304 for 4K-capable decoders,
+// 1920x1088 otherwise), and only when the "allow unsupported resolutions"
+// pref is off. Sets mIsValid = false on rejection.
+MediaResult WMFVideoMFTManager::ValidateVideoInfo() {
+  NS_ENSURE_TRUE(StreamTypeIsVideo(mStreamType),
+                 MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+                             RESULT_DETAIL("Invalid stream type")));
+  switch (mStreamType) {
+    case WMFStreamType::H264:
+      if (!StaticPrefs::media_wmf_allow_unsupported_resolutions()) {
+        // The WMF H.264 decoder is documented to have a minimum resolution
+        // 48x48 pixels for resolution, but we won't enable hw decoding for the
+        // resolution < 132 pixels. It's assumed the software decoder doesn't
+        // have this limitation, but it still might have maximum resolution
+        // limitation.
+        // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
+        const bool Is4KCapable =
+            IsWin8OrLater() || IsWin7H264Decoder4KCapable();
+        static const int32_t MAX_H264_PIXEL_COUNT =
+            Is4KCapable ? 4096 * 2304 : 1920 * 1088;
+        // Checked multiply guards against width*height overflowing int32.
+        const CheckedInt32 pixelCount =
+            CheckedInt32(mVideoInfo.mImage.width) * mVideoInfo.mImage.height;
+
+        if (!pixelCount.isValid() ||
+            pixelCount.value() > MAX_H264_PIXEL_COUNT) {
+          mIsValid = false;
+          return MediaResult(
+              NS_ERROR_DOM_MEDIA_FATAL_ERR,
+              RESULT_DETAIL("Can't decode H.264 stream because its "
+                            "resolution is out of the maximum limitation"));
+        }
+      }
+      break;
+    default:
+      break;
+  }
+
+  return NS_OK;
+}
+
+MediaResult WMFVideoMFTManager::Init() {
+ MediaResult result = ValidateVideoInfo();
+ if (NS_FAILED(result)) {
+ return result;
+ }
+
+ result = InitInternal();
+ if (NS_SUCCEEDED(result) && mDXVA2Manager) {
+ // If we had some failures but eventually made it work,
+ // make sure we preserve the messages.
+ if (mDXVA2Manager->IsD3D11()) {
+ mDXVAFailureReason.AppendLiteral("Using D3D11 API");
+ } else {
+ mDXVAFailureReason.AppendLiteral("Using D3D9 API");
+ }
+ }
+
+ return result;
+}
+
// Creates and configures the MFT decoder, negotiating DXVA hardware
// acceleration when possible. If the negotiated DXVA configuration turns
// out to be unsupported for this stream (CanUseDXVA fails), this method
// disables DXVA and calls itself once more to re-initialize in software.
MediaResult WMFVideoMFTManager::InitInternal() {
  // The H264 SanityTest uses a 132x132 videos to determine if DXVA can be used.
  // so we want to use the software decoder for videos with lower resolutions.
  static const int MIN_H264_HW_WIDTH = 132;
  static const int MIN_H264_HW_HEIGHT = 132;

  mUseHwAccel = false;  // default value; changed if D3D setup succeeds.
  bool useDxva = true;

  // Tiny H.264 frames always take the software path (see comment above).
  if (mStreamType == WMFStreamType::H264 &&
      (mVideoInfo.ImageRect().width <= MIN_H264_HW_WIDTH ||
       mVideoInfo.ImageRect().height <= MIN_H264_HW_HEIGHT)) {
    useDxva = false;
    mDXVAFailureReason = nsPrintfCString(
        "H264 video resolution too low: %" PRIu32 "x%" PRIu32,
        mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height);
  }

  if (useDxva) {
    // Attempt to bring up the D3D11/D3D9 DXVA manager; on failure the
    // reason is recorded in mDXVAFailureReason and we fall back to software.
    useDxva = InitializeDXVA();
  }

  RefPtr<MFTDecoder> decoder = new MFTDecoder();
  HRESULT hr = WMFDecoderModule::CreateMFTDecoder(mStreamType, decoder);
  NS_ENSURE_TRUE(SUCCEEDED(hr),
                 MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                             RESULT_DETAIL("Can't create the MFT decoder.")));

  RefPtr<IMFAttributes> attr(decoder->GetAttributes());
  UINT32 aware = 0;
  if (attr) {
    // MF_SA_D3D_AWARE reports whether this MFT can accept a D3D device
    // manager, i.e. whether DXVA is possible with this transform at all.
    attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
    attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
                    WMFDecoderModule::GetNumDecoderThreads());
    // Low latency is on by pref, or by default on Win10+, unless
    // force-disabled by pref.
    bool lowLatency =
        (StaticPrefs::media_wmf_low_latency_enabled() || IsWin10OrLater()) &&
        !StaticPrefs::media_wmf_low_latency_force_disabled();
    if (mLowLatency || lowLatency) {
      hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
      if (SUCCEEDED(hr)) {
        LOG("Enabling Low Latency Mode");
      } else {
        LOG("Couldn't enable Low Latency Mode");
      }
    }

    // Zero-copy NV12 output: requires hardware WebRender and a DXVA
    // manager that supports it. The sample-count attributes size the
    // decoder's output texture array.
    if (gfx::gfxVars::HwDecodedVideoZeroCopy() && mKnowsCompositor &&
        mKnowsCompositor->UsingHardwareWebRender() && mDXVA2Manager &&
        mDXVA2Manager->SupportsZeroCopyNV12Texture()) {
      mZeroCopyNV12Texture = true;
      const int kOutputBufferSize = 10;

      // Each picture buffer can store a sample, plus one in
      // pending_output_samples_. The decoder adds this number to the number of
      // reference pictures it expects to need and uses that to determine the
      // array size of the output texture.
      const int kMaxOutputSamples = kOutputBufferSize + 1;
      attr->SetUINT32(MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT_PROGRESSIVE,
                      kMaxOutputSamples);
      attr->SetUINT32(MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, kMaxOutputSamples);
    }
  }

  if (useDxva) {
    if (aware) {
      // TODO: Test if I need this anywhere... Maybe on Vista?
      // hr = attr->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
      // NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
      MOZ_ASSERT(mDXVA2Manager);
      // Hand the D3D device manager to the MFT; success means the decoder
      // will emit hardware (DXVA) output samples.
      ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
      hr = decoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
      if (SUCCEEDED(hr)) {
        mUseHwAccel = true;
      } else {
        mDXVAFailureReason = nsPrintfCString(
            "MFT_MESSAGE_SET_D3D_MANAGER failed with code %lX", hr);
      }
    } else {
      mDXVAFailureReason.AssignLiteral(
          "Decoder returned false for MF_SA_D3D_AWARE");
    }
  }

  if (!mDXVAFailureReason.IsEmpty()) {
    // DXVA failure reason being set can mean that D3D11 failed, or that DXVA is
    // entirely disabled.
    LOG("DXVA failure: %s", mDXVAFailureReason.get());
  }

  if (!mUseHwAccel) {
    if (mDXVA2Manager) {
      // Either mDXVAEnabled was set to false prior the second call to
      // InitInternal() due to CanUseDXVA() returning false, or
      // MFT_MESSAGE_SET_D3D_MANAGER failed
      mDXVA2Manager.reset();
    }
    if (mStreamType == WMFStreamType::VP9 ||
        mStreamType == WMFStreamType::VP8 ||
        mStreamType == WMFStreamType::AV1) {
      return MediaResult(
          NS_ERROR_DOM_MEDIA_FATAL_ERR,
          RESULT_DETAIL("Use VP8/VP9/AV1 MFT only if HW acceleration "
                        "is available."));
    }
    Telemetry::Accumulate(Telemetry::MEDIA_DECODER_BACKEND_USED,
                          uint32_t(media::MediaDecoderBackend::WMFSoftware));
  }

  mDecoder = decoder;
  hr = SetDecoderMediaTypes();
  NS_ENSURE_TRUE(
      SUCCEEDED(hr),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Fail to set the decoder media types.")));

  RefPtr<IMFMediaType> inputType;
  hr = mDecoder->GetInputMediaType(inputType);
  NS_ENSURE_TRUE(
      SUCCEEDED(hr),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Fail to get the input media type.")));

  RefPtr<IMFMediaType> outputType;
  hr = mDecoder->GetOutputMediaType(outputType);
  NS_ENSURE_TRUE(
      SUCCEEDED(hr),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Fail to get the output media type.")));

  // The MFT may have silently accepted a config the GPU can't actually
  // decode; if so, restart initialization without DXVA.
  if (mUseHwAccel && !CanUseDXVA(inputType, outputType)) {
    LOG("DXVA manager determined that the input type was unsupported in "
        "hardware, retrying init without DXVA.");
    mDXVAEnabled = false;
    // DXVA initialization with current decoder actually failed,
    // re-do initialization.
    return InitInternal();
  }

  LOG("Video Decoder initialized, Using DXVA: %s",
      (mUseHwAccel ? "Yes" : "No"));

  if (mUseHwAccel) {
    hr = mDXVA2Manager->ConfigureForSize(
        outputType,
        mColorSpace.refOr(
            DefaultColorSpace({mImageSize.width, mImageSize.height})),
        mColorRange, mVideoInfo.ImageRect().width,
        mVideoInfo.ImageRect().height);
    NS_ENSURE_TRUE(SUCCEEDED(hr),
                   MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                               RESULT_DETAIL("Fail to configure image size for "
                                             "DXVA2Manager.")));
  } else {
    // Software path: cache the output stride for plane addressing in
    // CreateBasicVideoFrame().
    GetDefaultStride(outputType, mVideoInfo.ImageRect().width, &mVideoStride);
  }
  LOG("WMFVideoMFTManager frame geometry stride=%u picture=(%d, %d, %d, %d) "
      "display=(%d,%d)",
      mVideoStride, mVideoInfo.ImageRect().x, mVideoInfo.ImageRect().y,
      mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height,
      mVideoInfo.mDisplay.width, mVideoInfo.mDisplay.height);

  if (!mUseHwAccel) {
    // Software decoding can still use the IMF upload path when a D3D11
    // image device exists (checked again in CreateBasicVideoFrame).
    RefPtr<ID3D11Device> device = gfx::DeviceManagerDx::Get()->GetImageDevice();
    if (device) {
      mIMFUsable = true;
    }
  }
  return MediaResult(NS_OK);
}
+
+HRESULT
+WMFVideoMFTManager::SetDecoderMediaTypes() {
+ // Setup the input/output media types.
+ RefPtr<IMFMediaType> inputType;
+ HRESULT hr = wmf::MFCreateMediaType(getter_AddRefs(inputType));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID());
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE,
+ MFVideoInterlace_MixedInterlaceOrProgressive);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE,
+ mVideoInfo.ImageRect().width,
+ mVideoInfo.ImageRect().height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ UINT32 fpsDenominator = 1000;
+ UINT32 fpsNumerator = static_cast<uint32_t>(mFramerate * fpsDenominator);
+ hr = MFSetAttributeRatio(inputType, MF_MT_FRAME_RATE, fpsNumerator,
+ fpsDenominator);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ RefPtr<IMFMediaType> outputType;
+ hr = wmf::MFCreateMediaType(getter_AddRefs(outputType));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = MFSetAttributeSize(outputType, MF_MT_FRAME_SIZE,
+ mVideoInfo.ImageRect().width,
+ mVideoInfo.ImageRect().height);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = MFSetAttributeRatio(outputType, MF_MT_FRAME_RATE, fpsNumerator,
+ fpsDenominator);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ GUID outputSubType = [&]() {
+ switch (mVideoInfo.mColorDepth) {
+ case gfx::ColorDepth::COLOR_8:
+ return mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
+ case gfx::ColorDepth::COLOR_10:
+ return MFVideoFormat_P010;
+ case gfx::ColorDepth::COLOR_12:
+ case gfx::ColorDepth::COLOR_16:
+ return MFVideoFormat_P016;
+ default:
+ MOZ_ASSERT_UNREACHABLE("Unexpected color depth");
+ }
+ }();
+ hr = outputType->SetGUID(MF_MT_SUBTYPE, outputSubType);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ if (mZeroCopyNV12Texture) {
+ RefPtr<IMFAttributes> attr(mDecoder->GetOutputStreamAttributes());
+ if (attr) {
+ hr = attr->SetUINT32(MF_SA_D3D11_SHARED_WITHOUT_MUTEX, TRUE);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+
+ hr = attr->SetUINT32(MF_SA_D3D11_BINDFLAGS,
+ D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_DECODER);
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ }
+ }
+
+ return mDecoder->SetMediaTypes(inputType, outputType);
+}
+
+HRESULT
+WMFVideoMFTManager::Input(MediaRawData* aSample) {
+ if (!mIsValid) {
+ return E_FAIL;
+ }
+
+ if (!mDecoder) {
+ // This can happen during shutdown.
+ return E_FAIL;
+ }
+
+ mTrackingId.apply([&](const auto& aId) {
+ MediaInfoFlag flag = MediaInfoFlag::None;
+ flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
+ : MediaInfoFlag::NonKeyFrame);
+ flag |= (mUseHwAccel ? MediaInfoFlag::HardwareDecoding
+ : MediaInfoFlag::SoftwareDecoding);
+ switch (mStreamType) {
+ case WMFStreamType::H264:
+ flag |= MediaInfoFlag::VIDEO_H264;
+ break;
+ case WMFStreamType::VP8:
+ flag |= MediaInfoFlag::VIDEO_VP8;
+ break;
+ case WMFStreamType::VP9:
+ flag |= MediaInfoFlag::VIDEO_VP9;
+ break;
+ case WMFStreamType::AV1:
+ flag |= MediaInfoFlag::VIDEO_AV1;
+ break;
+ default:
+ break;
+ };
+ mPerformanceRecorder.Start(aSample->mTime.ToMicroseconds(),
+ "WMFVideoDecoder"_ns, aId, flag);
+ });
+
+ RefPtr<IMFSample> inputSample;
+ HRESULT hr = mDecoder->CreateInputSample(
+ aSample->Data(), uint32_t(aSample->Size()),
+ aSample->mTime.ToMicroseconds(), aSample->mDuration.ToMicroseconds(),
+ &inputSample);
+ NS_ENSURE_TRUE(SUCCEEDED(hr) && inputSample != nullptr, hr);
+
+ if (!mColorSpace && aSample->mTrackInfo) {
+ // The colorspace definition is found in the H264 SPS NAL, available out of
+ // band, while for VP9 it's only available within the VP9 bytestream.
+ // The info would have been updated by the MediaChangeMonitor.
+ mColorSpace = aSample->mTrackInfo->GetAsVideoInfo()->mColorSpace;
+ mColorRange = aSample->mTrackInfo->GetAsVideoInfo()->mColorRange;
+ }
+ mLastDuration = aSample->mDuration;
+
+ // Forward sample data to the decoder.
+ return mDecoder->Input(inputSample);
+}
+
+// The MFTransforms we use for decoding H264 and AV1 video will silently fall
+// back to software decoding (even if we've negotiated DXVA) if the GPU
+// doesn't support decoding the given codec and resolution. It will then upload
+// the software decoded frames into d3d textures to preserve behaviour.
+//
+// Unfortunately this seems to cause corruption (see bug 1193547) and is
+// slow because the upload is done into a non-shareable texture and requires
+// us to copy it.
+//
+// This code tests if the given codec and resolution can be supported directly
+// on the GPU, and makes sure we only ask the MFT for DXVA if it can be
+// supported properly.
+//
+// Ideally we'd know the framerate during initialization and would also ensure
+// that new decoders are created if the resolution changes. Then we could move
+// this check into Init and consolidate the main thread blocking code.
+bool WMFVideoMFTManager::CanUseDXVA(IMFMediaType* aInputType,
+ IMFMediaType* aOutputType) {
+ MOZ_ASSERT(mDXVA2Manager);
+ // Check if we're able to use hardware decoding for the current codec config.
+ return mDXVA2Manager->SupportsConfig(mVideoInfo, aInputType, aOutputType);
+}
+
+TimeUnit WMFVideoMFTManager::GetSampleDurationOrLastKnownDuration(
+ IMFSample* aSample) const {
+ TimeUnit duration = GetSampleDuration(aSample);
+ if (!duration.IsValid()) {
+ // WMF returned a non-success code (likely duration unknown, but the API
+ // also allows for other, unspecified codes).
+ LOG("Got unknown sample duration -- bad return code. Using mLastDuration.");
+ } else if (duration == TimeUnit::Zero()) {
+ // Duration is zero. WMF uses this to indicate an unknown duration.
+ LOG("Got unknown sample duration -- zero duration returned. Using "
+ "mLastDuration.");
+ } else if (duration.IsNegative()) {
+ // A negative duration will cause issues up the stack. It's also unclear
+ // why this would happen, but the API allows for it by returning a signed
+ // int, so we handle it here.
+ LOG("Got negative sample duration: %f seconds. Using mLastDuration "
+ "instead.",
+ duration.ToSeconds());
+ } else {
+ // We got a duration without any problems.
+ return duration;
+ }
+
+ return mLastDuration;
+}
+
// Wraps a software-decoded IMFSample into a VideoData, interpreting the
// decoder's YV12 / P010 / P016 plane layout. Uses the zero-ish-copy
// IMFYCbCrImage path when D3D11 is usable, otherwise copies the planes.
HRESULT
WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
                                          int64_t aStreamOffset,
                                          VideoData** aOutVideoData) {
  NS_ENSURE_TRUE(aSample, E_POINTER);
  NS_ENSURE_TRUE(aOutVideoData, E_POINTER);

  *aOutVideoData = nullptr;

  HRESULT hr;
  RefPtr<IMFMediaBuffer> buffer;

  // Must convert to contiguous buffer to use IMD2DBuffer interface.
  hr = aSample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Try and use the IMF2DBuffer interface if available, otherwise fallback
  // to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient,
  // but only some systems (Windows 8?) support it.
  BYTE* data = nullptr;
  LONG stride = 0;
  RefPtr<IMF2DBuffer> twoDBuffer;
  hr = buffer->QueryInterface(
      static_cast<IMF2DBuffer**>(getter_AddRefs(twoDBuffer)));
  if (SUCCEEDED(hr)) {
    hr = twoDBuffer->Lock2D(&data, &stride);
    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  } else {
    hr = buffer->Lock(&data, nullptr, nullptr);
    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    // IMFMediaBuffer has no stride notion; use the one cached from the
    // decoder's output media type.
    stride = mVideoStride;
  }

  // Software output is YV12 (8-bit) or P010/P016 (10/16-bit); NV12 is only
  // produced on the hardware path.
  const GUID& subType = mDecoder->GetOutputMediaSubType();
  MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 ||
                        subType == MFVideoFormat_P010 ||
                        subType == MFVideoFormat_P016);
  const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12
                                         ? gfx::ColorDepth::COLOR_8
                                         : gfx::ColorDepth::COLOR_16;

  // YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....]
  // i.e., Y, then V, then U.
  // P010, P016 planar format (2 planes) [YYYY....][UVUV...]
  // See
  // https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats
  VideoData::YCbCrBuffer b;

  const uint32_t videoWidth = mSoftwareImageSize.width;
  const uint32_t videoHeight = mSoftwareImageSize.height;

  // Y (Y') plane
  b.mPlanes[0].mData = data;
  b.mPlanes[0].mStride = stride;
  b.mPlanes[0].mHeight = videoHeight;
  b.mPlanes[0].mWidth = videoWidth;
  b.mPlanes[0].mSkip = 0;

  MOZ_DIAGNOSTIC_ASSERT(mSoftwareImageSize.height % 16 == 0,
                        "decoded height must be 16 bytes aligned");
  // Plane sizes in bytes; v_size is the quarter-resolution chroma plane
  // used by the YV12 layout.
  const uint32_t y_size = stride * mSoftwareImageSize.height;
  const uint32_t v_size = stride * mSoftwareImageSize.height / 4;
  const uint32_t halfStride = (stride + 1) / 2;
  const uint32_t halfHeight = (videoHeight + 1) / 2;
  const uint32_t halfWidth = (videoWidth + 1) / 2;

  if (subType == MFVideoFormat_YV12) {
    // YV12 stores V before U, so U sits after both Y and V planes.
    // U plane (Cb)
    b.mPlanes[1].mData = data + y_size + v_size;
    b.mPlanes[1].mStride = halfStride;
    b.mPlanes[1].mHeight = halfHeight;
    b.mPlanes[1].mWidth = halfWidth;
    b.mPlanes[1].mSkip = 0;

    // V plane (Cr)
    b.mPlanes[2].mData = data + y_size;
    b.mPlanes[2].mStride = halfStride;
    b.mPlanes[2].mHeight = halfHeight;
    b.mPlanes[2].mWidth = halfWidth;
    b.mPlanes[2].mSkip = 0;
  } else {
    // P010/P016: U and V are interleaved 16-bit samples in one plane, so
    // both use mSkip = 1 and V starts one 16-bit component after U.
    // U plane (Cb)
    b.mPlanes[1].mData = data + y_size;
    b.mPlanes[1].mStride = stride;
    b.mPlanes[1].mHeight = halfHeight;
    b.mPlanes[1].mWidth = halfWidth;
    b.mPlanes[1].mSkip = 1;

    // V plane (Cr)
    b.mPlanes[2].mData = data + y_size + sizeof(short);
    b.mPlanes[2].mStride = stride;
    b.mPlanes[2].mHeight = halfHeight;
    b.mPlanes[2].mWidth = halfWidth;
    b.mPlanes[2].mSkip = 1;
  }

  b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;

  // YuvColorSpace
  b.mYUVColorSpace =
      mColorSpace.refOr(DefaultColorSpace({videoWidth, videoHeight}));
  b.mColorDepth = colorDepth;
  b.mColorRange = mColorRange;

  TimeUnit pts = GetSampleTime(aSample);
  NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
  TimeUnit duration = GetSampleDurationOrLastKnownDuration(aSample);
  NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
  gfx::IntRect pictureRegion = mVideoInfo.ScaledImageRect(
      mSoftwarePictureSize.width, mSoftwarePictureSize.height);

  // Fallback copy path: high bit depth, no compositor, no D3D11 support,
  // or the IMF image device wasn't available at init.
  if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor ||
      !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
    RefPtr<VideoData> v = VideoData::CreateAndCopyData(
        mVideoInfo, mImageContainer, aStreamOffset, pts, duration, b, false,
        TimeUnit::FromMicroseconds(-1), pictureRegion, mKnowsCompositor);
    // Unlock via whichever interface we locked with above.
    if (twoDBuffer) {
      twoDBuffer->Unlock2D();
    } else {
      buffer->Unlock();
    }
    v.forget(aOutVideoData);
    return S_OK;
  }

  // D3D11 path: IMFYCbCrImage keeps a reference to the (locked) buffer.
  RefPtr<layers::PlanarYCbCrImage> image =
      new IMFYCbCrImage(buffer, twoDBuffer, mKnowsCompositor, mImageContainer);

  VideoData::SetVideoDataToImage(image, mVideoInfo, b, pictureRegion, false);

  RefPtr<VideoData> v = VideoData::CreateFromImage(
      mVideoInfo.mDisplay, aStreamOffset, pts, duration, image.forget(), false,
      TimeUnit::FromMicroseconds(-1));

  // Close out the telemetry entry started in Input() for this timestamp.
  mPerformanceRecorder.Record(pts.ToMicroseconds(), [&](DecodeStage& aStage) {
    aStage.SetColorDepth(b.mColorDepth);
    aStage.SetColorRange(b.mColorRange);
    aStage.SetYUVColorSpace(b.mYUVColorSpace);
    if (subType == MFVideoFormat_NV12) {
      aStage.SetImageFormat(DecodeStage::NV12);
    } else if (subType == MFVideoFormat_YV12) {
      aStage.SetImageFormat(DecodeStage::YV12);
    } else if (subType == MFVideoFormat_P010) {
      aStage.SetImageFormat(DecodeStage::P010);
    } else if (subType == MFVideoFormat_P016) {
      aStage.SetImageFormat(DecodeStage::P016);
    }
    aStage.SetResolution(videoWidth, videoHeight);
  });

  v.forget(aOutVideoData);
  return S_OK;
}
+
+HRESULT
+WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample,
+ int64_t aStreamOffset,
+ VideoData** aOutVideoData) {
+ NS_ENSURE_TRUE(aSample, E_POINTER);
+ NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
+ NS_ENSURE_TRUE(mDXVA2Manager, E_ABORT);
+ NS_ENSURE_TRUE(mUseHwAccel, E_ABORT);
+
+ *aOutVideoData = nullptr;
+ HRESULT hr;
+
+ gfx::IntRect pictureRegion =
+ mVideoInfo.ScaledImageRect(mImageSize.width, mImageSize.height);
+ RefPtr<Image> image;
+ if (mZeroCopyNV12Texture && mDXVA2Manager->SupportsZeroCopyNV12Texture()) {
+ hr = mDXVA2Manager->WrapTextureWithImage(aSample, pictureRegion,
+ getter_AddRefs(image));
+ } else {
+ hr = mDXVA2Manager->CopyToImage(aSample, pictureRegion,
+ getter_AddRefs(image));
+ NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
+ }
+ NS_ENSURE_TRUE(image, E_FAIL);
+
+ gfx::IntSize size = image->GetSize();
+
+ TimeUnit pts = GetSampleTime(aSample);
+ NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
+ TimeUnit duration = GetSampleDurationOrLastKnownDuration(aSample);
+ NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
+ RefPtr<VideoData> v = VideoData::CreateFromImage(
+ mVideoInfo.mDisplay, aStreamOffset, pts, duration, image.forget(), false,
+ TimeUnit::FromMicroseconds(-1));
+
+ NS_ENSURE_TRUE(v, E_FAIL);
+ v.forget(aOutVideoData);
+
+ mPerformanceRecorder.Record(pts.ToMicroseconds(), [&](DecodeStage& aStage) {
+ aStage.SetColorDepth(mVideoInfo.mColorDepth);
+ aStage.SetColorRange(mColorRange);
+ aStage.SetYUVColorSpace(mColorSpace.refOr(
+ DefaultColorSpace({mImageSize.width, mImageSize.height})));
+ const GUID& subType = mDecoder->GetOutputMediaSubType();
+ if (subType == MFVideoFormat_NV12) {
+ aStage.SetImageFormat(DecodeStage::NV12);
+ } else if (subType == MFVideoFormat_YV12) {
+ aStage.SetImageFormat(DecodeStage::YV12);
+ } else if (subType == MFVideoFormat_P010) {
+ aStage.SetImageFormat(DecodeStage::P010);
+ } else if (subType == MFVideoFormat_P016) {
+ aStage.SetImageFormat(DecodeStage::P016);
+ }
+ aStage.SetResolution(size.width, size.height);
+ });
+
+ return S_OK;
+}
+
// Blocks until decoded sample is produced by the decoder.
// Drives the MFT until it yields a frame, handling the two recoverable
// conditions inline: MF_E_TRANSFORM_STREAM_CHANGE (renegotiate the output
// type and retry) and a spurious success-with-no-sample (retry, bounded by
// mNullOutputCount). Returns MF_E_TRANSFORM_NEED_MORE_INPUT when the
// decoder wants more data.
HRESULT
WMFVideoMFTManager::Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutData) {
  RefPtr<IMFSample> sample;
  HRESULT hr;
  aOutData = nullptr;
  int typeChangeCount = 0;

  // Loop until we decode a sample, or an unexpected error that we can't
  // handle occurs.
  while (true) {
    hr = mDecoder->Output(&sample);
    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
      return MF_E_TRANSFORM_NEED_MORE_INPUT;
    }

    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
      MOZ_ASSERT(!sample);
      // Video stream output type change, probably geometric aperture change or
      // pixel type.
      // We must reconfigure the decoder output type.

      // Attempt to find an appropriate OutputType, trying in order:
      // if HW accelerated: NV12, P010, P016
      // if SW: YV12, P010, P016
      if (FAILED(
              (hr = (mDecoder->FindDecoderOutputTypeWithSubtype(
                   mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12)))) &&
          FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
                      MFVideoFormat_P010))) &&
          FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
                      MFVideoFormat_P016)))) {
        LOG("No suitable output format found");
        return hr;
      }

      RefPtr<IMFMediaType> outputType;
      hr = mDecoder->GetOutputMediaType(outputType);
      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

      if (mUseHwAccel) {
        // Hardware path: let the DXVA manager adapt to the new output type.
        hr = mDXVA2Manager->ConfigureForSize(
            outputType,
            mColorSpace.refOr(
                DefaultColorSpace({mImageSize.width, mImageSize.height})),
            mColorRange, mVideoInfo.ImageRect().width,
            mVideoInfo.ImageRect().height);
        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
      } else {
        // The stride may have changed, recheck for it.
        hr = GetDefaultStride(outputType, mVideoInfo.ImageRect().width,
                              &mVideoStride);
        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

        // Refresh the (possibly padded) image size and the cropped picture
        // size used by CreateBasicVideoFrame().
        UINT32 width = 0, height = 0;
        hr = MFGetAttributeSize(outputType, MF_MT_FRAME_SIZE, &width, &height);
        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
        NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
        NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
        mSoftwareImageSize = gfx::IntSize(width, height);

        gfx::IntRect picture;
        hr = GetPictureRegion(outputType, picture);
        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
        MOZ_ASSERT(picture.width != 0 && picture.height != 0);
        mSoftwarePictureSize = gfx::IntSize(picture.width, picture.height);
        LOG("Output stream change, image size=[%ux%u], picture=[%u,%u]",
            mSoftwareImageSize.width, mSoftwareImageSize.height,
            mSoftwarePictureSize.width, mSoftwarePictureSize.height);
      }
      // Catch infinite loops, but some decoders perform at least 2 stream
      // changes on consecutive calls, so be permissive.
      // 100 is arbitrarily > 2.
      NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE);
      // Loop back and try decoding again...
      ++typeChangeCount;
      continue;
    }

    if (SUCCEEDED(hr)) {
      if (!sample) {
        LOG("Video MFTDecoder returned success but no output!");
        // On some machines/input the MFT returns success but doesn't output
        // a video frame. If we detect this, try again, but only up to a
        // point; after 250 failures, give up. Note we count all failures
        // over the life of the decoder, as we may end up exiting with a
        // NEED_MORE_INPUT and coming back to hit the same error. So just
        // counting with a local variable (like typeChangeCount does) may
        // not work in this situation.
        ++mNullOutputCount;
        if (mNullOutputCount > 250) {
          LOG("Excessive Video MFTDecoder returning success but no output; "
              "giving up");
          mGotExcessiveNullOutput = true;
          return E_FAIL;
        }
        continue;
      }
      TimeUnit pts = GetSampleTime(sample);
      TimeUnit duration = GetSampleDurationOrLastKnownDuration(sample);

      // AV1 MFT fix: Sample duration after seeking is always equal to the
      // sample time, for some reason. Set it to last duration instead.
      if (mStreamType == WMFStreamType::AV1 && duration == pts) {
        LOG("Video sample duration (%" PRId64 ") matched timestamp (%" PRId64
            "), setting to previous sample duration (%" PRId64 ") instead.",
            pts.ToMicroseconds(), duration.ToMicroseconds(),
            mLastDuration.ToMicroseconds());
        duration = mLastDuration;
        sample->SetSampleDuration(UsecsToHNs(duration.ToMicroseconds()));
      }

      if (!pts.IsValid() || !duration.IsValid()) {
        return E_FAIL;
      }
      // Drop frames that end before the seek target; the first frame that
      // reaches the target clears the threshold.
      if (mSeekTargetThreshold.isSome()) {
        if ((pts + duration) < mSeekTargetThreshold.ref()) {
          LOG("Dropping video frame which pts (%" PRId64 " + %" PRId64
              ") is smaller than seek target (%" PRId64 ").",
              pts.ToMicroseconds(), duration.ToMicroseconds(),
              mSeekTargetThreshold->ToMicroseconds());
          // It is necessary to clear the pointer to release the previous output
          // buffer.
          sample = nullptr;
          continue;
        }
        mSeekTargetThreshold.reset();
      }
      break;
    }
    // Else unexpected error so bail.
    NS_WARNING("WMFVideoMFTManager::Output() unexpected error");
    return hr;
  }

  // Convert the raw IMFSample into a VideoData via the appropriate path.
  RefPtr<VideoData> frame;
  if (mUseHwAccel) {
    hr = CreateD3DVideoFrame(sample, aStreamOffset, getter_AddRefs(frame));
  } else {
    hr = CreateBasicVideoFrame(sample, aStreamOffset, getter_AddRefs(frame));
  }
  // Frame should be non null only when we succeeded.
  MOZ_ASSERT((frame != nullptr) == SUCCEEDED(hr));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  NS_ENSURE_TRUE(frame, E_FAIL);

  aOutData = frame;

  if (mNullOutputCount) {
    mGotValidOutputAfterNullOutput = true;
  }

  return S_OK;
}
+
// Flushes the underlying MFT decoder, then closes out the performance
// recorder. NOTE(review): passing INT64_MAX appears intended to finalize
// all pending decode-stage entries regardless of timestamp -- confirm
// against PerformanceRecorderMulti::Record semantics.
void WMFVideoMFTManager::Flush() {
  MFTManager::Flush();
  mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
}
+
// Tears down the decoder. The DXVA manager is notified first (while the MFT
// decoder still exists), then the decoder is released before the DXVA
// manager -- the decoder was handed the DXVA device manager during
// InitInternal(), so this order keeps that reference valid until release.
void WMFVideoMFTManager::Shutdown() {
  if (mDXVA2Manager) {
    mDXVA2Manager->BeforeShutdownVideoMFTDecoder();
  }
  mDecoder = nullptr;
  mDXVA2Manager.reset();
}
+
+bool WMFVideoMFTManager::IsHardwareAccelerated(
+ nsACString& aFailureReason) const {
+ aFailureReason = mDXVAFailureReason;
+ return mDecoder && mUseHwAccel;
+}
+
+nsCString WMFVideoMFTManager::GetDescriptionName() const {
+ nsCString failureReason;
+ bool hw = IsHardwareAccelerated(failureReason);
+
+ const char* formatName = [&]() {
+ if (!mDecoder) {
+ return "not initialized";
+ }
+ GUID format = mDecoder->GetOutputMediaSubType();
+ if (format == MFVideoFormat_NV12) {
+ if (!gfx::DeviceManagerDx::Get()->CanUseNV12()) {
+ return "nv12->argb32";
+ }
+ return "nv12";
+ }
+ if (format == MFVideoFormat_P010) {
+ if (!gfx::DeviceManagerDx::Get()->CanUseP010()) {
+ return "p010->argb32";
+ }
+ return "p010";
+ }
+ if (format == MFVideoFormat_P016) {
+ if (!gfx::DeviceManagerDx::Get()->CanUseP016()) {
+ return "p016->argb32";
+ }
+ return "p016";
+ }
+ if (format == MFVideoFormat_YV12) {
+ return "yv12";
+ }
+ return "unknown";
+ }();
+
+ const char* dxvaName = [&]() {
+ if (!mDXVA2Manager) {
+ return "no DXVA";
+ }
+ if (mDXVA2Manager->IsD3D11()) {
+ return "D3D11";
+ }
+ return "D3D9";
+ }();
+
+ return nsPrintfCString("wmf %s codec %s video decoder - %s, %s",
+ StreamTypeToString(mStreamType),
+ hw ? "hardware" : "software", dxvaName, formatName);
+}
+nsCString WMFVideoMFTManager::GetCodecName() const {
+ switch (mStreamType) {
+ case WMFStreamType::H264:
+ return "h264"_ns;
+ case WMFStreamType::VP8:
+ return "vp8"_ns;
+ case WMFStreamType::VP9:
+ return "vp9"_ns;
+ case WMFStreamType::AV1:
+ return "av1"_ns;
+ default:
+ return "unknown"_ns;
+ };
+}
+
+} // namespace mozilla
diff --git a/dom/media/platforms/wmf/WMFVideoMFTManager.h b/dom/media/platforms/wmf/WMFVideoMFTManager.h
new file mode 100644
index 0000000000..4982acadab
--- /dev/null
+++ b/dom/media/platforms/wmf/WMFVideoMFTManager.h
@@ -0,0 +1,132 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#if !defined(WMFVideoMFTManager_h_)
+# define WMFVideoMFTManager_h_
+
+# include "MFTDecoder.h"
+# include "MediaResult.h"
+# include "PerformanceRecorder.h"
+# include "WMF.h"
+# include "WMFDecoderModule.h"
+# include "WMFMediaDataDecoder.h"
+# include "mozilla/Atomics.h"
+# include "mozilla/RefPtr.h"
+# include "mozilla/gfx/Rect.h"
+
+namespace mozilla {
+
+class DXVA2Manager;
+
// Manages a WMF MFT video decoder (H.264/VP8/VP9/AV1), including optional
// DXVA (D3D11/D3D9) hardware acceleration. Produces VideoData from
// compressed MediaRawData samples.
class WMFVideoMFTManager : public MFTManager {
 public:
  WMFVideoMFTManager(const VideoInfo& aConfig,
                     layers::KnowsCompositor* aKnowsCompositor,
                     layers::ImageContainer* aImageContainer, float aFramerate,
                     const CreateDecoderParams::OptionSet& aOptions,
                     bool aDXVAEnabled, Maybe<TrackingId> aTrackingId);
  ~WMFVideoMFTManager();

  // Validates the config and creates/configures the MFT decoder; may fall
  // back from DXVA to software decoding internally.
  MediaResult Init();

  // Feeds one compressed sample to the decoder.
  HRESULT Input(MediaRawData* aSample) override;

  // Blocks until the decoder produces a frame (or needs more input).
  HRESULT Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutput) override;

  void Flush() override;

  void Shutdown() override;

  bool IsHardwareAccelerated(nsACString& aFailureReason) const override;

  TrackInfo::TrackType GetType() override { return TrackInfo::kVideoTrack; }

  nsCString GetDescriptionName() const override;

  nsCString GetCodecName() const override;

  // H.264 input must be converted to Annex B before reaching the MFT.
  MediaDataDecoder::ConversionRequired NeedsConversion() const override {
    return mStreamType == WMFStreamType::H264
               ? MediaDataDecoder::ConversionRequired::kNeedAnnexB
               : MediaDataDecoder::ConversionRequired::kNeedNone;
  }

 private:
  MediaResult ValidateVideoInfo();

  bool InitializeDXVA();

  MediaResult InitInternal();

  // Builds a VideoData from a software-decoded sample.
  HRESULT CreateBasicVideoFrame(IMFSample* aSample, int64_t aStreamOffset,
                                VideoData** aOutVideoData);

  // Builds a VideoData from a DXVA (hardware) decoded sample.
  HRESULT CreateD3DVideoFrame(IMFSample* aSample, int64_t aStreamOffset,
                              VideoData** aOutVideoData);

  HRESULT SetDecoderMediaTypes();

  // Queries the DXVA manager whether the GPU truly supports the negotiated
  // codec configuration (the MFT would otherwise fall back silently).
  bool CanUseDXVA(IMFMediaType* aInputType, IMFMediaType* aOutputType);

  // Gets the duration from aSample, and if an unknown or invalid duration is
  // returned from WMF, this instead returns the last known input duration.
  // The sample duration is unknown per `IMFSample::GetSampleDuration` docs
  // 'If the retrieved duration is zero, or if the method returns
  // MF_E_NO_SAMPLE_DURATION, the duration is unknown'. The same API also
  // suggests it may return other unspecified error codes, so we handle those
  // too. It also returns a signed int, but since a negative duration doesn't
  // make sense, we also handle that case.
  media::TimeUnit GetSampleDurationOrLastKnownDuration(
      IMFSample* aSample) const;

  // Video frame geometry.
  const VideoInfo mVideoInfo;
  const gfx::IntSize mImageSize;
  const WMFStreamType mStreamType;

  // The size we update from the IMFMediaType which might include paddings when
  // the stream format changes. This is only used for software decoding.
  gfx::IntSize mSoftwareImageSize;

  // The picture size we update from the IMFMediaType when the stream format
  // changes. We assume it's equal to the image size by default (no cropping).
  // This is only used for software decoding.
  gfx::IntSize mSoftwarePictureSize;

  // Row stride (in bytes) of the software decoder's output buffer.
  uint32_t mVideoStride;
  // Colorspace/range, captured from the first input sample that carries them.
  Maybe<gfx::YUVColorSpace> mColorSpace;
  gfx::ColorRange mColorRange;

  RefPtr<layers::ImageContainer> mImageContainer;
  RefPtr<layers::KnowsCompositor> mKnowsCompositor;
  UniquePtr<DXVA2Manager> mDXVA2Manager;

  // Duration of the most recent input sample; used as a fallback when WMF
  // reports an unknown/invalid output duration.
  media::TimeUnit mLastDuration;

  // Whether DXVA may be attempted; cleared to force a software re-init.
  bool mDXVAEnabled;
  // True once the MFT has accepted the D3D device manager.
  bool mUseHwAccel;

  // True when decoder output textures are handed to the compositor without
  // an intermediate copy.
  bool mZeroCopyNV12Texture;

  // Human-readable DXVA failure/status messages, accumulated across init
  // attempts and surfaced via IsHardwareAccelerated().
  nsCString mDXVAFailureReason;

  const GUID& GetMediaSubtypeGUID();

  // Counts success-with-no-sample results from the MFT (see Output()).
  uint32_t mNullOutputCount = 0;
  bool mGotValidOutputAfterNullOutput = false;
  bool mGotExcessiveNullOutput = false;
  // Cleared when ValidateVideoInfo() rejects the configuration.
  bool mIsValid = true;
  // True when a D3D11 image device is available for the software IMF path.
  bool mIMFUsable = false;
  const float mFramerate;
  const bool mLowLatency;

  PerformanceRecorderMulti<DecodeStage> mPerformanceRecorder;
  const Maybe<TrackingId> mTrackingId;
};
+
+} // namespace mozilla
+
+#endif // WMFVideoMFTManager_h_
diff --git a/dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp b/dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp
new file mode 100644
index 0000000000..fc11e89dfd
--- /dev/null
+++ b/dom/media/platforms/wmf/gtest/TestCanCreateMFTDecoder.cpp
@@ -0,0 +1,21 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include <gtest/gtest.h>
+
+#include "WMFDecoderModule.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/Preferences.h"
+
+using namespace mozilla;
+
+// Checks that a WMF MFT MP3 decoder can be instantiated directly in this
+// process (the "NoIPC" name suggests no remote/utility-process hop is
+// involved — TODO confirm against the decoder-selection code).
+TEST(CanCreateMFTDecoder, NoIPC)
+{
+  // Temporarily disable the ffvpx MP3 pref — presumably so MP3 decoding
+  // falls through to the WMF backend instead of ffvpx; verify against
+  // WMFDecoderModule's support logic.
+  const auto ffvpxMP3Pref = StaticPrefs::GetPrefName_media_ffvpx_mp3_enabled();
+  // Remember the current pref value so it can be restored below and other
+  // tests in the same process are unaffected.
+  const bool ffvpxMP3WasOn = Preferences::GetBool(ffvpxMP3Pref);
+  Preferences::SetBool(ffvpxMP3Pref, false);
+  EXPECT_TRUE(WMFDecoderModule::CanCreateMFTDecoder(WMFStreamType::MP3));
+  Preferences::SetBool(ffvpxMP3Pref, ffvpxMP3WasOn);
+}
diff --git a/dom/media/platforms/wmf/gtest/moz.build b/dom/media/platforms/wmf/gtest/moz.build
new file mode 100644
index 0000000000..ccd056ecf1
--- /dev/null
+++ b/dom/media/platforms/wmf/gtest/moz.build
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Test sources for this directory, compiled as part of a unified build.
+UNIFIED_SOURCES += [
+    "TestCanCreateMFTDecoder.cpp",
+]
+
+# Allow the test to include headers from this gtest directory itself.
+LOCAL_INCLUDES += [
+    "/dom/media/platforms/wmf/gtest",
+]
+
+# Link the test code into the gtest-enabled libxul variant.
+FINAL_LIBRARY = "xul-gtest"
diff --git a/dom/media/platforms/wmf/moz.build b/dom/media/platforms/wmf/moz.build
new file mode 100644
index 0000000000..9e0f3aa94a
--- /dev/null
+++ b/dom/media/platforms/wmf/moz.build
@@ -0,0 +1,85 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+# Headers exported for use by other parts of the tree (core WMF
+# decode/encode support, always built on this platform).
+EXPORTS += [
+    "DXVA2Manager.h",
+    "MFTDecoder.h",
+    "WMF.h",
+    "WMFAudioMFTManager.h",
+    "WMFDataEncoderUtils.h",
+    "WMFDecoderModule.h",
+    "WMFEncoderModule.h",
+    "WMFMediaDataDecoder.h",
+    "WMFMediaDataEncoder.h",
+    "WMFUtils.h",
+    "WMFVideoMFTManager.h",
+]
+
+# Media Foundation media-engine playback support; only built when the
+# MOZ_WMF_MEDIA_ENGINE configure flag is set.
+if CONFIG["MOZ_WMF_MEDIA_ENGINE"]:
+    EXPORTS += [
+        "MFMediaEngineAudioStream.h",
+        "MFMediaEngineDecoderModule.h",
+        "MFMediaEngineExtra.h",
+        "MFMediaEngineStream.h",
+        "MFMediaEngineVideoStream.h",
+        "MFMediaSource.h",
+    ]
+    UNIFIED_SOURCES += [
+        "MFMediaEngineAudioStream.cpp",
+        "MFMediaEngineDecoderModule.cpp",
+        "MFMediaEngineExtension.cpp",
+        "MFMediaEngineNotify.cpp",
+        "MFMediaEngineStream.cpp",
+        "MFMediaEngineVideoStream.cpp",
+        "MFMediaSource.cpp",
+    ]
+
+# Media Foundation CDM (content-protection/DRM) support; only built when
+# the MOZ_WMF_CDM configure flag is set.
+if CONFIG["MOZ_WMF_CDM"]:
+    EXPORTS += [
+        "MFCDMExtra.h",
+        "MFCDMProxy.h",
+        "MFCDMSession.h",
+        "MFContentProtectionManager.h",
+        "MFPMPHostWrapper.h",
+    ]
+    UNIFIED_SOURCES += [
+        "MFCDMProxy.cpp",
+        "MFCDMSession.cpp",
+        "MFContentProtectionManager.cpp",
+        "MFPMPHostWrapper.cpp",
+    ]
+
+# Core WMF decoder/encoder implementation, compiled unified.
+UNIFIED_SOURCES += [
+    "DXVA2Manager.cpp",
+    "MFTDecoder.cpp",
+    "MFTEncoder.cpp",
+    "WMFAudioMFTManager.cpp",
+    "WMFDecoderModule.cpp",
+    "WMFEncoderModule.cpp",
+    "WMFMediaDataDecoder.cpp",
+    "WMFVideoMFTManager.cpp",
+]
+
+# NOTE(review): WMFUtils.cpp is deliberately kept out of UNIFIED_SOURCES —
+# presumably to avoid macro/symbol clashes when concatenated with other
+# translation units; confirm before moving it into the unified list.
+SOURCES += [
+    "WMFUtils.cpp",
+]
+
+# Extra include paths: the media IPC glue plus cairo and libyuv headers
+# used by the decoding/encoding code.
+LOCAL_INCLUDES += [
+    "../../ipc/",
+    "/gfx/cairo/cairo/src",
+    "/media/libyuv/libyuv/include",
+]
+
+# Recurse into the gtest subdirectory for this module's unit tests.
+TEST_DIRS += [
+    "gtest",
+]
+
+include("/ipc/chromium/chromium-config.mozbuild")
+
+FINAL_LIBRARY = "xul"
+
+# Add libFuzzer configuration directives
+include("/tools/fuzzing/libfuzzer-config.mozbuild")