/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "mozilla/dom/VideoFrame.h"
#include "mozilla/dom/VideoFrameBinding.h"

#include <algorithm>
#include <limits>
#include <utility>

#include "ImageContainer.h"
#include "VideoColorSpace.h"
#include "js/StructuredClone.h"
#include "mozilla/Maybe.h"
#include "mozilla/Result.h"
#include "mozilla/ResultVariant.h"
#include "mozilla/ScopeExit.h"
#include "mozilla/UniquePtr.h"
#include "mozilla/dom/CanvasUtils.h"
#include "mozilla/dom/DOMRect.h"
#include "mozilla/dom/HTMLCanvasElement.h"
#include "mozilla/dom/HTMLImageElement.h"
#include "mozilla/dom/HTMLVideoElement.h"
#include "mozilla/dom/ImageBitmap.h"
#include "mozilla/dom/ImageUtils.h"
#include "mozilla/dom/OffscreenCanvas.h"
#include "mozilla/dom/Promise.h"
#include "mozilla/dom/SVGImageElement.h"
#include "mozilla/dom/StructuredCloneHolder.h"
#include "mozilla/dom/StructuredCloneTags.h"
#include "mozilla/dom/UnionTypes.h"
#include "mozilla/gfx/2D.h"
#include "mozilla/gfx/Swizzle.h"
#include "nsLayoutUtils.h"
#include "nsIPrincipal.h"
#include "nsIURI.h"

namespace mozilla::dom {

// Only needed for refcounted objects.
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE(VideoFrame, mParent)
NS_IMPL_CYCLE_COLLECTING_ADDREF(VideoFrame)
NS_IMPL_CYCLE_COLLECTING_RELEASE(VideoFrame)
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(VideoFrame)
  NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
  NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END

/*
 * The following are helpers to operate on an ArrayBuffer or ArrayBufferView.
 */
template <class T>
static Result<Span<uint8_t>, nsresult> GetArrayBufferData(const T& aBuffer) {
  // Get the buffer's data and length before using it.
  aBuffer.ComputeState();

  CheckedInt<size_t> byteLength(sizeof(typename T::element_type));
  byteLength *= aBuffer.Length();
  if (!byteLength.isValid()) {
    return Err(NS_ERROR_INVALID_ARG);
  }

  return Span<uint8_t>(aBuffer.Data(), byteLength.value());
}

static Result<Span<uint8_t>, nsresult> GetSharedArrayBufferData(
    const MaybeSharedArrayBufferViewOrMaybeSharedArrayBuffer& aBuffer) {
  if (aBuffer.IsArrayBufferView()) {
    return GetArrayBufferData(aBuffer.GetAsArrayBufferView());
  }

  MOZ_ASSERT(aBuffer.IsArrayBuffer());
  return GetArrayBufferData(aBuffer.GetAsArrayBuffer());
}
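// Note: the Spans returned by the helpers above are non-owning views into
// JS-owned memory. Their length is the element count multiplied by the
// element size via CheckedInt, so an overflowing byte length is rejected
// with NS_ERROR_INVALID_ARG instead of wrapping around. For example, a
// 16-element Uint8Array view yields a 16-byte Span.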
/*
 * The following are utilities to convert VideoColorSpace values to gfx's
 * values.
 */
static gfx::YUVColorSpace ToColorSpace(VideoMatrixCoefficients aMatrix) {
  switch (aMatrix) {
    case VideoMatrixCoefficients::Rgb:
      return gfx::YUVColorSpace::Identity;
    case VideoMatrixCoefficients::Bt709:
      return gfx::YUVColorSpace::BT709;
    case VideoMatrixCoefficients::Bt470bg:
    case VideoMatrixCoefficients::Smpte170m:
      return gfx::YUVColorSpace::BT601;
    case VideoMatrixCoefficients::Bt2020_ncl:
      return gfx::YUVColorSpace::BT2020;
    case VideoMatrixCoefficients::EndGuard_:
      break;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoMatrixCoefficients");
  return gfx::YUVColorSpace::Default;
}

static gfx::TransferFunction ToTransferFunction(
    VideoTransferCharacteristics aTransfer) {
  switch (aTransfer) {
    case VideoTransferCharacteristics::Bt709:
    case VideoTransferCharacteristics::Smpte170m:
      return gfx::TransferFunction::BT709;
    case VideoTransferCharacteristics::Iec61966_2_1:
      return gfx::TransferFunction::SRGB;
    case VideoTransferCharacteristics::Pq:
      return gfx::TransferFunction::PQ;
    case VideoTransferCharacteristics::Hlg:
      return gfx::TransferFunction::HLG;
    case VideoTransferCharacteristics::Linear:
    case VideoTransferCharacteristics::EndGuard_:
      break;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoTransferCharacteristics");
  return gfx::TransferFunction::Default;
}

static gfx::ColorSpace2 ToPrimaries(VideoColorPrimaries aPrimaries) {
  switch (aPrimaries) {
    case VideoColorPrimaries::Bt709:
      return gfx::ColorSpace2::BT709;
    case VideoColorPrimaries::Bt470bg:
      return gfx::ColorSpace2::BT601_625;
    case VideoColorPrimaries::Smpte170m:
      return gfx::ColorSpace2::BT601_525;
    case VideoColorPrimaries::Bt2020:
      return gfx::ColorSpace2::BT2020;
    case VideoColorPrimaries::Smpte432:
      return gfx::ColorSpace2::DISPLAY_P3;
    case VideoColorPrimaries::EndGuard_:
      break;
  }
  MOZ_ASSERT_UNREACHABLE("unsupported VideoColorPrimaries");
  return gfx::ColorSpace2::UNKNOWN;
}

static Maybe<VideoPixelFormat> ToVideoPixelFormat(gfx::SurfaceFormat aFormat) {
  switch (aFormat) {
    case gfx::SurfaceFormat::B8G8R8A8:
      return Some(VideoPixelFormat::BGRA);
    case gfx::SurfaceFormat::B8G8R8X8:
      return Some(VideoPixelFormat::BGRX);
    case gfx::SurfaceFormat::R8G8B8A8:
      return Some(VideoPixelFormat::RGBA);
    case gfx::SurfaceFormat::R8G8B8X8:
      return Some(VideoPixelFormat::RGBX);
    case gfx::SurfaceFormat::NV12:
      return Some(VideoPixelFormat::NV12);
    default:
      break;
  }
  return Nothing();
}

static Maybe<VideoPixelFormat> ToVideoPixelFormat(ImageBitmapFormat aFormat) {
  switch (aFormat) {
    case ImageBitmapFormat::RGBA32:
      return Some(VideoPixelFormat::RGBA);
    case ImageBitmapFormat::BGRA32:
      return Some(VideoPixelFormat::BGRA);
    case ImageBitmapFormat::YUV444P:
      return Some(VideoPixelFormat::I444);
    case ImageBitmapFormat::YUV422P:
      return Some(VideoPixelFormat::I422);
    case ImageBitmapFormat::YUV420P:
      return Some(VideoPixelFormat::I420);
    case ImageBitmapFormat::YUV420SP_NV12:
      return Some(VideoPixelFormat::NV12);
    default:
      break;
  }
  return Nothing();
}

/*
 * The following are helpers to read the image data from a given buffer in a
 * given format. The data layout is illustrated in the comments for
 * `VideoFrame::Format` below.
 */
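// As a concrete example of the layout the readers below assume: a 5x3 I420
// frame stores a 5-byte-stride Y plane of 3 rows (15 bytes at offset 0),
// followed by U and V planes with ceil(5/2) = 3-byte strides and
// ceil(3/2) = 2 rows each (6 bytes at offset 15 and 6 bytes at offset 21),
// 27 bytes in total. I420A appends a full-width alpha plane after V, and
// NV12 replaces the separate U and V planes with a single interleaved UV
// plane whose stride is the width rounded up to an even number.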
static int32_t CeilingOfHalf(int32_t aValue) {
  MOZ_ASSERT(aValue >= 0);
  return aValue / 2 + (aValue % 2);
}

class YUVBufferReaderBase {
 public:
  YUVBufferReaderBase(const Span<uint8_t>& aBuffer, int32_t aWidth,
                      int32_t aHeight)
      : mWidth(aWidth), mHeight(aHeight), mStrideY(aWidth), mBuffer(aBuffer) {}
  virtual ~YUVBufferReaderBase() = default;

  const uint8_t* DataY() const { return mBuffer.data(); }

  const int32_t mWidth;
  const int32_t mHeight;
  const int32_t mStrideY;

 protected:
  CheckedInt<size_t> YByteSize() const {
    return CheckedInt<size_t>(mStrideY) * mHeight;
  }

  const Span<uint8_t> mBuffer;
};

class I420ABufferReader;
class I420BufferReader : public YUVBufferReaderBase {
 public:
  I420BufferReader(const Span<uint8_t>& aBuffer, int32_t aWidth,
                   int32_t aHeight)
      : YUVBufferReaderBase(aBuffer, aWidth, aHeight),
        mStrideU(CeilingOfHalf(aWidth)),
        mStrideV(CeilingOfHalf(aWidth)) {}
  virtual ~I420BufferReader() = default;

  const uint8_t* DataU() const { return &mBuffer[YByteSize().value()]; }
  const uint8_t* DataV() const {
    return &mBuffer[YByteSize().value() + UByteSize().value()];
  }

  virtual I420ABufferReader* AsI420ABufferReader() { return nullptr; }

  const int32_t mStrideU;
  const int32_t mStrideV;

 protected:
  CheckedInt<size_t> UByteSize() const {
    return CheckedInt<size_t>(CeilingOfHalf(mHeight)) * mStrideU;
  }
  CheckedInt<size_t> VByteSize() const {
    return CheckedInt<size_t>(CeilingOfHalf(mHeight)) * mStrideV;
  }
};

class I420ABufferReader final : public I420BufferReader {
 public:
  I420ABufferReader(const Span<uint8_t>& aBuffer, int32_t aWidth,
                    int32_t aHeight)
      : I420BufferReader(aBuffer, aWidth, aHeight), mStrideA(aWidth) {
    MOZ_ASSERT(mStrideA == mStrideY);
  }
  virtual ~I420ABufferReader() = default;

  const uint8_t* DataA() const {
    return &mBuffer[YByteSize().value() + UByteSize().value() +
                    VByteSize().value()];
  }

  virtual I420ABufferReader* AsI420ABufferReader() override { return this; }

  const int32_t mStrideA;
};

class NV12BufferReader final : public YUVBufferReaderBase {
 public:
  NV12BufferReader(const Span<uint8_t>& aBuffer, int32_t aWidth,
                   int32_t aHeight)
      : YUVBufferReaderBase(aBuffer, aWidth, aHeight),
        mStrideUV(aWidth + aWidth % 2) {}
  virtual ~NV12BufferReader() = default;

  const uint8_t* DataUV() const { return &mBuffer[YByteSize().value()]; }

  const int32_t mStrideUV;
};

/*
 * The following are helpers defined in
 * https://w3c.github.io/webcodecs/#videoframe-algorithms
 */
static bool IsSameOrigin(nsIGlobalObject* aGlobal, nsIURI* aURI) {
  MOZ_ASSERT(aGlobal);

  nsIPrincipal* principal = aGlobal->PrincipalOrNull();
  // If the VideoFrame is created in a worker, the principal or aURI is null,
  // and the frame counts as same-origin. Otherwise, check the origin.
  return !principal || !aURI || principal->IsSameOrigin(aURI);
}

static bool IsSameOrigin(nsIGlobalObject* aGlobal, const VideoFrame& aFrame) {
  MOZ_ASSERT(aGlobal);
  MOZ_ASSERT(aFrame.GetParentObject());

  nsIPrincipal* principalX = aGlobal->PrincipalOrNull();
  nsIPrincipal* principalY = aFrame.GetParentObject()->PrincipalOrNull();
  // If both VideoFrames are created in workers, they are considered
  // same-origin.
  if (!principalX) {
    return !principalY;
  }
  // Otherwise, compare their principals.
  return principalX->Equals(principalY);
}
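// Illustrative sketch only (the caller shape and error message are
// assumptions, not code from this file): the VideoFrame constructors that
// accept an HTMLVideoElement or another VideoFrame are expected to consult
// these helpers and reject cross-origin sources with a SecurityError, e.g.
//
//   if (!IsSameOrigin(global, aVideoElement)) {
//     aRv.ThrowSecurityError("VideoFrame source is not same-origin");
//     return nullptr;
//   }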
static bool IsSameOrigin(nsIGlobalObject* aGlobal,
                         HTMLVideoElement& aVideoElement) {
  MOZ_ASSERT(aGlobal);

  // If CORS is in use, consider the video source same-origin.
  if (aVideoElement.GetCORSMode() != CORS_NONE) {
    return true;
  }
  // Otherwise, check whether the video source had a cross-origin redirect.
  if (aVideoElement.HadCrossOriginRedirects()) {
    return false;
  }

  // Finally, compare the global's principal with the video element's.
  nsIPrincipal* principal = aGlobal->PrincipalOrNull();
  nsCOMPtr<nsIPrincipal> elementPrincipal =
      aVideoElement.GetCurrentVideoPrincipal();
  //