Diffstat (limited to 'dom/media/utils')
-rw-r--r--  dom/media/utils/MediaElementEventRunners.cpp      | 140
-rw-r--r--  dom/media/utils/MediaElementEventRunners.h        | 190
-rw-r--r--  dom/media/utils/PerformanceRecorder.cpp           | 308
-rw-r--r--  dom/media/utils/PerformanceRecorder.h             | 407
-rw-r--r--  dom/media/utils/TelemetryProbesReporter.cpp       | 673
-rw-r--r--  dom/media/utils/TelemetryProbesReporter.h         | 172
-rw-r--r--  dom/media/utils/gtest/TestPerformanceRecorder.cpp | 110
-rw-r--r--  dom/media/utils/gtest/moz.build                   |  15
-rw-r--r--  dom/media/utils/moz.build                         |  26
9 files changed, 2041 insertions, 0 deletions
diff --git a/dom/media/utils/MediaElementEventRunners.cpp b/dom/media/utils/MediaElementEventRunners.cpp
new file mode 100644
index 0000000000..57be04528c
--- /dev/null
+++ b/dom/media/utils/MediaElementEventRunners.cpp
@@ -0,0 +1,140 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaElementEventRunners.h"
+
+#include "mozilla/dom/HTMLMediaElement.h"
+
+extern mozilla::LazyLogModule gMediaElementEventsLog;
+#define LOG_EVENT(type, msg) MOZ_LOG(gMediaElementEventsLog, type, msg)
+
+namespace mozilla::dom {
+
+nsMediaEventRunner::nsMediaEventRunner(const nsAString& aName,
+ HTMLMediaElement* aElement,
+ const nsAString& aEventName)
+ : mElement(aElement),
+ mName(aName),
+ mEventName(aEventName),
+ mLoadID(mElement->GetCurrentLoadID()) {}
+
+bool nsMediaEventRunner::IsCancelled() {
+ return !mElement || mElement->GetCurrentLoadID() != mLoadID;
+}
+
+nsresult nsMediaEventRunner::DispatchEvent(const nsAString& aName) {
+ return mElement ? mElement->DispatchEvent(aName) : NS_OK;
+}
+
+NS_IMPL_CYCLE_COLLECTION(nsMediaEventRunner, mElement)
+NS_IMPL_CYCLE_COLLECTING_ADDREF(nsMediaEventRunner)
+NS_IMPL_CYCLE_COLLECTING_RELEASE(nsMediaEventRunner)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsMediaEventRunner)
+ NS_INTERFACE_MAP_ENTRY(nsINamed)
+ NS_INTERFACE_MAP_ENTRY(nsIRunnable)
+ NS_INTERFACE_MAP_ENTRY_AMBIGUOUS(nsISupports, nsIRunnable)
+NS_INTERFACE_MAP_END
+
+NS_IMETHODIMP nsAsyncEventRunner::Run() {
+ // Silently cancel if our load has been cancelled or element has been CCed.
+ return IsCancelled() ? NS_OK : DispatchEvent(mEventName);
+}
+
+nsResolveOrRejectPendingPlayPromisesRunner::
+ nsResolveOrRejectPendingPlayPromisesRunner(
+ HTMLMediaElement* aElement, nsTArray<RefPtr<PlayPromise>>&& aPromises,
+ nsresult aError)
+ : nsMediaEventRunner(u"nsResolveOrRejectPendingPlayPromisesRunner"_ns,
+ aElement),
+ mPromises(std::move(aPromises)),
+ mError(aError) {
+ mElement->mPendingPlayPromisesRunners.AppendElement(this);
+}
+
+void nsResolveOrRejectPendingPlayPromisesRunner::ResolveOrReject() {
+ if (NS_SUCCEEDED(mError)) {
+ PlayPromise::ResolvePromisesWithUndefined(mPromises);
+ } else {
+ PlayPromise::RejectPromises(mPromises, mError);
+ }
+}
+
+NS_IMETHODIMP nsResolveOrRejectPendingPlayPromisesRunner::Run() {
+ if (!IsCancelled()) {
+ ResolveOrReject();
+ }
+
+ mElement->mPendingPlayPromisesRunners.RemoveElement(this);
+ return NS_OK;
+}
+
+NS_IMETHODIMP nsNotifyAboutPlayingRunner::Run() {
+ if (!IsCancelled()) {
+ DispatchEvent(u"playing"_ns);
+ }
+ return nsResolveOrRejectPendingPlayPromisesRunner::Run();
+}
+
+NS_IMPL_CYCLE_COLLECTION_INHERITED(nsResolveOrRejectPendingPlayPromisesRunner,
+ nsMediaEventRunner, mPromises)
+NS_IMPL_ADDREF_INHERITED(nsResolveOrRejectPendingPlayPromisesRunner,
+ nsMediaEventRunner)
+NS_IMPL_RELEASE_INHERITED(nsResolveOrRejectPendingPlayPromisesRunner,
+ nsMediaEventRunner)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
+ nsResolveOrRejectPendingPlayPromisesRunner)
+NS_INTERFACE_MAP_END_INHERITING(nsMediaEventRunner)
+
+NS_IMETHODIMP nsSourceErrorEventRunner::Run() {
+ // Silently cancel if our load has been cancelled.
+ if (IsCancelled()) {
+ return NS_OK;
+ }
+ LOG_EVENT(LogLevel::Debug,
+ ("%p Dispatching simple event source error", mElement.get()));
+ return nsContentUtils::DispatchTrustedEvent(mElement->OwnerDoc(), mSource,
+ u"error"_ns, CanBubble::eNo,
+ Cancelable::eNo);
+}
+
+NS_IMPL_CYCLE_COLLECTION_INHERITED(nsSourceErrorEventRunner, nsMediaEventRunner,
+ mSource)
+NS_IMPL_ADDREF_INHERITED(nsSourceErrorEventRunner, nsMediaEventRunner)
+NS_IMPL_RELEASE_INHERITED(nsSourceErrorEventRunner, nsMediaEventRunner)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(nsSourceErrorEventRunner)
+NS_INTERFACE_MAP_END_INHERITING(nsMediaEventRunner)
+
+NS_IMETHODIMP nsTimeupdateRunner::Run() {
+ if (IsCancelled() || !ShouldDispatchTimeupdate()) {
+ return NS_OK;
+ }
+  // If the `timeupdate` event listener takes a long time to run, we could end
+  // up spending all our time handling timeupdate events. The spec is vague in
+  // this situation, so we choose to update the last dispatch time after
+  // dispatching the event, in order to avoid that issue.
+ nsresult rv = DispatchEvent(mEventName);
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ LOG_EVENT(LogLevel::Debug,
+ ("%p Failed to dispatch 'timeupdate'", mElement.get()));
+ } else {
+ mElement->UpdateLastTimeupdateDispatchTime();
+ }
+ return rv;
+}
+
+bool nsTimeupdateRunner::ShouldDispatchTimeupdate() const {
+ if (mIsMandatory) {
+ return true;
+ }
+
+ // If the main thread is busy, tasks may be delayed and dispatched at
+ // unexpected times. Ensure we don't dispatch `timeupdate` more often
+ // than once per `TIMEUPDATE_MS`.
+ const TimeStamp& lastTime = mElement->LastTimeupdateDispatchTime();
+ return lastTime.IsNull() || TimeStamp::Now() - lastTime >
+ TimeDuration::FromMilliseconds(TIMEUPDATE_MS);
+}
+
+#undef LOG_EVENT
+} // namespace mozilla::dom
diff --git a/dom/media/utils/MediaElementEventRunners.h b/dom/media/utils/MediaElementEventRunners.h
new file mode 100644
index 0000000000..3f13494493
--- /dev/null
+++ b/dom/media/utils/MediaElementEventRunners.h
@@ -0,0 +1,190 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_media_mediaelementeventrunners_h
+#define mozilla_media_mediaelementeventrunners_h
+
+#include "mozilla/dom/PlayPromise.h"
+#include "nsCycleCollectionParticipant.h"
+#include "nsIContent.h"
+#include "nsINamed.h"
+#include "nsIRunnable.h"
+#include "nsString.h"
+#include "nsISupportsImpl.h"
+#include "nsTString.h"
+
+namespace mozilla::dom {
+
+class HTMLMediaElement;
+
+// Under certain conditions there may be no-one holding references to
+// a media element from script, DOM parent, etc, but the element may still
+// fire meaningful events in the future so we can't destroy it yet:
+// 1) If the element is delaying the load event (or would be, if it were
+// in a document), then events up to loadeddata or error could be fired,
+// so we need to stay alive.
+// 2) If the element is not paused and playback has not ended, then
+// we will (or might) play, sending timeupdate and ended events and possibly
+// audio output, so we need to stay alive.
+// 3) If the element is seeking, then we will fire seeking events and possibly
+// start playing afterward, so we need to stay alive.
+// 4) If autoplay could start playback in this element (if we got enough data),
+// then we need to stay alive.
+// 5) If the element is currently loading, not suspended, and its source is
+// not a MediaSource, then script might be waiting for progress events or a
+// 'stalled' or 'suspend' event, so we need to stay alive.
+// If we're already suspended then (all other conditions being met),
+// it's OK to just disappear without firing any more events,
+// since we have the freedom to remain suspended indefinitely. Note
+// that we could use this 'suspended' loophole to garbage-collect a suspended
+// element in case 4 even if it had 'autoplay' set, but we choose not to.
+// If someone throws away all references to a loading 'autoplay' element,
+// sound should still eventually play.
+// 6) If the source is a MediaSource, most loading events will not fire unless
+// appendBuffer() is called on a SourceBuffer, in which case something is
+// already referencing the SourceBuffer, which keeps the associated media
+// element alive. Further, a MediaSource will never time out the resource
+// fetch, and so should not keep the media element alive if it is
+// unreferenced. A pending 'stalled' event keeps the media element alive.
+//
+// Media elements owned by inactive documents (i.e. documents not contained in
+// any document viewer) should never hold a self-reference because none of the
+// above conditions are allowed: the element will stop loading and playing
+// and never resume loading or playing unless its owner document changes to
+// an active document (which can only happen if there is an external reference
+// to the element).
+// Media elements with no owner doc should be able to hold a self-reference.
+// Something native must have created the element and may expect it to
+// stay alive to play.
+
+// It's very important that any change in state which could change the value of
+// needSelfReference in AddRemoveSelfReference be followed by a call to
+// AddRemoveSelfReference before this element could die!
+// It's especially important if needSelfReference would change to 'true',
+// since if we neglect to add a self-reference, this element might be
+// garbage collected while there are still event listeners that should
+// receive events. If we neglect to remove the self-reference then the element
+// just lives longer than it needs to.
+
+class nsMediaEventRunner : public nsIRunnable, public nsINamed {
+ public:
+ NS_DECL_CYCLE_COLLECTING_ISUPPORTS
+ NS_DECL_CYCLE_COLLECTION_CLASS_AMBIGUOUS(nsMediaEventRunner, nsIRunnable)
+
+ explicit nsMediaEventRunner(const nsAString& aName,
+ HTMLMediaElement* aElement,
+ const nsAString& aEventName = u"unknown"_ns);
+
+ void Cancel() { mElement = nullptr; }
+ NS_IMETHODIMP GetName(nsACString& aName) override {
+ aName = NS_ConvertUTF16toUTF8(mName).get();
+ return NS_OK;
+ }
+ nsString Name() const { return mName; }
+ nsString EventName() const { return mEventName; }
+
+ protected:
+ virtual ~nsMediaEventRunner() = default;
+ bool IsCancelled();
+ nsresult DispatchEvent(const nsAString& aName);
+
+ RefPtr<HTMLMediaElement> mElement;
+ nsString mName;
+ nsString mEventName;
+ uint32_t mLoadID;
+};
+
+/**
+ * This runner is used to dispatch an async event on the media element.
+ */
+class nsAsyncEventRunner : public nsMediaEventRunner {
+ public:
+ nsAsyncEventRunner(const nsAString& aEventName, HTMLMediaElement* aElement)
+ : nsMediaEventRunner(u"nsAsyncEventRunner"_ns, aElement, aEventName) {}
+ NS_IMETHOD Run() override;
+};
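+
+// A minimal usage sketch (hypothetical call site; real dispatch happens
+// inside HTMLMediaElement code, where `this` is the element):
+//
+//   RefPtr<nsMediaEventRunner> runner =
+//       new nsAsyncEventRunner(u"canplay"_ns, this);
+//   GetMainThreadSerialEventTarget()->Dispatch(runner.forget());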
+
+/**
+ * These runners are used to handle `playing` event and address play promise.
+ *
+ * If no error is passed while constructing an instance, the instance will
+ * resolve the passed promises with undefined; otherwise, the instance will
+ * reject the passed promises with the passed error.
+ *
+ * The constructor appends the constructed instance into the passed media
+ * element's mPendingPlayPromisesRunners member and once the the runner is run
+ * (whether fulfilled or canceled), it removes itself from
+ * mPendingPlayPromisesRunners.
+ */
+class nsResolveOrRejectPendingPlayPromisesRunner : public nsMediaEventRunner {
+ public:
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(
+ nsResolveOrRejectPendingPlayPromisesRunner, nsMediaEventRunner)
+
+ nsResolveOrRejectPendingPlayPromisesRunner(
+ HTMLMediaElement* aElement, nsTArray<RefPtr<PlayPromise>>&& aPromises,
+ nsresult aError = NS_OK);
+ void ResolveOrReject();
+ NS_IMETHOD Run() override;
+
+ protected:
+ virtual ~nsResolveOrRejectPendingPlayPromisesRunner() = default;
+
+ private:
+ nsTArray<RefPtr<PlayPromise>> mPromises;
+ nsresult mError;
+};
+
+class nsNotifyAboutPlayingRunner
+ : public nsResolveOrRejectPendingPlayPromisesRunner {
+ public:
+ nsNotifyAboutPlayingRunner(
+ HTMLMediaElement* aElement,
+ nsTArray<RefPtr<PlayPromise>>&& aPendingPlayPromises)
+ : nsResolveOrRejectPendingPlayPromisesRunner(
+ aElement, std::move(aPendingPlayPromises)) {}
+ NS_IMETHOD Run() override;
+};
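+
+// A hypothetical sketch of how a successful play() request might queue these
+// runners (`TakePendingPlayPromises` is assumed here; the real logic lives in
+// HTMLMediaElement):
+//
+//   nsTArray<RefPtr<PlayPromise>> promises = TakePendingPlayPromises();
+//   NS_DispatchToMainThread(
+//       new nsNotifyAboutPlayingRunner(this, std::move(promises)));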
+
+/**
+ * This runner is used to dispatch a source error event, which happens when
+ * loading the resource fails.
+ */
+class nsSourceErrorEventRunner : public nsMediaEventRunner {
+ public:
+ NS_DECL_ISUPPORTS_INHERITED
+ NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(nsSourceErrorEventRunner,
+ nsMediaEventRunner)
+ nsSourceErrorEventRunner(HTMLMediaElement* aElement, nsIContent* aSource)
+ : nsMediaEventRunner(u"nsSourceErrorEventRunner"_ns, aElement),
+ mSource(aSource) {}
+ NS_IMETHOD Run() override;
+
+ private:
+ virtual ~nsSourceErrorEventRunner() = default;
+ nsCOMPtr<nsIContent> mSource;
+};
+
+/**
+ * This runner is used to dispatch the `timeupdate` event and to ensure we
+ * don't dispatch `timeupdate` more often than once per `TIMEUPDATE_MS`,
+ * unless the event is mandatory.
+ */
+class nsTimeupdateRunner : public nsMediaEventRunner {
+ public:
+ nsTimeupdateRunner(HTMLMediaElement* aElement, bool aIsMandatory)
+ : nsMediaEventRunner(u"nsTimeupdateRunner"_ns, aElement,
+ u"timeupdate"_ns),
+ mIsMandatory(aIsMandatory) {}
+ NS_IMETHOD Run() override;
+
+ private:
+ bool ShouldDispatchTimeupdate() const;
+ bool mIsMandatory;
+};
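+
+// For instance, a periodic (non-mandatory) dispatch could be queued like this
+// (hypothetical call site):
+//
+//   NS_DispatchToMainThread(
+//       new nsTimeupdateRunner(this, /* aIsMandatory */ false));
+//
+// A non-mandatory runner is silently dropped in Run() when the previous
+// `timeupdate` was dispatched less than TIMEUPDATE_MS ago.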
+
+} // namespace mozilla::dom
+
+#endif // mozilla_media_mediaelementeventrunners_h
diff --git a/dom/media/utils/PerformanceRecorder.cpp b/dom/media/utils/PerformanceRecorder.cpp
new file mode 100644
index 0000000000..d6124e8cf6
--- /dev/null
+++ b/dom/media/utils/PerformanceRecorder.cpp
@@ -0,0 +1,308 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "PerformanceRecorder.h"
+
+#include "base/process_util.h"
+#include "mozilla/Logging.h"
+#include "mozilla/gfx/Types.h"
+#include "nsPrintfCString.h"
+
+namespace mozilla {
+
+static const char* SourceToStr(TrackingId::Source aSource) {
+ switch (aSource) {
+ case TrackingId::Source::Unimplemented:
+ MOZ_ASSERT_UNREACHABLE("Unimplemented TrackingId Source");
+ return "Unimplemented";
+ case TrackingId::Source::AudioDestinationNode:
+ return "AudioDestinationNode";
+ case TrackingId::Source::Camera:
+ return "CameraCapture";
+ case TrackingId::Source::Canvas:
+ return "CanvasCapture";
+ case TrackingId::Source::ChannelDecoder:
+ return "ChannelDecoder";
+ case TrackingId::Source::HLSDecoder:
+ return "HLSDecoder";
+ case TrackingId::Source::MediaCapabilities:
+ return "MediaCapabilities";
+ case TrackingId::Source::MediaElementDecoder:
+ return "MediaElementDecoderCapture";
+ case TrackingId::Source::MediaElementStream:
+ return "MediaElementStreamCapture";
+ case TrackingId::Source::MSEDecoder:
+ return "MSEDecoder";
+ case TrackingId::Source::RTCRtpReceiver:
+ return "RTCRtpReceiver";
+ case TrackingId::Source::Screen:
+ return "ScreenCapture";
+ case TrackingId::Source::Tab:
+ return "TabCapture";
+ case TrackingId::Source::Window:
+ return "WindowCapture";
+ case TrackingId::Source::LAST:
+ MOZ_ASSERT_UNREACHABLE("Invalid TrackingId Source");
+ return "Invalid";
+ }
+ MOZ_ASSERT_UNREACHABLE("Unexpected TrackingId Source");
+ return "Unexpected";
+}
+
+TrackingId::TrackingId() : mSource(Source::Unimplemented), mUniqueInProcId(0) {}
+
+TrackingId::TrackingId(
+ Source aSource, uint32_t aUniqueInProcId,
+ TrackAcrossProcesses aTrack /* = TrackAcrossProcesses::NO */)
+ : mSource(aSource),
+ mUniqueInProcId(aUniqueInProcId),
+ mProcId(aTrack == TrackAcrossProcesses::Yes
+ ? Some(base::GetCurrentProcId())
+ : Nothing()) {}
+
+nsCString TrackingId::ToString() const {
+ if (mProcId) {
+ return nsPrintfCString("%s-%u-%u", SourceToStr(mSource), *mProcId,
+ mUniqueInProcId);
+ }
+ return nsPrintfCString("%s-%u", SourceToStr(mSource), mUniqueInProcId);
+}
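+
+// For example, a hypothetical TrackingId(Source::Camera, 7,
+// TrackAcrossProcesses::Yes) created in process 1234 serializes as
+// "CameraCapture-1234-7"; without process tracking it would be
+// "CameraCapture-7".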
+
+static const char* StageToStr(MediaStage aStage) {
+ switch (aStage) {
+ case MediaStage::RequestData:
+ return "RequestData";
+ case MediaStage::RequestDemux:
+ return "RequestDemux";
+ case MediaStage::CopyDemuxedData:
+ return "CopyDemuxedData";
+ case MediaStage::RequestDecode:
+ return "RequestDecode";
+ case MediaStage::CopyDecodedVideo:
+ return "CopyDecodedVideo";
+ default:
+ return "InvalidStage";
+ }
+}
+
+static void AppendMediaInfoFlagToName(nsCString& aName, MediaInfoFlag aFlag) {
+ if (aFlag & MediaInfoFlag::KeyFrame) {
+ aName.Append("kf,");
+ }
+ // Decoding
+ if (aFlag & MediaInfoFlag::SoftwareDecoding) {
+ aName.Append("sw,");
+ } else if (aFlag & MediaInfoFlag::HardwareDecoding) {
+ aName.Append("hw,");
+ }
+ // Codec type
+ if (aFlag & MediaInfoFlag::VIDEO_AV1) {
+ aName.Append("av1,");
+ } else if (aFlag & MediaInfoFlag::VIDEO_H264) {
+ aName.Append("h264,");
+ } else if (aFlag & MediaInfoFlag::VIDEO_VP8) {
+ aName.Append("vp8,");
+ } else if (aFlag & MediaInfoFlag::VIDEO_VP9) {
+ aName.Append("vp9,");
+ } else if (aFlag & MediaInfoFlag::VIDEO_THEORA) {
+ aName.Append("theora,");
+ }
+}
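+
+// For example, a hardware-decoded H264 keyframe, i.e. MediaInfoFlag::KeyFrame
+// | MediaInfoFlag::HardwareDecoding | MediaInfoFlag::VIDEO_H264, appends
+// "kf,hw,h264,".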
+
+static void AppendImageFormatToName(nsCString& aName,
+ DecodeStage::ImageFormat aFormat) {
+ aName.Append([&] {
+ switch (aFormat) {
+ case DecodeStage::YUV420P:
+ return "yuv420p,";
+ case DecodeStage::YUV422P:
+ return "yuv422p,";
+ case DecodeStage::YUV444P:
+ return "yuv444p,";
+ case DecodeStage::NV12:
+ return "nv12,";
+ case DecodeStage::YV12:
+ return "yv12,";
+ case DecodeStage::NV21:
+ return "nv21,";
+ case DecodeStage::P010:
+ return "p010,";
+ case DecodeStage::P016:
+ return "p016,";
+ case DecodeStage::RGBA32:
+ return "rgba32,";
+ case DecodeStage::RGB24:
+ return "rgb24,";
+ case DecodeStage::GBRP:
+ return "gbrp,";
+ case DecodeStage::ANDROID_SURFACE:
+ return "android.Surface,";
+ }
+ MOZ_ASSERT_UNREACHABLE("Unhandled DecodeStage::ImageFormat");
+ return "";
+ }());
+}
+
+static void AppendYUVColorSpaceToName(nsCString& aName,
+ gfx::YUVColorSpace aSpace) {
+ aName.Append([&] {
+ switch (aSpace) {
+ case gfx::YUVColorSpace::BT601:
+ return "space=BT.601,";
+ case gfx::YUVColorSpace::BT709:
+ return "space=BT.709,";
+ case gfx::YUVColorSpace::BT2020:
+ return "space=BT.2020,";
+ case gfx::YUVColorSpace::Identity:
+ return "space=Identity,";
+ }
+ MOZ_ASSERT_UNREACHABLE("Unhandled gfx::YUVColorSpace");
+ return "";
+ }());
+}
+
+static void AppendColorRangeToName(nsCString& aName, gfx::ColorRange aRange) {
+ aName.Append([&] {
+ switch (aRange) {
+ case gfx::ColorRange::LIMITED:
+ return "range=Limited,";
+ case gfx::ColorRange::FULL:
+ return "range=Full,";
+ }
+ MOZ_ASSERT_UNREACHABLE("Unhandled gfx::ColorRange");
+ return "";
+ }());
+}
+
+static void AppendColorDepthToName(nsCString& aName, gfx::ColorDepth aDepth) {
+ aName.Append([&] {
+ switch (aDepth) {
+ case gfx::ColorDepth::COLOR_8:
+ return "depth=8,";
+ case gfx::ColorDepth::COLOR_10:
+ return "depth=10,";
+ case gfx::ColorDepth::COLOR_12:
+ return "depth=12,";
+ case gfx::ColorDepth::COLOR_16:
+ return "depth=16,";
+ }
+ MOZ_ASSERT_UNREACHABLE("Unhandled gfx::ColorDepth");
+ return "";
+ }());
+}
+
+/* static */
+const char* PerformanceRecorderBase::FindMediaResolution(int32_t aHeight) {
+ static const struct {
+ const int32_t mH;
+ const nsCString mRes;
+  } sResolutions[] = {{0, "A:0"_ns}, // the following entries are for video
+ {240, "V:0<h<=240"_ns},
+ {480, "V:240<h<=480"_ns},
+ {576, "V:480<h<=576"_ns},
+ {720, "V:576<h<=720"_ns},
+ {1080, "V:720<h<=1080"_ns},
+ {1440, "V:1080<h<=1440"_ns},
+ {2160, "V:1440<h<=2160"_ns},
+ {INT_MAX, "V:h>2160"_ns}};
+ const char* resolution = sResolutions[0].mRes.get();
+ for (auto&& res : sResolutions) {
+ if (aHeight <= res.mH) {
+ resolution = res.mRes.get();
+ break;
+ }
+ }
+ return resolution;
+}
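+
+// For example, FindMediaResolution(0) yields "A:0" (the audio bucket),
+// FindMediaResolution(720) yields "V:576<h<=720", and
+// FindMediaResolution(4320) yields "V:h>2160".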
+
+/* static */
+bool PerformanceRecorderBase::IsMeasurementEnabled() {
+ return profiler_thread_is_being_profiled_for_markers() ||
+ PerformanceRecorderBase::sEnableMeasurementForTesting;
+}
+
+/* static */
+TimeStamp PerformanceRecorderBase::GetCurrentTimeForMeasurement() {
+  // The system call to read the clock is rather expensive on Windows. As we
+  // only report measurements via profiler markers, we skip the measurement
+  // when markers are disabled, in order to save CPU time.
+ return IsMeasurementEnabled() ? TimeStamp::Now() : TimeStamp();
+}
+
+ProfilerString8View PlaybackStage::Name() const {
+ if (!mName) {
+ mName.emplace(StageToStr(mStage));
+ mName->Append(":");
+ mName->Append(FindMediaResolution(mHeight));
+ mName->Append(":");
+ AppendMediaInfoFlagToName(*mName, mFlag);
+ }
+ return *mName;
+}
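+
+// For example, a 720p hardware H264 keyframe decode request produces a marker
+// name like "RequestDecode:V:576<h<=720:kf,hw,h264,".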
+
+ProfilerString8View CaptureStage::Name() const {
+ if (!mName) {
+ auto imageTypeToStr = [](ImageType aType) -> const char* {
+ switch (aType) {
+ case ImageType::I420:
+ return "I420";
+ case ImageType::YUY2:
+ return "YUY2";
+ case ImageType::YV12:
+ return "YV12";
+ case ImageType::UYVY:
+ return "UYVY";
+ case ImageType::NV12:
+ return "NV12";
+ case ImageType::NV21:
+ return "NV21";
+ case ImageType::MJPEG:
+ return "MJPEG";
+ case ImageType::Unknown:
+ return "(unknown image type)";
+ default:
+ return "(unimplemented image type)";
+ };
+ };
+ mName = Some(nsPrintfCString(
+ "CaptureVideoFrame %s %dx%d %s %s", mSource.Data(), mWidth, mHeight,
+ imageTypeToStr(mImageType), mTrackingId.ToString().get()));
+ }
+ return *mName;
+}
+
+ProfilerString8View CopyVideoStage::Name() const {
+ if (!mName) {
+ mName =
+ Some(nsPrintfCString("CopyVideoFrame %s %dx%d %s", mSource.Data(),
+ mWidth, mHeight, mTrackingId.ToString().get()));
+ }
+ return *mName;
+}
+
+ProfilerString8View DecodeStage::Name() const {
+ if (!mName) {
+ nsCString extras;
+ AppendMediaInfoFlagToName(extras, mFlag);
+ mImageFormat.apply(
+ [&](ImageFormat aFormat) { AppendImageFormatToName(extras, aFormat); });
+ mColorDepth.apply([&](gfx::ColorDepth aDepth) {
+ AppendColorDepthToName(extras, aDepth);
+ });
+ mColorRange.apply([&](gfx::ColorRange aRange) {
+ AppendColorRangeToName(extras, aRange);
+ });
+ mYUVColorSpace.apply([&](gfx::YUVColorSpace aColorSpace) {
+ AppendYUVColorSpaceToName(extras, aColorSpace);
+ });
+ mName = Some(nsPrintfCString("DecodeFrame %s %dx%d %s %s", mSource.Data(),
+ mWidth.valueOr(-1), mHeight.valueOr(-1),
+ extras.get(), mTrackingId.ToString().get()));
+ }
+ return *mName;
+}
+
+} // namespace mozilla
diff --git a/dom/media/utils/PerformanceRecorder.h b/dom/media/utils/PerformanceRecorder.h
new file mode 100644
index 0000000000..582d56e5e3
--- /dev/null
+++ b/dom/media/utils/PerformanceRecorder.h
@@ -0,0 +1,407 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set sw=2 ts=8 et ft=cpp : */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef mozilla_PerformanceRecorder_h
+#define mozilla_PerformanceRecorder_h
+
+#include <type_traits>
+
+#include "mozilla/Attributes.h"
+#include "mozilla/BaseProfilerMarkersPrerequisites.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/Mutex.h"
+#include "mozilla/TimeStamp.h"
+#include "mozilla/TypedEnumBits.h"
+#include "nsStringFwd.h"
+#include "nsTPriorityQueue.h"
+#include "mozilla/ProfilerMarkers.h"
+
+namespace mozilla {
+namespace gfx {
+enum class YUVColorSpace : uint8_t;
+enum class ColorDepth : uint8_t;
+enum class ColorRange : uint8_t;
+} // namespace gfx
+
+struct TrackingId {
+ enum class Source : uint8_t {
+ Unimplemented,
+ AudioDestinationNode,
+ Camera,
+ Canvas,
+ ChannelDecoder,
+ HLSDecoder,
+ MediaCapabilities,
+ MediaElementDecoder,
+ MediaElementStream,
+ MSEDecoder,
+ RTCRtpReceiver,
+ Screen,
+ Tab,
+ Window,
+ LAST
+ };
+ enum class TrackAcrossProcesses : uint8_t {
+ Yes,
+ No,
+ };
+ TrackingId();
+ TrackingId(Source aSource, uint32_t aUniqueInProcId,
+ TrackAcrossProcesses aTrack = TrackAcrossProcesses::No);
+
+ nsCString ToString() const;
+
+ Source mSource;
+ uint32_t mUniqueInProcId;
+ Maybe<uint32_t> mProcId;
+};
+
+enum class MediaInfoFlag : uint16_t {
+ None = (0 << 0),
+ NonKeyFrame = (1 << 0),
+ KeyFrame = (1 << 1),
+ SoftwareDecoding = (1 << 2),
+ HardwareDecoding = (1 << 3),
+ VIDEO_AV1 = (1 << 4),
+ VIDEO_H264 = (1 << 5),
+ VIDEO_VP8 = (1 << 6),
+ VIDEO_VP9 = (1 << 7),
+ VIDEO_THEORA = (1 << 8),
+};
+MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(MediaInfoFlag)
+
+/**
+ * This represents the different stages that media data goes through during
+ * playback.
+ *
+ * |---| |---| |------|
+ * Copy Demuxed Copy Demuxed Copy Decoded
+ * Data Data Video
+ * |------------- | |-----------------------------------|
+ * Request Demux Request Decode
+ * |-----------------------------------------------------------|
+ * Request Data
+ *
+ * RequestData : Records the time from when the MediaDecoderStateMachine
+ * (MDSM) requests a decoded sample until MDSM receives it.
+ *
+ * RequestDemux : Records the time from when the MediaFormatReader (MFR)
+ * requests a demuxed sample until MFR receives it. This stage is a sub-stage
+ * of RequestData.
+ *
+ * CopyDemuxedData : In some situations we need to copy the demuxed data,
+ * which is not decoded yet and therefore still small. This records the time
+ * spent copying. This stage can happen multiple times, either as a sub-stage
+ * of RequestDemux (in the MSE case) or as a sub-stage of RequestDecode (when
+ * sending data via IPC).
+ *
+ * RequestDecode : Records the time from when MFR asks the decoder for a
+ * decoded sample until MFR receives it. As the decoder might be remote, this
+ * stage may include time spent on IPC round trips. This stage is a sub-stage
+ * of RequestData.
+ *
+ * CopyDecodedVideo : If we can't reuse the decoder's texture in the
+ * compositor, we have to copy the video data to another shareable texture.
+ * This records the time spent copying. This stage is a sub-stage of
+ * RequestDecode.
+ */
+enum class MediaStage : uint8_t {
+ Invalid,
+ RequestData,
+ RequestDemux,
+ CopyDemuxedData,
+ RequestDecode,
+ CopyDecodedVideo,
+};
+
+class PlaybackStage {
+ public:
+ explicit PlaybackStage(MediaStage aStage, int32_t aHeight = 0,
+ MediaInfoFlag aFlag = MediaInfoFlag::None)
+ : mStage(aStage), mHeight(aHeight), mFlag(aFlag) {
+ MOZ_ASSERT(aStage != MediaStage::Invalid);
+ }
+
+ ProfilerString8View Name() const;
+ const MarkerCategory& Category() const {
+ return baseprofiler::category::MEDIA_PLAYBACK;
+ }
+
+ MediaStage mStage;
+ int32_t mHeight;
+ MediaInfoFlag mFlag;
+
+ private:
+ mutable Maybe<nsCString> mName;
+};
+
+class CaptureStage {
+ public:
+ enum class ImageType : uint8_t {
+ Unknown,
+ I420,
+ YUY2,
+ YV12,
+ UYVY,
+ NV12,
+ NV21,
+ MJPEG,
+ };
+
+ CaptureStage(nsCString aSource, TrackingId aTrackingId, int32_t aWidth,
+ int32_t aHeight, ImageType aImageType)
+ : mSource(std::move(aSource)),
+ mTrackingId(std::move(aTrackingId)),
+ mWidth(aWidth),
+ mHeight(aHeight),
+ mImageType(aImageType) {}
+
+ ProfilerString8View Name() const;
+ const MarkerCategory& Category() const {
+ return baseprofiler::category::MEDIA_RT;
+ }
+
+ nsCString mSource;
+ TrackingId mTrackingId;
+ int32_t mWidth;
+ int32_t mHeight;
+ ImageType mImageType;
+
+ private:
+ mutable Maybe<nsCString> mName;
+};
+
+class CopyVideoStage {
+ public:
+ CopyVideoStage(nsCString aSource, TrackingId aTrackingId, int32_t aWidth,
+ int32_t aHeight)
+ : mSource(std::move(aSource)),
+ mTrackingId(std::move(aTrackingId)),
+ mWidth(aWidth),
+ mHeight(aHeight) {}
+
+ ProfilerString8View Name() const;
+ const MarkerCategory& Category() const {
+ return baseprofiler::category::MEDIA_RT;
+ }
+
+ // The name of the source that performs this stage.
+ nsCString mSource;
+ // A unique id identifying the source of the video frame this stage is
+ // performed for.
+ TrackingId mTrackingId;
+ int32_t mWidth;
+ int32_t mHeight;
+
+ private:
+ mutable Maybe<nsCString> mName;
+};
+
+class DecodeStage {
+ public:
+ enum ImageFormat : uint8_t {
+ YUV420P,
+ YUV422P,
+ YUV444P,
+ NV12,
+ YV12,
+ NV21,
+ P010,
+ P016,
+ RGBA32,
+ RGB24,
+ GBRP,
+ ANDROID_SURFACE,
+ };
+
+ DecodeStage(nsCString aSource, TrackingId aTrackingId, MediaInfoFlag aFlag)
+ : mSource(std::move(aSource)),
+ mTrackingId(std::move(aTrackingId)),
+ mFlag(aFlag) {}
+ ProfilerString8View Name() const;
+ const MarkerCategory& Category() const {
+ return baseprofiler::category::MEDIA_PLAYBACK;
+ }
+
+ void SetResolution(int aWidth, int aHeight) {
+ mWidth = Some(aWidth);
+ mHeight = Some(aHeight);
+ }
+ void SetImageFormat(ImageFormat aFormat) { mImageFormat = Some(aFormat); }
+ void SetYUVColorSpace(gfx::YUVColorSpace aColorSpace) {
+ mYUVColorSpace = Some(aColorSpace);
+ }
+ void SetColorRange(gfx::ColorRange aColorRange) {
+ mColorRange = Some(aColorRange);
+ }
+ void SetColorDepth(gfx::ColorDepth aColorDepth) {
+ mColorDepth = Some(aColorDepth);
+ }
+
+ // The name of the source that performs this stage.
+ nsCString mSource;
+ // A unique id identifying the source of the video frame this stage is
+ // performed for.
+ TrackingId mTrackingId;
+ MediaInfoFlag mFlag;
+ Maybe<int> mWidth;
+ Maybe<int> mHeight;
+ Maybe<ImageFormat> mImageFormat;
+ Maybe<gfx::YUVColorSpace> mYUVColorSpace;
+ Maybe<gfx::ColorRange> mColorRange;
+ Maybe<gfx::ColorDepth> mColorDepth;
+ mutable Maybe<nsCString> mName;
+};
+
+class PerformanceRecorderBase {
+ public:
+ static bool IsMeasurementEnabled();
+ static TimeStamp GetCurrentTimeForMeasurement();
+
+  // Return the resolution range for the given height, e.g. V:1080<h<=1440.
+ static const char* FindMediaResolution(int32_t aHeight);
+
+ protected:
+  // Measurement can be force-enabled for testing.
+ static inline bool sEnableMeasurementForTesting = false;
+};
+
+template <typename StageType>
+class PerformanceRecorderImpl : public PerformanceRecorderBase {
+ public:
+ ~PerformanceRecorderImpl() = default;
+
+ PerformanceRecorderImpl(PerformanceRecorderImpl&& aRhs) noexcept
+ : mStages(std::move(aRhs.mStages)) {}
+ PerformanceRecorderImpl& operator=(PerformanceRecorderImpl&&) = delete;
+ PerformanceRecorderImpl(const PerformanceRecorderImpl&) = delete;
+ PerformanceRecorderImpl& operator=(const PerformanceRecorderImpl&) = delete;
+
+ protected:
+ PerformanceRecorderImpl() = default;
+
+ // Stores the stage with the current time as its start time, associated with
+ // aId.
+ template <typename... Args>
+ void Start(int64_t aId, Args... aArgs) {
+ if (IsMeasurementEnabled()) {
+ MutexAutoLock lock(mMutex);
+ mStages.Push(std::make_tuple(aId, GetCurrentTimeForMeasurement(),
+ StageType(std::move(aArgs)...)));
+ }
+ }
+
+  // Returns the elapsed time since the aId stage was started, in
+  // microseconds, if it has not yet been recorded. Stages with lower ids are
+  // discarded. Otherwise, returns 0.
+ template <typename F>
+ float Record(int64_t aId, F&& aStageMutator) {
+ Maybe<Entry> entry;
+ {
+ MutexAutoLock lock(mMutex);
+ while (!mStages.IsEmpty() && std::get<0>(mStages.Top()) < aId) {
+ mStages.Pop();
+ }
+ if (mStages.IsEmpty()) {
+ return 0.0;
+ }
+ if (std::get<0>(mStages.Top()) != aId) {
+ return 0.0;
+ }
+ entry = Some(mStages.Pop());
+ }
+ const auto& startTime = std::get<1>(*entry);
+ auto& stage = std::get<2>(*entry);
+ MOZ_ASSERT(std::get<0>(*entry) == aId);
+ double elapsedTimeUs = 0.0;
+ if (!startTime.IsNull() && IsMeasurementEnabled()) {
+ const auto now = TimeStamp::Now();
+ elapsedTimeUs = (now - startTime).ToMicroseconds();
+ MOZ_ASSERT(elapsedTimeUs >= 0, "Elapsed time can't be less than 0!");
+ aStageMutator(stage);
+ AUTO_PROFILER_STATS(PROFILER_MARKER_UNTYPED);
+ ::profiler_add_marker(
+ stage.Name(), stage.Category(),
+ MarkerOptions(MarkerTiming::Interval(startTime, now)));
+ }
+ return static_cast<float>(elapsedTimeUs);
+ }
+ float Record(int64_t aId) {
+ return Record(aId, [](auto&) {});
+ }
+
+ protected:
+ using Entry = std::tuple<int64_t, TimeStamp, StageType>;
+
+ struct IdComparator {
+ bool LessThan(const Entry& aTupleA, const Entry& aTupleB) {
+ return std::get<0>(aTupleA) < std::get<0>(aTupleB);
+ }
+ };
+
+ Mutex mMutex{"PerformanceRecorder::mMutex"};
+ nsTPriorityQueue<Entry, IdComparator> mStages MOZ_GUARDED_BY(mMutex);
+};
+
+/**
+ * This class is used to record the time spent on different stages in the media
+ * pipeline. `Record()` needs to be called explicitly to record a profiler
+ * marker registering the time passed since creation. A stage may be mutated in
+ * `Record()` in case data has become available since the recorder started.
+ *
+ * This variant is intended to be created on the stack when a stage starts, then
+ * recorded with `Record()` when the stage is finished.
+ */
+template <typename StageType>
+class PerformanceRecorder : public PerformanceRecorderImpl<StageType> {
+ using Super = PerformanceRecorderImpl<StageType>;
+
+ public:
+  template <typename... Args>
+  explicit PerformanceRecorder(Args... aArgs) {
+    Start(std::move(aArgs)...);
+  }
+
+ private:
+ template <typename... Args>
+ void Start(Args... aArgs) {
+ Super::Start(0, std::move(aArgs)...);
+ }
+
+ public:
+ template <typename F>
+ float Record(F&& aStageMutator) {
+ return Super::Record(0, std::forward<F>(aStageMutator));
+ }
+ float Record() { return Super::Record(0); }
+};
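+
+// A minimal usage sketch (frameHeight, flag and DecodeTheFrame are
+// hypothetical):
+//
+//   PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::RequestDecode,
+//                                                   frameHeight, flag);
+//   DecodeTheFrame();  // the work being measured
+//   perfRecorder.Record();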
+
+/**
+ * This class is used to record the time spent on different stages in the media
+ * pipeline. `Start()` and `Record()` need to be called explicitly to record a
+ * profiler marker registering the time between the two calls. A stage may be
+ * mutated in `Record()` in case data has become available since the recorder
+ * started.
+ *
+ * This variant is intended to be kept as a member in a class and supports async
+ * stages. The async stages may overlap each other. To distinguish different
+ * stages from each other, an int64_t is used as identifier. This is often a
+ * timestamp in microseconds, see TimeUnit::ToMicroseconds.
+ */
+template <typename StageType>
+class PerformanceRecorderMulti : public PerformanceRecorderImpl<StageType> {
+ using Super = PerformanceRecorderImpl<StageType>;
+
+ public:
+ PerformanceRecorderMulti() = default;
+
+ using Super::Record;
+ using Super::Start;
+};
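+
+// A sketch of async use, keyed by a sample's timestamp in microseconds
+// (aSample, aFrame, trackingId and flag are hypothetical):
+//
+//   PerformanceRecorderMulti<DecodeStage> mRecorder;
+//   ...
+//   mRecorder.Start(aSample->mTime.ToMicroseconds(), "MyDecoder"_ns,
+//                   trackingId, flag);
+//   ...
+//   mRecorder.Record(aFrame->mTime.ToMicroseconds(), [&](DecodeStage& aStage) {
+//     aStage.SetResolution(aFrame->mDisplay.width, aFrame->mDisplay.height);
+//   });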
+
+} // namespace mozilla
+
+#endif // mozilla_PerformanceRecorder_h
diff --git a/dom/media/utils/TelemetryProbesReporter.cpp b/dom/media/utils/TelemetryProbesReporter.cpp
new file mode 100644
index 0000000000..dfc4e82241
--- /dev/null
+++ b/dom/media/utils/TelemetryProbesReporter.cpp
@@ -0,0 +1,673 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "TelemetryProbesReporter.h"
+
+#include <cmath>
+
+#include "FrameStatistics.h"
+#include "VideoUtils.h"
+#include "mozilla/EMEUtils.h"
+#include "mozilla/Logging.h"
+#include "mozilla/Telemetry.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "nsThreadUtils.h"
+
+namespace mozilla {
+
+LazyLogModule gTelemetryProbesReporterLog("TelemetryProbesReporter");
+#define LOG(msg, ...) \
+ MOZ_LOG(gTelemetryProbesReporterLog, LogLevel::Debug, \
+ ("TelemetryProbesReporter=%p, " msg, this, ##__VA_ARGS__))
+
+static const char* ToVisibilityStr(
+ TelemetryProbesReporter::Visibility aVisibility) {
+ switch (aVisibility) {
+ case TelemetryProbesReporter::Visibility::eVisible:
+ return "visible";
+ case TelemetryProbesReporter::Visibility::eInvisible:
+ return "invisible";
+ case TelemetryProbesReporter::Visibility::eInitial:
+ return "initial";
+ default:
+ MOZ_ASSERT_UNREACHABLE("invalid visibility");
+ return "unknown";
+ }
+}
+static const char* ToAudibilityStr(
+ TelemetryProbesReporter::AudibleState aAudibleState) {
+ switch (aAudibleState) {
+ case TelemetryProbesReporter::AudibleState::eAudible:
+ return "audible";
+ case TelemetryProbesReporter::AudibleState::eNotAudible:
+ return "inaudible";
+ default:
+ MOZ_ASSERT_UNREACHABLE("invalid audibility");
+ return "unknown";
+ }
+}
+
+static const char* ToMutedStr(bool aMuted) {
+ return aMuted ? "muted" : "unmuted";
+}
+
+MediaContent TelemetryProbesReporter::MediaInfoToMediaContent(
+ const MediaInfo& aInfo) {
+ MediaContent content = MediaContent::MEDIA_HAS_NOTHING;
+ if (aInfo.HasAudio()) {
+ content |= MediaContent::MEDIA_HAS_AUDIO;
+ }
+ if (aInfo.HasVideo()) {
+ content |= MediaContent::MEDIA_HAS_VIDEO;
+ if (aInfo.mVideo.GetAsVideoInfo()->mColorDepth > gfx::ColorDepth::COLOR_8) {
+ content |= MediaContent::MEDIA_HAS_COLOR_DEPTH_ABOVE_8;
+ }
+ }
+ return content;
+}
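+
+// For example, a MediaInfo with a 10-bit video track and no audio maps to
+// MEDIA_HAS_VIDEO | MEDIA_HAS_COLOR_DEPTH_ABOVE_8.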
+
+TelemetryProbesReporter::TelemetryProbesReporter(
+ TelemetryProbesReporterOwner* aOwner)
+ : mOwner(aOwner) {
+ MOZ_ASSERT(mOwner);
+}
+
+void TelemetryProbesReporter::OnPlay(Visibility aVisibility,
+ MediaContent aMediaContent,
+ bool aIsMuted) {
+ LOG("Start time accumulation for total play time");
+
+ AssertOnMainThreadAndNotShutdown();
+ MOZ_ASSERT_IF(mMediaContent & MediaContent::MEDIA_HAS_VIDEO,
+ !mTotalVideoPlayTime.IsStarted());
+ MOZ_ASSERT_IF(mMediaContent & MediaContent::MEDIA_HAS_AUDIO,
+ !mTotalAudioPlayTime.IsStarted());
+
+ if (aMediaContent & MediaContent::MEDIA_HAS_VIDEO) {
+ mTotalVideoPlayTime.Start();
+
+ MOZ_ASSERT_IF(mMediaContent & MediaContent::MEDIA_HAS_COLOR_DEPTH_ABOVE_8,
+ !mTotalVideoHDRPlayTime.IsStarted());
+ if (aMediaContent & MediaContent::MEDIA_HAS_COLOR_DEPTH_ABOVE_8) {
+ mTotalVideoHDRPlayTime.Start();
+ }
+ }
+ if (aMediaContent & MediaContent::MEDIA_HAS_AUDIO) {
+ mTotalAudioPlayTime.Start();
+ }
+
+ OnMediaContentChanged(aMediaContent);
+ OnVisibilityChanged(aVisibility);
+ OnMutedChanged(aIsMuted);
+
+ mOwner->DispatchAsyncTestingEvent(u"moztotalplaytimestarted"_ns);
+
+ mIsPlaying = true;
+}
+
+void TelemetryProbesReporter::OnPause(Visibility aVisibility) {
+ if (!mIsPlaying) {
+ // Not started
+ LOG("TelemetryProbesReporter::OnPause: not started, early return");
+ return;
+ }
+
+ LOG("Pause time accumulation for total play time");
+
+ AssertOnMainThreadAndNotShutdown();
+ MOZ_ASSERT_IF(mMediaContent & MediaContent::MEDIA_HAS_VIDEO,
+ mTotalVideoPlayTime.IsStarted());
+ MOZ_ASSERT_IF(mMediaContent & MediaContent::MEDIA_HAS_AUDIO,
+ mTotalAudioPlayTime.IsStarted());
+
+ if (mMediaContent & MediaContent::MEDIA_HAS_VIDEO) {
+ MOZ_ASSERT_IF(mMediaContent & MediaContent::MEDIA_HAS_COLOR_DEPTH_ABOVE_8,
+ mTotalVideoHDRPlayTime.IsStarted());
+
+ LOG("Pause video time accumulation for total play time");
+ if (mInvisibleVideoPlayTime.IsStarted()) {
+ LOG("Pause invisible video time accumulation for total play time");
+ PauseInvisibleVideoTimeAccumulator();
+ }
+ mTotalVideoPlayTime.Pause();
+ mTotalVideoHDRPlayTime.Pause();
+ }
+ if (mMediaContent & MediaContent::MEDIA_HAS_AUDIO) {
+ LOG("Pause audio time accumulation for total play time");
+ if (mInaudibleAudioPlayTime.IsStarted()) {
+ LOG("Pause audible audio time accumulation for total play time");
+ PauseInaudibleAudioTimeAccumulator();
+ }
+ if (mMutedAudioPlayTime.IsStarted()) {
+ LOG("Pause muted audio time accumulation for total play time");
+ PauseMutedAudioTimeAccumulator();
+ }
+ mTotalAudioPlayTime.Pause();
+ }
+
+ mOwner->DispatchAsyncTestingEvent(u"moztotalplaytimepaused"_ns);
+ ReportTelemetry();
+
+ mIsPlaying = false;
+}
+
+void TelemetryProbesReporter::OnVisibilityChanged(Visibility aVisibility) {
+ AssertOnMainThreadAndNotShutdown();
+ LOG("Corresponding media element visibility change=%s -> %s",
+ ToVisibilityStr(mMediaElementVisibility), ToVisibilityStr(aVisibility));
+ if (aVisibility == Visibility::eInvisible) {
+ StartInvisibleVideoTimeAccumulator();
+ } else {
+ if (aVisibility != Visibility::eInitial) {
+ PauseInvisibleVideoTimeAccumulator();
+ } else {
+ LOG("Visibility was initial, not pausing.");
+ }
+ }
+ mMediaElementVisibility = aVisibility;
+}
+
+void TelemetryProbesReporter::OnAudibleChanged(AudibleState aAudibleState) {
+ AssertOnMainThreadAndNotShutdown();
+ LOG("Audibility changed, now %s", ToAudibilityStr(aAudibleState));
+ if (aAudibleState == AudibleState::eNotAudible) {
+ if (!mInaudibleAudioPlayTime.IsStarted()) {
+ StartInaudibleAudioTimeAccumulator();
+ }
+ } else {
+    // This happens when starting playback; no need to pause, because the
+    // accumulator hasn't been started yet.
+ if (mInaudibleAudioPlayTime.IsStarted()) {
+ PauseInaudibleAudioTimeAccumulator();
+ }
+ }
+}
+
+void TelemetryProbesReporter::OnMutedChanged(bool aMuted) {
+ // There are multiple ways to mute an element:
+ // - volume = 0
+ // - muted = true
+ // - set the enabled property of the playing AudioTrack to false
+  // A Muted -> Muted "transition" can therefore happen, and we can't add
+  // asserts here.
+ AssertOnMainThreadAndNotShutdown();
+ if (!(mMediaContent & MediaContent::MEDIA_HAS_AUDIO)) {
+ return;
+ }
+ LOG("Muted changed, was %s now %s", ToMutedStr(mIsMuted), ToMutedStr(aMuted));
+ if (aMuted) {
+ if (!mMutedAudioPlayTime.IsStarted()) {
+ StartMutedAudioTimeAccumulator();
+ }
+ } else {
+    // This happens when starting playback; no need to pause, because the
+    // accumulator hasn't been started yet.
+ if (mMutedAudioPlayTime.IsStarted()) {
+ PauseMutedAudioTimeAccumulator();
+ }
+ }
+ mIsMuted = aMuted;
+}
+
+void TelemetryProbesReporter::OnMediaContentChanged(MediaContent aContent) {
+ AssertOnMainThreadAndNotShutdown();
+ if (aContent == mMediaContent) {
+ return;
+ }
+ if (mMediaContent & MediaContent::MEDIA_HAS_VIDEO &&
+ !(aContent & MediaContent::MEDIA_HAS_VIDEO)) {
+ LOG("Video track removed from media.");
+ if (mInvisibleVideoPlayTime.IsStarted()) {
+ PauseInvisibleVideoTimeAccumulator();
+ }
+ if (mTotalVideoPlayTime.IsStarted()) {
+ mTotalVideoPlayTime.Pause();
+ mTotalVideoHDRPlayTime.Pause();
+ }
+ }
+ if (mMediaContent & MediaContent::MEDIA_HAS_AUDIO &&
+ !(aContent & MediaContent::MEDIA_HAS_AUDIO)) {
+ LOG("Audio track removed from media.");
+ if (mTotalAudioPlayTime.IsStarted()) {
+ mTotalAudioPlayTime.Pause();
+ }
+ if (mInaudibleAudioPlayTime.IsStarted()) {
+ mInaudibleAudioPlayTime.Pause();
+ }
+ if (mMutedAudioPlayTime.IsStarted()) {
+ mMutedAudioPlayTime.Pause();
+ }
+ }
+ if (!(mMediaContent & MediaContent::MEDIA_HAS_VIDEO) &&
+ aContent & MediaContent::MEDIA_HAS_VIDEO) {
+ LOG("Video track added to media.");
+ if (mIsPlaying) {
+ mTotalVideoPlayTime.Start();
+ if (mMediaElementVisibility == Visibility::eInvisible) {
+ StartInvisibleVideoTimeAccumulator();
+ }
+ }
+ }
+ if (!(mMediaContent & MediaContent::MEDIA_HAS_COLOR_DEPTH_ABOVE_8) &&
+ aContent & MediaContent::MEDIA_HAS_COLOR_DEPTH_ABOVE_8) {
+ if (mIsPlaying) {
+ mTotalVideoHDRPlayTime.Start();
+ }
+ }
+ if (!(mMediaContent & MediaContent::MEDIA_HAS_AUDIO) &&
+ aContent & MediaContent::MEDIA_HAS_AUDIO) {
+ LOG("Audio track added to media.");
+ if (mIsPlaying) {
+ mTotalAudioPlayTime.Start();
+ if (mIsMuted) {
+ StartMutedAudioTimeAccumulator();
+ }
+ }
+ }
+
+ mMediaContent = aContent;
+}
+
+void TelemetryProbesReporter::OnDecodeSuspended() {
+ AssertOnMainThreadAndNotShutdown();
+  // Suspended time should only be counted after we have started accumulating
+  // invisible time.
+ if (!mInvisibleVideoPlayTime.IsStarted()) {
+ return;
+ }
+ LOG("Start time accumulation for video decoding suspension");
+ mVideoDecodeSuspendedTime.Start();
+ mOwner->DispatchAsyncTestingEvent(u"mozvideodecodesuspendedstarted"_ns);
+}
+
+void TelemetryProbesReporter::OnDecodeResumed() {
+ AssertOnMainThreadAndNotShutdown();
+ if (!mVideoDecodeSuspendedTime.IsStarted()) {
+ return;
+ }
+ LOG("Pause time accumulation for video decoding suspension");
+ mVideoDecodeSuspendedTime.Pause();
+ mOwner->DispatchAsyncTestingEvent(u"mozvideodecodesuspendedpaused"_ns);
+}
+
+void TelemetryProbesReporter::OnShutdown() {
+ AssertOnMainThreadAndNotShutdown();
+ LOG("Shutdown");
+ OnPause(Visibility::eInvisible);
+ mOwner = nullptr;
+}
+
+void TelemetryProbesReporter::StartInvisibleVideoTimeAccumulator() {
+ AssertOnMainThreadAndNotShutdown();
+ if (!mTotalVideoPlayTime.IsStarted() || mInvisibleVideoPlayTime.IsStarted() ||
+ !HasOwnerHadValidVideo()) {
+ return;
+ }
+ LOG("Start time accumulation for invisible video");
+ mInvisibleVideoPlayTime.Start();
+ mOwner->DispatchAsyncTestingEvent(u"mozinvisibleplaytimestarted"_ns);
+}
+
+void TelemetryProbesReporter::PauseInvisibleVideoTimeAccumulator() {
+ AssertOnMainThreadAndNotShutdown();
+ if (!mInvisibleVideoPlayTime.IsStarted()) {
+ return;
+ }
+ OnDecodeResumed();
+ LOG("Pause time accumulation for invisible video");
+ mInvisibleVideoPlayTime.Pause();
+ mOwner->DispatchAsyncTestingEvent(u"mozinvisibleplaytimepaused"_ns);
+}
+
+void TelemetryProbesReporter::StartInaudibleAudioTimeAccumulator() {
+ AssertOnMainThreadAndNotShutdown();
+ MOZ_ASSERT(!mInaudibleAudioPlayTime.IsStarted());
+ mInaudibleAudioPlayTime.Start();
+ mOwner->DispatchAsyncTestingEvent(u"mozinaudibleaudioplaytimestarted"_ns);
+}
+
+void TelemetryProbesReporter::PauseInaudibleAudioTimeAccumulator() {
+ AssertOnMainThreadAndNotShutdown();
+ MOZ_ASSERT(mInaudibleAudioPlayTime.IsStarted());
+ mInaudibleAudioPlayTime.Pause();
+ mOwner->DispatchAsyncTestingEvent(u"mozinaudibleaudioplaytimepaused"_ns);
+}
+
+void TelemetryProbesReporter::StartMutedAudioTimeAccumulator() {
+ AssertOnMainThreadAndNotShutdown();
+ MOZ_ASSERT(!mMutedAudioPlayTime.IsStarted());
+ mMutedAudioPlayTime.Start();
+ mOwner->DispatchAsyncTestingEvent(u"mozmutedaudioplaytimestarted"_ns);
+}
+
+void TelemetryProbesReporter::PauseMutedAudioTimeAccumulator() {
+ AssertOnMainThreadAndNotShutdown();
+ MOZ_ASSERT(mMutedAudioPlayTime.IsStarted());
+ mMutedAudioPlayTime.Pause();
+ mOwner->DispatchAsyncTestingEvent(u"mozmutedeaudioplaytimepaused"_ns);
+}
+
+bool TelemetryProbesReporter::HasOwnerHadValidVideo() const {
+  // Checking both the image and display dimensions helps address cases such
+  // as suspending, where we use a null decoder. In that case the null decoder
+  // produces 0x0 video frames, which might cause layout to resize the display
+  // size, but the image dimensions would still be non-null.
+ const VideoInfo info = mOwner->GetMediaInfo().mVideo;
+ return (info.mDisplay.height > 0 && info.mDisplay.width > 0) ||
+ (info.mImage.height > 0 && info.mImage.width > 0);
+}
+
+bool TelemetryProbesReporter::HasOwnerHadValidMedia() const {
+ return mMediaContent != MediaContent::MEDIA_HAS_NOTHING;
+}
+
+void TelemetryProbesReporter::AssertOnMainThreadAndNotShutdown() const {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ASSERT(mOwner, "Already shutdown?");
+}
+
+void TelemetryProbesReporter::ReportTelemetry() {
+ AssertOnMainThreadAndNotShutdown();
+  // ReportResultForAudio needs to be called first, because it can use the
+  // video play time, which is reset in ReportResultForVideo.
+ ReportResultForAudio();
+ ReportResultForVideo();
+ mOwner->DispatchAsyncTestingEvent(u"mozreportedtelemetry"_ns);
+}
+
+void TelemetryProbesReporter::ReportResultForVideo() {
+  // Don't report results for video that never had valid video frames.
+ if (!HasOwnerHadValidVideo()) {
+ return;
+ }
+
+ const double totalVideoPlayTimeS = mTotalVideoPlayTime.GetAndClearTotal();
+ const double invisiblePlayTimeS = mInvisibleVideoPlayTime.GetAndClearTotal();
+ const double videoDecodeSuspendTimeS =
+ mVideoDecodeSuspendedTime.GetAndClearTotal();
+ const double totalVideoHDRPlayTimeS =
+ mTotalVideoHDRPlayTime.GetAndClearTotal();
+
+ // No need to report result for video that didn't start playing.
+ if (totalVideoPlayTimeS == 0.0) {
+ return;
+ }
+ MOZ_ASSERT(totalVideoPlayTimeS >= invisiblePlayTimeS);
+
+ LOG("VIDEO_PLAY_TIME_S = %f", totalVideoPlayTimeS);
+ Telemetry::Accumulate(Telemetry::VIDEO_PLAY_TIME_MS,
+ SECONDS_TO_MS(totalVideoPlayTimeS));
+
+ LOG("VIDEO_HIDDEN_PLAY_TIME_S = %f", invisiblePlayTimeS);
+ Telemetry::Accumulate(Telemetry::VIDEO_HIDDEN_PLAY_TIME_MS,
+ SECONDS_TO_MS(invisiblePlayTimeS));
+
+ // We only want to accumulate non-zero samples for HDR playback.
+ // This is different from the other timings tracked here, but
+ // we don't need 0-length play times to do our calculations.
+ if (totalVideoHDRPlayTimeS > 0.0) {
+ LOG("VIDEO_HDR_PLAY_TIME_S = %f", totalVideoHDRPlayTimeS);
+ Telemetry::Accumulate(Telemetry::VIDEO_HDR_PLAY_TIME_MS,
+ SECONDS_TO_MS(totalVideoHDRPlayTimeS));
+ }
+
+ if (mOwner->IsEncrypted()) {
+ LOG("VIDEO_ENCRYPTED_PLAY_TIME_S = %f", totalVideoPlayTimeS);
+ Telemetry::Accumulate(Telemetry::VIDEO_ENCRYPTED_PLAY_TIME_MS,
+ SECONDS_TO_MS(totalVideoPlayTimeS));
+ }
+
+ // Report result for video using CDM
+ auto keySystem = mOwner->GetKeySystem();
+ if (keySystem) {
+ if (IsClearkeyKeySystem(*keySystem)) {
+ LOG("VIDEO_CLEARKEY_PLAY_TIME_S = %f", totalVideoPlayTimeS);
+ Telemetry::Accumulate(Telemetry::VIDEO_CLEARKEY_PLAY_TIME_MS,
+ SECONDS_TO_MS(totalVideoPlayTimeS));
+
+ } else if (IsWidevineKeySystem(*keySystem)) {
+ LOG("VIDEO_WIDEVINE_PLAY_TIME_S = %f", totalVideoPlayTimeS);
+ Telemetry::Accumulate(Telemetry::VIDEO_WIDEVINE_PLAY_TIME_MS,
+ SECONDS_TO_MS(totalVideoPlayTimeS));
+ }
+ }
+
+ // Keyed by audio+video or video alone, and by a resolution range.
+ const MediaInfo& info = mOwner->GetMediaInfo();
+ nsCString key(info.HasAudio() ? "AV," : "V,");
+ static const struct {
+ int32_t mH;
+ const char* mRes;
+ } sResolutions[] = {{240, "0<h<=240"}, {480, "240<h<=480"},
+ {576, "480<h<=576"}, {720, "576<h<=720"},
+ {1080, "720<h<=1080"}, {2160, "1080<h<=2160"}};
+ const char* resolution = "h>2160";
+ int32_t height = info.mVideo.mImage.height;
+ for (const auto& res : sResolutions) {
+ if (height <= res.mH) {
+ resolution = res.mRes;
+ break;
+ }
+ }
+ key.AppendASCII(resolution);
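+  // For example, 1080p video with an audio track is keyed as
+  // "AV,720<h<=1080"; audio-less 360p video is keyed as "V,240<h<=480".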
+
+ auto visiblePlayTimeS = totalVideoPlayTimeS - invisiblePlayTimeS;
+ LOG("VIDEO_VISIBLE_PLAY_TIME = %f, keys: '%s' and 'All'", visiblePlayTimeS,
+ key.get());
+ Telemetry::Accumulate(Telemetry::VIDEO_VISIBLE_PLAY_TIME_MS, key,
+ SECONDS_TO_MS(visiblePlayTimeS));
+ // Also accumulate result in an "All" key.
+ Telemetry::Accumulate(Telemetry::VIDEO_VISIBLE_PLAY_TIME_MS, "All"_ns,
+ SECONDS_TO_MS(visiblePlayTimeS));
+
+ const uint32_t hiddenPercentage =
+ lround(invisiblePlayTimeS / totalVideoPlayTimeS * 100.0);
+ Telemetry::Accumulate(Telemetry::VIDEO_HIDDEN_PLAY_TIME_PERCENTAGE, key,
+ hiddenPercentage);
+ // Also accumulate all percentages in an "All" key.
+ Telemetry::Accumulate(Telemetry::VIDEO_HIDDEN_PLAY_TIME_PERCENTAGE, "All"_ns,
+ hiddenPercentage);
+ LOG("VIDEO_HIDDEN_PLAY_TIME_PERCENTAGE = %u, keys: '%s' and 'All'",
+ hiddenPercentage, key.get());
+
+ const uint32_t videoDecodeSuspendPercentage =
+ lround(videoDecodeSuspendTimeS / totalVideoPlayTimeS * 100.0);
+ Telemetry::Accumulate(Telemetry::VIDEO_INFERRED_DECODE_SUSPEND_PERCENTAGE,
+ key, videoDecodeSuspendPercentage);
+ Telemetry::Accumulate(Telemetry::VIDEO_INFERRED_DECODE_SUSPEND_PERCENTAGE,
+ "All"_ns, videoDecodeSuspendPercentage);
+ LOG("VIDEO_INFERRED_DECODE_SUSPEND_PERCENTAGE = %u, keys: '%s' and 'All'",
+ videoDecodeSuspendPercentage, key.get());
+
+ ReportResultForVideoFrameStatistics(totalVideoPlayTimeS, key);
+}
+
+void TelemetryProbesReporter::ReportResultForAudio() {
+  // Don't record telemetry for media that didn't have a valid audio or video
+  // track to play, or that never played.
+ if (!HasOwnerHadValidMedia() || (mTotalAudioPlayTime.PeekTotal() == 0.0 &&
+ mTotalVideoPlayTime.PeekTotal() == 0.0)) {
+ return;
+ }
+
+ nsCString key;
+ nsCString avKey;
+ const double totalAudioPlayTimeS = mTotalAudioPlayTime.GetAndClearTotal();
+ const double inaudiblePlayTimeS = mInaudibleAudioPlayTime.GetAndClearTotal();
+ const double mutedPlayTimeS = mMutedAudioPlayTime.GetAndClearTotal();
+ const double audiblePlayTimeS = totalAudioPlayTimeS - inaudiblePlayTimeS;
+ const double unmutedPlayTimeS = totalAudioPlayTimeS - mutedPlayTimeS;
+ const uint32_t audiblePercentage =
+ lround(audiblePlayTimeS / totalAudioPlayTimeS * 100.0);
+ const uint32_t unmutedPercentage =
+ lround(unmutedPlayTimeS / totalAudioPlayTimeS * 100.0);
+ const double totalVideoPlayTimeS = mTotalVideoPlayTime.PeekTotal();
+
+ // Key semantics:
+ // - AV: Audible audio + video
+ // - IV: Inaudible audio + video
+ // - MV: Muted audio + video
+ // - A: Audible audio-only
+ // - I: Inaudible audio-only
+ // - M: Muted audio-only
+ // - V: Video-only
+ if (mMediaContent & MediaContent::MEDIA_HAS_AUDIO) {
+ if (audiblePercentage == 0) {
+ // Media element had an audio track, but it was inaudible throughout
+ key.AppendASCII("I");
+ } else if (unmutedPercentage == 0) {
+ // Media element had an audio track, but it was muted throughout
+ key.AppendASCII("M");
+ } else {
+ // Media element had an audible audio track
+ key.AppendASCII("A");
+ }
+ avKey.AppendASCII("A");
+ }
+ if (mMediaContent & MediaContent::MEDIA_HAS_VIDEO) {
+ key.AppendASCII("V");
+ avKey.AppendASCII("V");
+ }
+
+ LOG("Key: %s", key.get());
+
+ if (mMediaContent & MediaContent::MEDIA_HAS_AUDIO) {
+ LOG("Audio:\ntotal: %lf\naudible: %lf\ninaudible: %lf\nmuted: "
+ "%lf\npercentage audible: "
+ "%u\npercentage unmuted: %u\n",
+ totalAudioPlayTimeS, audiblePlayTimeS, inaudiblePlayTimeS,
+ mutedPlayTimeS, audiblePercentage, unmutedPercentage);
+ Telemetry::Accumulate(Telemetry::MEDIA_PLAY_TIME_MS, key,
+ SECONDS_TO_MS(totalAudioPlayTimeS));
+ Telemetry::Accumulate(Telemetry::MUTED_PLAY_TIME_PERCENT, avKey,
+ 100 - unmutedPercentage);
+ Telemetry::Accumulate(Telemetry::AUDIBLE_PLAY_TIME_PERCENT, avKey,
+ audiblePercentage);
+ } else {
+ MOZ_ASSERT(mMediaContent & MediaContent::MEDIA_HAS_VIDEO);
+ Telemetry::Accumulate(Telemetry::MEDIA_PLAY_TIME_MS, key,
+ SECONDS_TO_MS(totalVideoPlayTimeS));
+ }
+}
+
+void TelemetryProbesReporter::ReportResultForVideoFrameStatistics(
+ double aTotalPlayTimeS, const nsCString& key) {
+ FrameStatistics* stats = mOwner->GetFrameStatistics();
+ if (!stats) {
+ return;
+ }
+
+ FrameStatisticsData data = stats->GetFrameStatisticsData();
+ if (data.mInterKeyframeCount != 0) {
+ const uint32_t average_ms = uint32_t(
+ std::min<uint64_t>(lround(double(data.mInterKeyframeSum_us) /
+ double(data.mInterKeyframeCount) / 1000.0),
+ UINT32_MAX));
+ Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_AVERAGE_MS, key,
+ average_ms);
+ Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_AVERAGE_MS, "All"_ns,
+ average_ms);
+ LOG("VIDEO_INTER_KEYFRAME_AVERAGE_MS = %u, keys: '%s' and 'All'",
+ average_ms, key.get());
+
+ const uint32_t max_ms = uint32_t(std::min<uint64_t>(
+ (data.mInterKeyFrameMax_us + 500) / 1000, UINT32_MAX));
+ Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, key, max_ms);
+ Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, "All"_ns,
+ max_ms);
+ LOG("VIDEO_INTER_KEYFRAME_MAX_MS = %u, keys: '%s' and 'All'", max_ms,
+ key.get());
+ } else {
+    // Here, we have played *some* of the video, but didn't get more than 1
+    // keyframe. Report '0' if we have played for longer than the
+    // video-decode-suspend delay (showing recovery would be difficult).
+ const uint32_t suspendDelay_ms =
+ StaticPrefs::media_suspend_bkgnd_video_delay_ms();
+ if (uint32_t(aTotalPlayTimeS * 1000.0) > suspendDelay_ms) {
+ Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, key, 0);
+ Telemetry::Accumulate(Telemetry::VIDEO_INTER_KEYFRAME_MAX_MS, "All"_ns,
+ 0);
+ LOG("VIDEO_INTER_KEYFRAME_MAX_MS = 0 (only 1 keyframe), keys: '%s' and "
+ "'All'",
+ key.get());
+ }
+ }
+
+ const uint64_t parsedFrames = stats->GetParsedFrames();
+ if (parsedFrames) {
+ const uint64_t droppedFrames = stats->GetDroppedFrames();
+ MOZ_ASSERT(droppedFrames <= parsedFrames);
+ // Dropped frames <= total frames, so 'percentage' cannot be higher than
+ // 100 and therefore can fit in a uint32_t (that Telemetry takes).
+ const uint32_t percentage = 100 * droppedFrames / parsedFrames;
+ LOG("DROPPED_FRAMES_IN_VIDEO_PLAYBACK = %u", percentage);
+ Telemetry::Accumulate(Telemetry::VIDEO_DROPPED_FRAMES_PROPORTION,
+ percentage);
+ const uint32_t proportion = 10000 * droppedFrames / parsedFrames;
+ Telemetry::Accumulate(
+ Telemetry::VIDEO_DROPPED_FRAMES_PROPORTION_EXPONENTIAL, proportion);
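+    // For example, 3 dropped frames out of 600 parsed yield percentage = 0
+    // (integer division) but proportion = 50; the 10000-based probes preserve
+    // such small drop rates.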
+
+ {
+ const uint64_t droppedFrames = stats->GetDroppedDecodedFrames();
+ const uint32_t proportion = 10000 * droppedFrames / parsedFrames;
+ Telemetry::Accumulate(
+ Telemetry::VIDEO_DROPPED_DECODED_FRAMES_PROPORTION_EXPONENTIAL,
+ proportion);
+ }
+ {
+ const uint64_t droppedFrames = stats->GetDroppedSinkFrames();
+ const uint32_t proportion = 10000 * droppedFrames / parsedFrames;
+ Telemetry::Accumulate(
+ Telemetry::VIDEO_DROPPED_SINK_FRAMES_PROPORTION_EXPONENTIAL,
+ proportion);
+ }
+ {
+ const uint64_t droppedFrames = stats->GetDroppedCompositorFrames();
+ const uint32_t proportion = 10000 * droppedFrames / parsedFrames;
+ Telemetry::Accumulate(
+ Telemetry::VIDEO_DROPPED_COMPOSITOR_FRAMES_PROPORTION_EXPONENTIAL,
+ proportion);
+ }
+ }
+}
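The per-10000 scaling feeding the *_EXPONENTIAL probes deserves a note: a plain integer percentage truncates small drop rates to zero. A minimal standalone sketch of the same arithmetic (the helper name and sample numbers are illustrative, not part of the patch):

```cpp
#include <cassert>
#include <cstdint>

// Illustrative helper mirroring the scaling above: reporting
// dropped/parsed per 10000 keeps small drop rates visible after
// integer division.
uint32_t DroppedPermyriad(uint64_t aDropped, uint64_t aParsed) {
  assert(aParsed != 0 && aDropped <= aParsed);
  return uint32_t(10000 * aDropped / aParsed);
}

int main() {
  // 7 dropped out of 12000 parsed frames:
  assert(100 * 7 / 12000 == 0);             // percentage truncates to 0
  assert(DroppedPermyriad(7, 12000) == 5);  // 0.05% survives as 5/10000
}
```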
+
+double TelemetryProbesReporter::GetTotalVideoPlayTimeInSeconds() const {
+ return mTotalVideoPlayTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetTotalVideoHDRPlayTimeInSeconds() const {
+ return mTotalVideoHDRPlayTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetVisibleVideoPlayTimeInSeconds() const {
+ return GetTotalVideoPlayTimeInSeconds() -
+ GetInvisibleVideoPlayTimeInSeconds();
+}
+
+double TelemetryProbesReporter::GetInvisibleVideoPlayTimeInSeconds() const {
+ return mInvisibleVideoPlayTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetVideoDecodeSuspendedTimeInSeconds() const {
+ return mVideoDecodeSuspendedTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetTotalAudioPlayTimeInSeconds() const {
+ return mTotalAudioPlayTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetInaudiblePlayTimeInSeconds() const {
+ return mInaudibleAudioPlayTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetMutedPlayTimeInSeconds() const {
+ return mMutedAudioPlayTime.PeekTotal();
+}
+
+double TelemetryProbesReporter::GetAudiblePlayTimeInSeconds() const {
+ return GetTotalAudioPlayTimeInSeconds() - GetInaudiblePlayTimeInSeconds();
+}
+
+#undef LOG
+} // namespace mozilla
diff --git a/dom/media/utils/TelemetryProbesReporter.h b/dom/media/utils/TelemetryProbesReporter.h
new file mode 100644
index 0000000000..73a73f0403
--- /dev/null
+++ b/dom/media/utils/TelemetryProbesReporter.h
@@ -0,0 +1,172 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_TelemetryProbesReporter_H_
+#define DOM_TelemetryProbesReporter_H_
+
+#include "MediaInfo.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/AwakeTimeStamp.h"
+#include "AudioChannelService.h"
+#include "nsISupportsImpl.h"
+
+namespace mozilla {
+class FrameStatistics;
+
+class TelemetryProbesReporterOwner {
+ public:
+ virtual Maybe<nsAutoString> GetKeySystem() const = 0;
+ virtual MediaInfo GetMediaInfo() const = 0;
+ virtual FrameStatistics* GetFrameStatistics() const = 0;
+ virtual bool IsEncrypted() const = 0;
+ virtual void DispatchAsyncTestingEvent(const nsAString& aName) = 0;
+};
+
+enum class MediaContent : uint8_t {
+ MEDIA_HAS_NOTHING = (0 << 0),
+ MEDIA_HAS_VIDEO = (1 << 0),
+ MEDIA_HAS_AUDIO = (1 << 1),
+ MEDIA_HAS_COLOR_DEPTH_ABOVE_8 = (1 << 2),
+};
+
+MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(MediaContent)
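MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS generates the bitwise operators that let MediaContent act as a flag set. A freestanding sketch of the same pattern, with hand-written operators standing in for what the macro expands to (names here are illustrative):

```cpp
#include <cstdint>
#include <cstdio>

// Standalone sketch of the bitflag pattern used by MediaContent above.
enum class Content : uint8_t {
  Nothing = 0,
  Video = 1 << 0,
  Audio = 1 << 1,
  HighColorDepth = 1 << 2,
};

constexpr Content operator|(Content a, Content b) {
  return Content(uint8_t(a) | uint8_t(b));
}
constexpr Content operator&(Content a, Content b) {
  return Content(uint8_t(a) & uint8_t(b));
}
constexpr Content& operator|=(Content& a, Content b) { return a = a | b; }
constexpr bool Any(Content a) { return uint8_t(a) != 0; }

int main() {
  Content c = Content::Video | Content::Audio;
  if (Any(c & Content::Video)) {
    std::puts("has video");  // taken: the video bit is set
  }
  if (!Any(c & Content::HighColorDepth)) {
    std::puts("SDR");  // taken: the color-depth bit is clear
  }
}
```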
+
+/**
+ * Collects and reports telemetry probes on behalf of its owner, which must
+ * inherit from TelemetryProbesReporterOwner. It is used by HTMLMediaElement;
+ * each element has one corresponding reporter.
+ */
+class TelemetryProbesReporter final {
+ public:
+ explicit TelemetryProbesReporter(TelemetryProbesReporterOwner* aOwner);
+ ~TelemetryProbesReporter() = default;
+
+ enum class Visibility {
+ eInitial,
+ eVisible,
+ eInvisible,
+ };
+
+ static MediaContent MediaInfoToMediaContent(const MediaInfo& aInfo);
+
+ using AudibleState = dom::AudioChannelService::AudibleState;
+
+ // State transitions
+ void OnPlay(Visibility aVisibility, MediaContent aContent, bool aIsMuted);
+ void OnPause(Visibility aVisibility);
+ void OnShutdown();
+
+ void OnVisibilityChanged(Visibility aVisibility);
+ void OnAudibleChanged(AudibleState aAudible);
+ void OnMediaContentChanged(MediaContent aContent);
+ void OnMutedChanged(bool aMuted);
+ void OnDecodeSuspended();
+ void OnDecodeResumed();
+
+ double GetTotalVideoPlayTimeInSeconds() const;
+ double GetTotalVideoHDRPlayTimeInSeconds() const;
+ double GetVisibleVideoPlayTimeInSeconds() const;
+ double GetInvisibleVideoPlayTimeInSeconds() const;
+ double GetVideoDecodeSuspendedTimeInSeconds() const;
+
+ double GetTotalAudioPlayTimeInSeconds() const;
+ double GetInaudiblePlayTimeInSeconds() const;
+ double GetAudiblePlayTimeInSeconds() const;
+ double GetMutedPlayTimeInSeconds() const;
+
+ private:
+ void StartInvisibleVideoTimeAccumulator();
+ void PauseInvisibleVideoTimeAccumulator();
+ void StartInaudibleAudioTimeAccumulator();
+ void PauseInaudibleAudioTimeAccumulator();
+ void StartMutedAudioTimeAccumulator();
+ void PauseMutedAudioTimeAccumulator();
+ bool HasOwnerHadValidVideo() const;
+ bool HasOwnerHadValidMedia() const;
+ void AssertOnMainThreadAndNotShutdown() const;
+
+ void ReportTelemetry();
+ void ReportResultForVideo();
+ void ReportResultForAudio();
+ void ReportResultForVideoFrameStatistics(double aTotalPlayTimeS,
+ const nsCString& key);
+
+ // Helper class to measure times for playback telemetry stats
+ class TimeDurationAccumulator {
+ public:
+ TimeDurationAccumulator() = default;
+ void Start() {
+ if (IsStarted()) {
+ return;
+ }
+ mStartTime = Some(AwakeTimeStamp::NowLoRes());
+ }
+ void Pause() {
+ if (!IsStarted()) {
+ return;
+ }
+      // Accumulate across start/pause cycles; '=' would drop earlier time.
+      mSum += (AwakeTimeStamp::NowLoRes() - mStartTime.value());
+ mStartTime = Nothing();
+ }
+ bool IsStarted() const { return mStartTime.isSome(); }
+
+ double GetAndClearTotal() {
+ MOZ_ASSERT(!IsStarted(), "only call this when accumulator is paused");
+ double total = mSum.ToSeconds();
+ mStartTime = Nothing();
+ mSum = AwakeTimeDuration();
+ return total;
+ }
+
+    double PeekTotal() const {
+      if (!IsStarted()) {
+        return mSum.ToSeconds();
+      }
+      // Include both the accumulated sum and the in-progress segment.
+      return (mSum + (AwakeTimeStamp::NowLoRes() - mStartTime.value()))
+          .ToSeconds();
+    }
+
+ private:
+ Maybe<AwakeTimeStamp> mStartTime;
+ AwakeTimeDuration mSum;
+ };
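For readers outside Gecko, the accumulator above maps onto a small portable pattern. A sketch using std::chrono (AwakeTimeStamp is Gecko-specific and, unlike steady_clock, excludes time the machine spends asleep; DurationAccumulator below is illustrative only):

```cpp
#include <chrono>
#include <optional>

// Portable sketch of the start/pause/peek accumulator pattern above.
class DurationAccumulator {
  using Clock = std::chrono::steady_clock;
  std::optional<Clock::time_point> mStart;
  Clock::duration mSum{0};

 public:
  void Start() {
    if (!mStart) mStart = Clock::now();
  }
  void Pause() {
    if (!mStart) return;
    mSum += Clock::now() - *mStart;  // accumulate, don't overwrite
    mStart.reset();
  }
  // Running total including any in-progress segment.
  double PeekTotalSeconds() const {
    auto total = mSum;
    if (mStart) total += Clock::now() - *mStart;
    return std::chrono::duration<double>(total).count();
  }
};
```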
+
+  // The owner is the HTMLMediaElement, which is guaranteed to outlive this
+  // reporter.
+ TelemetryProbesReporterOwner* mOwner;
+
+ // Total time an element has spent on playing video.
+ TimeDurationAccumulator mTotalVideoPlayTime;
+
+ // Total time an element has spent on playing video that has a color depth
+ // greater than 8, which is likely HDR video.
+ TimeDurationAccumulator mTotalVideoHDRPlayTime;
+
+ // Total time an element has spent on playing audio
+ TimeDurationAccumulator mTotalAudioPlayTime;
+
+ // Total time a VIDEO element has spent playing while the corresponding media
+ // element is invisible.
+ TimeDurationAccumulator mInvisibleVideoPlayTime;
+
+ // Total time an element has spent playing audio that was not audible
+ TimeDurationAccumulator mInaudibleAudioPlayTime;
+
+ // Total time an element with an audio track has spent muted
+ TimeDurationAccumulator mMutedAudioPlayTime;
+
+ // Total time a VIDEO has spent in video-decode-suspend mode.
+ TimeDurationAccumulator mVideoDecodeSuspendedTime;
+
+ Visibility mMediaElementVisibility = Visibility::eInitial;
+
+ MediaContent mMediaContent = MediaContent::MEDIA_HAS_NOTHING;
+
+ bool mIsPlaying = false;
+
+ bool mIsMuted = false;
+};
+
+} // namespace mozilla
+
+#endif // DOM_TelemetryProbesReporter_H_
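Since TelemetryProbesReporterOwner is a pure-virtual interface, a hypothetical minimal implementation, e.g. as a test double, only has to stub five methods. FakeOwner below is illustrative and not part of the patch; it assumes only the in-tree types this header already includes:

```cpp
#include "TelemetryProbesReporter.h"

// Hypothetical test double for the owner interface; held by raw pointer
// by the reporter, so it must simply outlive it.
class FakeOwner final : public mozilla::TelemetryProbesReporterOwner {
 public:
  mozilla::Maybe<nsAutoString> GetKeySystem() const override {
    return mozilla::Nothing();
  }
  mozilla::MediaInfo GetMediaInfo() const override {
    return mozilla::MediaInfo();
  }
  mozilla::FrameStatistics* GetFrameStatistics() const override {
    return nullptr;  // reporter then skips the frame-statistics probes
  }
  bool IsEncrypted() const override { return false; }
  void DispatchAsyncTestingEvent(const nsAString&) override {}
};
```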
diff --git a/dom/media/utils/gtest/TestPerformanceRecorder.cpp b/dom/media/utils/gtest/TestPerformanceRecorder.cpp
new file mode 100644
index 0000000000..ae5d22a916
--- /dev/null
+++ b/dom/media/utils/gtest/TestPerformanceRecorder.cpp
@@ -0,0 +1,110 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+#include <chrono>
+#include <thread>
+
+#include "PerformanceRecorder.h"
+#include "gtest/gtest.h"
+#include "nsString.h"
+
+using namespace mozilla;
+
+class PerformanceRecorderWrapper : public PerformanceRecorder<PlaybackStage> {
+ public:
+ PerformanceRecorderWrapper(MediaStage aStage, int32_t aHeight)
+ : PerformanceRecorder(aStage, aHeight) {}
+
+ static void EnableMeasurementOnNonMarkerSituation() {
+ sEnableMeasurementForTesting = true;
+ }
+};
+
+TEST(PerformanceRecorder, TestResolution)
+{
+ PerformanceRecorderWrapper::EnableMeasurementOnNonMarkerSituation();
+
+ static const struct {
+ const int32_t mH;
+ const char* mRes;
+ } resolutions[] = {{0, "A:0"},
+ {240, "V:0<h<=240"},
+ {480, "V:240<h<=480"},
+ {576, "V:480<h<=576"},
+ {720, "V:576<h<=720"},
+ {1080, "V:720<h<=1080"},
+ {1440, "V:1080<h<=1440"},
+ {2160, "V:1440<h<=2160"},
+ {4320, "V:h>2160"}};
+
+ const MediaStage stage = MediaStage::RequestDecode;
+ for (auto&& res : resolutions) {
+ PerformanceRecorderWrapper w(stage, res.mH);
+ nsCString name;
+ w.Record([&](auto& aStage) { name = nsCString(aStage.Name()); });
+ ASSERT_NE(name.Find(res.mRes), kNotFound);
+ }
+}
+
+TEST(PerformanceRecorder, TestMoveOperation)
+{
+ PerformanceRecorderWrapper::EnableMeasurementOnNonMarkerSituation();
+
+ const MediaStage stage = MediaStage::RequestDecode;
+ const uint32_t resolution = 1080;
+ PerformanceRecorderWrapper w1(stage, resolution);
+ std::this_thread::sleep_for(std::chrono::milliseconds(1));
+
+  // After the move, w1 no longer measures data; w2 takes over.
+ PerformanceRecorderWrapper w2(std::move(w1));
+ ASSERT_DOUBLE_EQ(w1.Record(), 0.0);
+ ASSERT_TRUE(w2.Record() > 0.0);
+}
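The behavior asserted above follows the common "moved-from becomes inert" pattern. A freestanding sketch under that assumption (MeasureToken is illustrative, not the real PerformanceRecorder):

```cpp
#include <chrono>
#include <optional>
#include <utility>

// Sketch: a measurement token whose moved-from state stops measuring,
// mirroring what the test above asserts.
class MeasureToken {
  using Clock = std::chrono::steady_clock;
  std::optional<Clock::time_point> mStart{Clock::now()};

 public:
  MeasureToken() = default;
  MeasureToken(MeasureToken&& aOther)
      : mStart(std::exchange(aOther.mStart, std::nullopt)) {}

  // Returns elapsed seconds once; 0.0 if moved-from or already recorded.
  double Record() {
    if (!mStart) {
      return 0.0;
    }
    auto start = *std::exchange(mStart, std::nullopt);
    return std::chrono::duration<double>(Clock::now() - start).count();
  }
};
```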
+
+TEST(PerformanceRecorder, TestRecordInvalidation)
+{
+ PerformanceRecorderWrapper::EnableMeasurementOnNonMarkerSituation();
+
+ const MediaStage stage = MediaStage::RequestDecode;
+ const uint32_t resolution = 1080;
+ PerformanceRecorderWrapper w(stage, resolution);
+ std::this_thread::sleep_for(std::chrono::milliseconds(1));
+
+ ASSERT_TRUE(w.Record() > 0.0);
+
+  // w has already been recorded, so it won't measure any further data.
+  ASSERT_DOUBLE_EQ(w.Record(), 0.0);
+}
+
+TEST(PerformanceRecorder, TestMultipleRecords)
+{
+ PerformanceRecorderWrapper::EnableMeasurementOnNonMarkerSituation();
+
+ const MediaStage stage = MediaStage::RequestDecode;
+ PerformanceRecorderMulti<PlaybackStage> r;
+
+ r.Start(1, stage, 1);
+ r.Start(2, stage, 2);
+ r.Start(3, stage, 3);
+
+ std::this_thread::sleep_for(std::chrono::milliseconds(1));
+
+ // id 0 wasn't started
+ EXPECT_DOUBLE_EQ(r.Record(0), 0.0);
+
+ // id 1 gets recorded normally
+ EXPECT_TRUE(r.Record(1) > 0.0);
+
+ // id 1 was already recorded
+ EXPECT_DOUBLE_EQ(r.Record(1), 0.0);
+
+ // id 2 gets recorded normally
+ EXPECT_TRUE(r.Record(2) > 0.0);
+
+ // id 4 wasn't started
+ EXPECT_DOUBLE_EQ(r.Record(4), 0.0);
+
+  // Recording id 4 discarded all lower ids, so id 3 is gone as well
+ EXPECT_DOUBLE_EQ(r.Record(3), 0.0);
+}
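One plausible reading of the expectations in TestMultipleRecords is that Record(id) consumes that id and discards every id at or below it, so stale out-of-order completions report nothing. A self-contained sketch of that semantics (MultiRecorderSketch is illustrative, not the real PerformanceRecorderMulti):

```cpp
#include <chrono>
#include <cstdint>
#include <map>

// Sketch of the ordering semantics exercised above: recording an id
// consumes it and drops everything at or below it.
class MultiRecorderSketch {
  using Clock = std::chrono::steady_clock;
  std::map<uint64_t, Clock::time_point> mStarts;

 public:
  void Start(uint64_t aId) { mStarts.emplace(aId, Clock::now()); }

  // Elapsed seconds for aId, or 0.0 if it was never started or was
  // discarded by an earlier Record() with an equal or higher id.
  double Record(uint64_t aId) {
    double elapsed = 0.0;
    if (auto it = mStarts.find(aId); it != mStarts.end()) {
      elapsed =
          std::chrono::duration<double>(Clock::now() - it->second).count();
    }
    // Ids complete in order, so anything at or below aId is now stale.
    mStarts.erase(mStarts.begin(), mStarts.upper_bound(aId));
    return elapsed;
  }
};
```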
diff --git a/dom/media/utils/gtest/moz.build b/dom/media/utils/gtest/moz.build
new file mode 100644
index 0000000000..b046869f40
--- /dev/null
+++ b/dom/media/utils/gtest/moz.build
@@ -0,0 +1,15 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+UNIFIED_SOURCES += [
+ "TestPerformanceRecorder.cpp",
+]
+
+LOCAL_INCLUDES += [
+ "/dom/media/utils",
+]
+
+FINAL_LIBRARY = "xul-gtest"
diff --git a/dom/media/utils/moz.build b/dom/media/utils/moz.build
new file mode 100644
index 0000000000..e503c29949
--- /dev/null
+++ b/dom/media/utils/moz.build
@@ -0,0 +1,26 @@
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+with Files("test/**"):
+ BUG_COMPONENT = ("Core", "Audio/Video: Playback")
+
+EXPORTS += [
+ "MediaElementEventRunners.h",
+ "PerformanceRecorder.h",
+ "TelemetryProbesReporter.h",
+]
+
+UNIFIED_SOURCES += [
+ "MediaElementEventRunners.cpp",
+ "PerformanceRecorder.cpp",
+ "TelemetryProbesReporter.cpp",
+]
+
+include("/ipc/chromium/chromium-config.mozbuild")
+
+if CONFIG["ENABLE_TESTS"]:
+ DIRS += ["gtest"]
+
+FINAL_LIBRARY = "xul"