summaryrefslogtreecommitdiffstats
path: root/dom/media/webrtc/libwebrtcglue
diff options
context:
space:
mode:
Diffstat (limited to '')
-rw-r--r--dom/media/webrtc/libwebrtcglue/AudioConduit.cpp927
-rw-r--r--dom/media/webrtc/libwebrtcglue/AudioConduit.h315
-rw-r--r--dom/media/webrtc/libwebrtcglue/CallWorkerThread.h116
-rw-r--r--dom/media/webrtc/libwebrtcglue/CodecConfig.h237
-rw-r--r--dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp22
-rw-r--r--dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h27
-rw-r--r--dom/media/webrtc/libwebrtcglue/MediaConduitControl.h68
-rw-r--r--dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h46
-rw-r--r--dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp151
-rw-r--r--dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h493
-rw-r--r--dom/media/webrtc/libwebrtcglue/MediaDataCodec.cpp70
-rw-r--r--dom/media/webrtc/libwebrtcglue/MediaDataCodec.h32
-rw-r--r--dom/media/webrtc/libwebrtcglue/RtpRtcpConfig.h24
-rw-r--r--dom/media/webrtc/libwebrtcglue/RunningStat.h48
-rw-r--r--dom/media/webrtc/libwebrtcglue/SystemTime.cpp67
-rw-r--r--dom/media/webrtc/libwebrtcglue/SystemTime.h47
-rw-r--r--dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h181
-rw-r--r--dom/media/webrtc/libwebrtcglue/VideoConduit.cpp1849
-rw-r--r--dom/media/webrtc/libwebrtcglue/VideoConduit.h505
-rw-r--r--dom/media/webrtc/libwebrtcglue/VideoStreamFactory.cpp387
-rw-r--r--dom/media/webrtc/libwebrtcglue/VideoStreamFactory.h132
-rw-r--r--dom/media/webrtc/libwebrtcglue/VideoTypes.h60
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.cpp105
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.h114
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.cpp1028
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.h505
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcImageBuffer.h53
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.cpp201
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.h70
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.cpp499
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.h76
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.cpp139
-rw-r--r--dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.h124
-rw-r--r--dom/media/webrtc/libwebrtcglue/moz.build35
34 files changed, 8753 insertions, 0 deletions
diff --git a/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp b/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
new file mode 100644
index 0000000000..3101523ab7
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
@@ -0,0 +1,927 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "AudioConduit.h"
+
+#include "common/browser_logging/CSFLog.h"
+#include "MediaConduitControl.h"
+#include "mozilla/media/MediaUtils.h"
+#include "mozilla/Telemetry.h"
+#include "transport/runnable_utils.h"
+#include "transport/SrtpFlow.h" // For SRTP_MAX_EXPANSION
+#include "WebrtcCallWrapper.h"
+
+// libwebrtc includes
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "audio/audio_receive_stream.h"
+#include "media/base/media_constants.h"
+
+// for ntohs
+#ifdef HAVE_NETINET_IN_H
+# include <netinet/in.h>
+#elif defined XP_WIN
+# include <winsock2.h>
+#endif
+
+#ifdef MOZ_WIDGET_ANDROID
+# include "AndroidBridge.h"
+#endif
+
+namespace mozilla {
+
+namespace {
+
+static const char* acLogTag = "WebrtcAudioSessionConduit";
+#ifdef LOGTAG
+# undef LOGTAG
+#endif
+#define LOGTAG acLogTag
+
+using namespace cricket;
+using LocalDirection = MediaSessionConduitLocalDirection;
+
+const char kCodecParamCbr[] = "cbr";
+
+} // namespace
+
+/**
+ * Factory Method for AudioConduit.
+ * Main-thread only (asserted). Constructs a WebrtcAudioConduit around the
+ * shared libwebrtc Call wrapper (aCall) and the STS event target
+ * (aStsThread).
+ */
+RefPtr<AudioSessionConduit> AudioSessionConduit::Create(
+    RefPtr<WebrtcCallWrapper> aCall,
+    nsCOMPtr<nsISerialEventTarget> aStsThread) {
+  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
+  MOZ_ASSERT(NS_IsMainThread());
+
+  return MakeRefPtr<WebrtcAudioConduit>(std::move(aCall),
+                                        std::move(aStsThread));
+}
+
+// Initializes a Control member as a Mirror of canonical state owned by the
+// control interface; mirror updates are delivered on aCallThread.
+#define INIT_MIRROR(name, val) \
+  name(aCallThread, val, "WebrtcAudioConduit::Control::" #name " (Mirror)")
+// All mirrors start from neutral defaults; real values arrive after
+// InitControl() connects them to their canonicals.
+WebrtcAudioConduit::Control::Control(const RefPtr<AbstractThread>& aCallThread)
+    : INIT_MIRROR(mReceiving, false),
+      INIT_MIRROR(mTransmitting, false),
+      INIT_MIRROR(mLocalSsrcs, Ssrcs()),
+      INIT_MIRROR(mLocalCname, std::string()),
+      INIT_MIRROR(mMid, std::string()),
+      INIT_MIRROR(mRemoteSsrc, 0),
+      INIT_MIRROR(mSyncGroup, std::string()),
+      INIT_MIRROR(mLocalRecvRtpExtensions, RtpExtList()),
+      INIT_MIRROR(mLocalSendRtpExtensions, RtpExtList()),
+      INIT_MIRROR(mSendCodec, Nothing()),
+      INIT_MIRROR(mRecvCodecs, std::vector<AudioCodecConfig>()) {}
+#undef INIT_MIRROR
+
+// Tears down the conduit: disconnects every control mirror and the DTMF
+// listener, shuts down the watch manager, and deletes both streams. Invoked
+// on main thread; the work runs on the call thread and the returned promise
+// resolves when teardown is complete.
+RefPtr<GenericPromise> WebrtcAudioConduit::Shutdown() {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  return InvokeAsync(mCallThread, "WebrtcAudioConduit::Shutdown (main thread)",
+                     [this, self = RefPtr<WebrtcAudioConduit>(this)] {
+                       mControl.mReceiving.DisconnectIfConnected();
+                       mControl.mTransmitting.DisconnectIfConnected();
+                       mControl.mLocalSsrcs.DisconnectIfConnected();
+                       mControl.mLocalCname.DisconnectIfConnected();
+                       mControl.mMid.DisconnectIfConnected();
+                       mControl.mRemoteSsrc.DisconnectIfConnected();
+                       mControl.mSyncGroup.DisconnectIfConnected();
+                       mControl.mLocalRecvRtpExtensions.DisconnectIfConnected();
+                       mControl.mLocalSendRtpExtensions.DisconnectIfConnected();
+                       mControl.mSendCodec.DisconnectIfConnected();
+                       mControl.mRecvCodecs.DisconnectIfConnected();
+                       mControl.mOnDtmfEventListener.DisconnectIfExists();
+                       mWatchManager.Shutdown();
+
+                       {
+                         // Stream deletion requires the write lock.
+                         AutoWriteLock lock(mLock);
+                         DeleteSendStream();
+                         DeleteRecvStream();
+                       }
+
+                       return GenericPromise::CreateAndResolve(
+                           true, "WebrtcAudioConduit::Shutdown (call thread)");
+                     });
+}
+
+// Wires both transports back to this conduit and registers it as the RTCP
+// event observer for the receive stream. Streams themselves are created
+// later, driven by control-state changes.
+WebrtcAudioConduit::WebrtcAudioConduit(
+    RefPtr<WebrtcCallWrapper> aCall, nsCOMPtr<nsISerialEventTarget> aStsThread)
+    : mCall(std::move(aCall)),
+      mSendTransport(this),
+      mRecvTransport(this),
+      mRecvStreamConfig(),
+      mRecvStream(nullptr),
+      mSendStreamConfig(&mSendTransport),
+      mSendStream(nullptr),
+      mSendStreamRunning(false),
+      mRecvStreamRunning(false),
+      mDtmfEnabled(false),
+      mLock("WebrtcAudioConduit::mLock"),
+      mCallThread(std::move(mCall->mCallThread)),
+      mStsThread(std::move(aStsThread)),
+      mControl(mCall->mCallThread),
+      mWatchManager(this, mCall->mCallThread) {
+  mRecvStreamConfig.rtcp_send_transport = &mRecvTransport;
+  mRecvStreamConfig.rtp.rtcp_event_observer = this;
+}
+
+/**
+ * Destruction defines for our super-classes
+ */
+WebrtcAudioConduit::~WebrtcAudioConduit() {
+  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
+  // Streams must already be gone (see Shutdown()); destroying them here
+  // would be too late since it requires the call thread.
+  MOZ_ASSERT(!mSendStream && !mRecvStream,
+             "Call DeleteStreams prior to ~WebrtcAudioConduit.");
+}
+
+// Connects a mirror to its canonical and watches it so that any change
+// triggers OnControlConfigChange on the call thread.
+#define CONNECT(aCanonical, aMirror) \
+  do { \
+    (aMirror).Connect(aCanonical); \
+    mWatchManager.Watch(aMirror, &WebrtcAudioConduit::OnControlConfigChange); \
+  } while (0)
+
+// Hooks every Control mirror up to the canonicals exposed by aControl, and
+// routes DTMF events to OnDtmfEvent. Call-thread only.
+void WebrtcAudioConduit::InitControl(AudioConduitControlInterface* aControl) {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  CONNECT(aControl->CanonicalReceiving(), mControl.mReceiving);
+  CONNECT(aControl->CanonicalTransmitting(), mControl.mTransmitting);
+  CONNECT(aControl->CanonicalLocalSsrcs(), mControl.mLocalSsrcs);
+  CONNECT(aControl->CanonicalLocalCname(), mControl.mLocalCname);
+  CONNECT(aControl->CanonicalMid(), mControl.mMid);
+  CONNECT(aControl->CanonicalRemoteSsrc(), mControl.mRemoteSsrc);
+  CONNECT(aControl->CanonicalSyncGroup(), mControl.mSyncGroup);
+  CONNECT(aControl->CanonicalLocalRecvRtpExtensions(),
+          mControl.mLocalRecvRtpExtensions);
+  CONNECT(aControl->CanonicalLocalSendRtpExtensions(),
+          mControl.mLocalSendRtpExtensions);
+  CONNECT(aControl->CanonicalAudioSendCodec(), mControl.mSendCodec);
+  CONNECT(aControl->CanonicalAudioRecvCodecs(), mControl.mRecvCodecs);
+  mControl.mOnDtmfEventListener = aControl->OnDtmfEvent().Connect(
+      mCall->mCallThread, this, &WebrtcAudioConduit::OnDtmfEvent);
+}
+
+#undef CONNECT
+
+// Forwards a DTMF event to the send stream. Requires an existing send stream
+// and a negotiated DTMF-capable send codec (mDtmfEnabled).
+void WebrtcAudioConduit::OnDtmfEvent(const DtmfEvent& aEvent) {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mSendStream);
+  MOZ_ASSERT(mDtmfEnabled);
+  mSendStream->SendTelephoneEvent(aEvent.mPayloadType, aEvent.mPayloadFrequency,
+                                  aEvent.mEventCode, aEvent.mLengthMs);
+}
+
+// Applies pending changes from the Control mirrors to the send/recv stream
+// configs, then recreates, reconfigures, stops and/or starts the underlying
+// libwebrtc streams as needed. Runs on the call thread whenever any watched
+// mirror changes. Stream (re)creation happens under the write lock; calls
+// into running streams deliberately do not (see comment below).
+void WebrtcAudioConduit::OnControlConfigChange() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  bool recvStreamReconfigureNeeded = false;
+  bool sendStreamReconfigureNeeded = false;
+  bool recvStreamRecreationNeeded = false;
+  bool sendStreamRecreationNeeded = false;
+
+  if (!mControl.mLocalSsrcs.Ref().empty()) {
+    if (mControl.mLocalSsrcs.Ref()[0] != mSendStreamConfig.rtp.ssrc) {
+      sendStreamRecreationNeeded = true;
+
+      // For now...
+      recvStreamRecreationNeeded = true;
+    }
+    mRecvStreamConfig.rtp.local_ssrc = mControl.mLocalSsrcs.Ref()[0];
+    mSendStreamConfig.rtp.ssrc = mControl.mLocalSsrcs.Ref()[0];
+
+    // In the future we can do this instead of recreating the recv stream:
+    // if (mRecvStream) {
+    //   mCall->Call()->OnLocalSsrcUpdated(mRecvStream,
+    //   mControl.mLocalSsrcs.Ref()[0]);
+    // }
+  }
+
+  if (mControl.mLocalCname.Ref() != mSendStreamConfig.rtp.c_name) {
+    mSendStreamConfig.rtp.c_name = mControl.mLocalCname.Ref();
+    sendStreamReconfigureNeeded = true;
+  }
+
+  if (mControl.mMid.Ref() != mSendStreamConfig.rtp.mid) {
+    mSendStreamConfig.rtp.mid = mControl.mMid.Ref();
+    sendStreamReconfigureNeeded = true;
+  }
+
+  if (mControl.mRemoteSsrc.Ref() != mControl.mConfiguredRemoteSsrc) {
+    mRecvStreamConfig.rtp.remote_ssrc = mControl.mConfiguredRemoteSsrc =
+        mControl.mRemoteSsrc.Ref();
+    recvStreamRecreationNeeded = true;
+  }
+
+  if (mControl.mSyncGroup.Ref() != mRecvStreamConfig.sync_group) {
+    mRecvStreamConfig.sync_group = mControl.mSyncGroup.Ref();
+    // For now...
+    recvStreamRecreationNeeded = true;
+    // In the future we can do this instead of recreating the recv stream:
+    // if (mRecvStream) {
+    //   mCall->Call()->OnUpdateSyncGroup(mRecvStream,
+    //   mRecvStreamConfig.sync_group);
+    // }
+  }
+
+  if (auto filteredExtensions = FilterExtensions(
+          LocalDirection::kRecv, mControl.mLocalRecvRtpExtensions);
+      filteredExtensions != mRecvStreamConfig.rtp.extensions) {
+    mRecvStreamConfig.rtp.extensions = std::move(filteredExtensions);
+    // For now...
+    recvStreamRecreationNeeded = true;
+    // In the future we can do this instead of recreating the recv stream:
+    // if (mRecvStream) {
+    //   mRecvStream->SetRtpExtensions(mRecvStreamConfig.rtp.extensions);
+    //}
+  }
+
+  if (auto filteredExtensions = FilterExtensions(
+          LocalDirection::kSend, mControl.mLocalSendRtpExtensions);
+      filteredExtensions != mSendStreamConfig.rtp.extensions) {
+    // At the very least, we need a reconfigure. Recreation needed if the
+    // extmap for any extension has changed, but not for adding/removing
+    // extensions.
+    sendStreamReconfigureNeeded = true;
+
+    for (const auto& newExt : filteredExtensions) {
+      if (sendStreamRecreationNeeded) {
+        break;
+      }
+      for (const auto& oldExt : mSendStreamConfig.rtp.extensions) {
+        if (newExt.uri == oldExt.uri) {
+          if (newExt.id != oldExt.id) {
+            sendStreamRecreationNeeded = true;
+          }
+          // We're done handling newExt, one way or another
+          break;
+        }
+      }
+    }
+
+    mSendStreamConfig.rtp.extensions = std::move(filteredExtensions);
+  }
+
+  mControl.mSendCodec.Ref().apply([&](const auto& aConfig) {
+    if (mControl.mConfiguredSendCodec != mControl.mSendCodec.Ref()) {
+      mControl.mConfiguredSendCodec = mControl.mSendCodec;
+      if (ValidateCodecConfig(aConfig, true) == kMediaConduitNoError) {
+        mSendStreamConfig.encoder_factory =
+            webrtc::CreateBuiltinAudioEncoderFactory();
+
+        webrtc::AudioSendStream::Config::SendCodecSpec spec(
+            aConfig.mType, CodecConfigToLibwebrtcFormat(aConfig));
+        mSendStreamConfig.send_codec_spec = spec;
+
+        mDtmfEnabled = aConfig.mDtmfEnabled;
+        sendStreamReconfigureNeeded = true;
+      }
+    }
+  });
+
+  if (mControl.mConfiguredRecvCodecs != mControl.mRecvCodecs.Ref()) {
+    mControl.mConfiguredRecvCodecs = mControl.mRecvCodecs;
+    mRecvStreamConfig.decoder_factory = mCall->mAudioDecoderFactory;
+    mRecvStreamConfig.decoder_map.clear();
+
+    // Invalid codec configs are skipped rather than failing the whole set.
+    for (const auto& codec : mControl.mRecvCodecs.Ref()) {
+      if (ValidateCodecConfig(codec, false) != kMediaConduitNoError) {
+        continue;
+      }
+      mRecvStreamConfig.decoder_map.emplace(
+          codec.mType, CodecConfigToLibwebrtcFormat(codec));
+    }
+
+    recvStreamReconfigureNeeded = true;
+  }
+
+  if (!recvStreamReconfigureNeeded && !sendStreamReconfigureNeeded &&
+      !recvStreamRecreationNeeded && !sendStreamRecreationNeeded &&
+      mControl.mReceiving == mRecvStreamRunning &&
+      mControl.mTransmitting == mSendStreamRunning) {
+    // No changes applied -- no need to lock.
+    return;
+  }
+
+  // Recreation subsumes reconfiguration.
+  if (recvStreamRecreationNeeded) {
+    recvStreamReconfigureNeeded = false;
+  }
+  if (sendStreamRecreationNeeded) {
+    sendStreamReconfigureNeeded = false;
+  }
+
+  {
+    AutoWriteLock lock(mLock);
+    // Recreate/Stop/Start streams as needed.
+    if (recvStreamRecreationNeeded) {
+      DeleteRecvStream();
+    }
+    if (mControl.mReceiving) {
+      CreateRecvStream();
+    }
+    if (sendStreamRecreationNeeded) {
+      DeleteSendStream();
+    }
+    if (mControl.mTransmitting) {
+      CreateSendStream();
+    }
+  }
+
+  // We make sure to not hold the lock while stopping/starting/reconfiguring
+  // streams, so as to not cause deadlocks. These methods can cause our platform
+  // codecs to dispatch sync runnables to main, and main may grab the lock.
+
+  if (mRecvStream && recvStreamReconfigureNeeded) {
+    MOZ_ASSERT(!recvStreamRecreationNeeded);
+    mRecvStream->SetDecoderMap(mRecvStreamConfig.decoder_map);
+  }
+
+  if (mSendStream && sendStreamReconfigureNeeded) {
+    MOZ_ASSERT(!sendStreamRecreationNeeded);
+    mSendStream->Reconfigure(mSendStreamConfig);
+  }
+
+  if (!mControl.mReceiving) {
+    StopReceiving();
+  }
+  if (!mControl.mTransmitting) {
+    StopTransmitting();
+  }
+
+  if (mControl.mReceiving) {
+    StartReceiving();
+  }
+  if (mControl.mTransmitting) {
+    StartTransmitting();
+  }
+}
+
+// Returns the single local SSRC currently configured on the receive stream.
+std::vector<uint32_t> WebrtcAudioConduit::GetLocalSSRCs() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  return std::vector<uint32_t>(1, mRecvStreamConfig.rtp.local_ssrc);
+}
+
+// Switches the receive stream to a new remote SSRC. If a recv stream exists
+// it is recreated with the new config, and the previous running state is
+// restored afterwards. No-op (returns true) if the SSRC is unchanged.
+bool WebrtcAudioConduit::OverrideRemoteSSRC(uint32_t aSsrc) {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  if (mRecvStreamConfig.rtp.remote_ssrc == aSsrc) {
+    return true;
+  }
+  mRecvStreamConfig.rtp.remote_ssrc = aSsrc;
+
+  const bool wasReceiving = mRecvStreamRunning;
+  const bool hadRecvStream = mRecvStream;
+
+  StopReceiving();
+
+  if (hadRecvStream) {
+    AutoWriteLock lock(mLock);
+    DeleteRecvStream();
+    CreateRecvStream();
+  }
+
+  if (wasReceiving) {
+    StartReceiving();
+  }
+  return true;
+}
+
+Maybe<Ssrc> WebrtcAudioConduit::GetRemoteSSRC() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  // libwebrtc uses 0 to mean a lack of SSRC. That is not to spec.
+  return mRecvStreamConfig.rtp.remote_ssrc == 0
+             ? Nothing()
+             : Some(mRecvStreamConfig.rtp.remote_ssrc);
+}
+
+// Snapshot of libwebrtc receive-stream stats; Nothing() if no recv stream.
+Maybe<webrtc::AudioReceiveStreamInterface::Stats>
+WebrtcAudioConduit::GetReceiverStats() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  if (!mRecvStream) {
+    return Nothing();
+  }
+  return Some(mRecvStream->GetStats());
+}
+
+// Snapshot of libwebrtc send-stream stats; Nothing() if no send stream.
+Maybe<webrtc::AudioSendStream::Stats> WebrtcAudioConduit::GetSenderStats()
+    const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  if (!mSendStream) {
+    return Nothing();
+  }
+  return Some(mSendStream->GetStats());
+}
+
+// Call-level stats; Nothing() if the underlying Call is gone.
+Maybe<webrtc::CallBasicStats> WebrtcAudioConduit::GetCallStats() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  if (!mCall->Call()) {
+    return Nothing();
+  }
+  return Some(mCall->Call()->GetStats());
+}
+
+// RtcpEventObserver callbacks: fan out RTCP BYE/timeout to our listeners.
+void WebrtcAudioConduit::OnRtcpBye() { mRtcpByeEvent.Notify(); }
+
+void WebrtcAudioConduit::OnRtcpTimeout() { mRtcpTimeoutEvent.Notify(); }
+
+// AudioSessionConduit Implementation
+
+// Pushes one 10 ms audio frame into the send stream. Called on the
+// AudioProxyThread; takes the read lock (blocking there is acceptable).
+MediaConduitErrorCode WebrtcAudioConduit::SendAudioFrame(
+    std::unique_ptr<webrtc::AudioFrame> frame) {
+  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
+  // Following checks need to be performed
+  // 1. Non null audio buffer pointer, and
+  // 2. Valid sample rate, and
+  // 3. Appropriate Sample Length for 10 ms audio-frame. This represents the
+  // block size used upstream for processing.
+  // Ex: for 16000 sample rate , valid block-length is 160.
+  // Similarly for 32000 sample rate, valid block length is 320.
+
+  if (!frame->data() ||
+      (IsSamplingFreqSupported(frame->sample_rate_hz()) == false) ||
+      ((frame->samples_per_channel() % (frame->sample_rate_hz() / 100) != 0))) {
+    CSFLogError(LOGTAG, "%s Invalid Parameters ", __FUNCTION__);
+    MOZ_ASSERT(PR_FALSE);
+    return kMediaConduitMalformedArgument;
+  }
+
+  // This is the AudioProxyThread, blocking it for a bit is fine.
+  AutoReadLock lock(mLock);
+  if (!mSendStreamRunning) {
+    CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__);
+    return kMediaConduitSessionNotInited;
+  }
+
+  mSendStream->SendAudioData(std::move(frame));
+  return kMediaConduitNoError;
+}
+
+// Pulls one decoded/mixed 10 ms frame at samplingFreqHz into |frame|.
+// Runs on a real-time audio thread: uses a try-lock and bails out rather
+// than blocking when the config lock is held.
+MediaConduitErrorCode WebrtcAudioConduit::GetAudioFrame(
+    int32_t samplingFreqHz, webrtc::AudioFrame* frame) {
+  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
+
+  // validate params
+  if (!frame) {
+    CSFLogError(LOGTAG, "%s Null Audio Buffer Pointer", __FUNCTION__);
+    MOZ_ASSERT(PR_FALSE);
+    return kMediaConduitMalformedArgument;
+  }
+
+  // Validate sample length
+  if (GetNum10msSamplesForFrequency(samplingFreqHz) == 0) {
+    CSFLogError(LOGTAG, "%s Invalid Sampling Frequency ", __FUNCTION__);
+    MOZ_ASSERT(PR_FALSE);
+    return kMediaConduitMalformedArgument;
+  }
+
+  // If the lock is taken, skip this chunk to avoid blocking the audio thread.
+  AutoTryReadLock tryLock(mLock);
+  if (!tryLock) {
+    CSFLogError(LOGTAG, "%s Conduit going through negotiation ", __FUNCTION__);
+    return kMediaConduitPlayoutError;
+  }
+
+  // Conduit should have reception enabled before we ask for decoded
+  // samples
+  if (!mRecvStreamRunning) {
+    CSFLogError(LOGTAG, "%s Engine not Receiving ", __FUNCTION__);
+    return kMediaConduitSessionNotInited;
+  }
+
+  // Unfortunate to have to cast to an internal class, but that looks like the
+  // only way short of interfacing with a layer above (which mixes all streams,
+  // which we don't want) or a layer below (which we try to avoid because it is
+  // less stable).
+  auto info = static_cast<webrtc::AudioReceiveStreamImpl*>(mRecvStream)
+                  ->GetAudioFrameWithInfo(samplingFreqHz, frame);
+
+  if (info == webrtc::AudioMixer::Source::AudioFrameInfo::kError) {
+    CSFLogError(LOGTAG, "%s Getting audio frame failed", __FUNCTION__);
+    return kMediaConduitPlayoutError;
+  }
+
+  CSFLogDebug(LOGTAG, "%s Got %zu channels of %zu samples", __FUNCTION__,
+              frame->num_channels(), frame->samples_per_channel());
+  return kMediaConduitNoError;
+}
+
+// Transport Layer Callbacks
+
+// Handles an incoming RTP packet on the call thread. If SSRC changes are
+// allowed and the packet's SSRC differs from the configured remote SSRC,
+// the receive stream is switched to the new SSRC before delivery.
+void WebrtcAudioConduit::OnRtpReceived(MediaPacket&& aPacket,
+                                       webrtc::RTPHeader&& aHeader) {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  if (mAllowSsrcChange && mRecvStreamConfig.rtp.remote_ssrc != aHeader.ssrc) {
+    CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__,
+                mRecvStreamConfig.rtp.remote_ssrc, aHeader.ssrc);
+    OverrideRemoteSSRC(aHeader.ssrc);
+  }
+
+  CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %zu, SSRC %u (0x%x) ", __FUNCTION__,
+                (uint16_t)ntohs(((uint16_t*)aPacket.data())[1]), aPacket.len(),
+                (uint32_t)ntohl(((uint32_t*)aPacket.data())[2]),
+                (uint32_t)ntohl(((uint32_t*)aPacket.data())[2]));
+
+  DeliverPacket(rtc::CopyOnWriteBuffer(aPacket.data(), aPacket.len()),
+                PacketType::RTP);
+}
+
+// Handles an incoming RTCP packet on the call thread.
+void WebrtcAudioConduit::OnRtcpReceived(MediaPacket&& aPacket) {
+  CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  DeliverPacket(rtc::CopyOnWriteBuffer(aPacket.data(), aPacket.len()),
+                PacketType::RTCP);
+}
+
+// Returns the sequence number of the first RTP packet sent for aSsrc, or
+// Nothing() if we have not sent for that SSRC yet (see SendRtp()).
+Maybe<uint16_t> WebrtcAudioConduit::RtpSendBaseSeqFor(uint32_t aSsrc) const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  auto it = mRtpSendBaseSeqs.find(aSsrc);
+  if (it == mRtpSendBaseSeqs.end()) {
+    return Nothing();
+  }
+  return Some(it->second);
+}
+
+const dom::RTCStatsTimestampMaker& WebrtcAudioConduit::GetTimestampMaker()
+    const {
+  return mCall->GetTimestampMaker();
+}
+
+// Stops the send stream if running. Must NOT hold the write lock (asserted):
+// stream calls may sync-dispatch to main, which may grab the lock.
+void WebrtcAudioConduit::StopTransmitting() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread());
+
+  if (!mSendStreamRunning) {
+    return;
+  }
+
+  if (mSendStream) {
+    CSFLogDebug(LOGTAG, "%s Stopping send stream", __FUNCTION__);
+    mSendStream->Stop();
+  }
+
+  mSendStreamRunning = false;
+}
+
+// Starts the (already created) send stream if not running, after signaling
+// the network as up for audio.
+void WebrtcAudioConduit::StartTransmitting() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mSendStream);
+  MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread());
+
+  if (mSendStreamRunning) {
+    return;
+  }
+
+  CSFLogDebug(LOGTAG, "%s Starting send stream", __FUNCTION__);
+
+  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO,
+                                           webrtc::kNetworkUp);
+  mSendStream->Start();
+  mSendStreamRunning = true;
+}
+
+// Stops the recv stream if running. Same locking constraints as
+// StopTransmitting().
+void WebrtcAudioConduit::StopReceiving() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread());
+
+  if (!mRecvStreamRunning) {
+    return;
+  }
+
+  if (mRecvStream) {
+    CSFLogDebug(LOGTAG, "%s Stopping recv stream", __FUNCTION__);
+    mRecvStream->Stop();
+  }
+
+  mRecvStreamRunning = false;
+}
+
+// Starts the (already created) recv stream if not running.
+void WebrtcAudioConduit::StartReceiving() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mRecvStream);
+  MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread());
+
+  if (mRecvStreamRunning) {
+    return;
+  }
+
+  CSFLogDebug(LOGTAG, "%s Starting receive stream (SSRC %u (0x%x))",
+              __FUNCTION__, mRecvStreamConfig.rtp.remote_ssrc,
+              mRecvStreamConfig.rtp.remote_ssrc);
+
+  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO,
+                                           webrtc::kNetworkUp);
+  mRecvStream->Start();
+  mRecvStreamRunning = true;
+}
+
+// webrtc::Transport callback for outgoing RTP. Copies the packet (with room
+// for SRTP expansion) and notifies mSenderRtpSendEvent. Also records the
+// first ("base") sequence number per SSRC and forwards send notifications
+// to the Call on the call thread.
+bool WebrtcAudioConduit::SendRtp(const uint8_t* aData, size_t aLength,
+                                 const webrtc::PacketOptions& aOptions) {
+  MOZ_ASSERT(aLength >= 12);
+  const uint16_t seqno = ntohs(*((uint16_t*)&aData[2]));
+  const uint32_t ssrc = ntohl(*((uint32_t*)&aData[8]));
+
+  CSFLogVerbose(
+      LOGTAG,
+      "AudioConduit %p: Sending RTP Packet seq# %u, len %zu, SSRC %u (0x%x)",
+      this, seqno, aLength, ssrc, ssrc);
+
+  if (!mTransportActive) {
+    CSFLogError(LOGTAG, "AudioConduit %p: RTP Packet Send Failed ", this);
+    return false;
+  }
+
+  MediaPacket packet;
+  packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION);
+  packet.SetType(MediaPacket::RTP);
+  mSenderRtpSendEvent.Notify(std::move(packet));
+
+  // Parse the sequence number of the first rtp packet as base_seq.
+  const auto inserted = mRtpSendBaseSeqs_n.insert({ssrc, seqno}).second;
+
+  if (inserted || aOptions.packet_id >= 0) {
+    int64_t now_ms = PR_Now() / 1000;
+    MOZ_ALWAYS_SUCCEEDS(mCallThread->Dispatch(NS_NewRunnableFunction(
+        __func__, [this, self = RefPtr<WebrtcAudioConduit>(this),
+                   packet_id = aOptions.packet_id, now_ms, ssrc, seqno] {
+          mRtpSendBaseSeqs.insert({ssrc, seqno});
+          if (packet_id >= 0) {
+            if (mCall->Call()) {
+              // TODO: This notification should ideally happen after the
+              // transport layer has sent the packet on the wire.
+              mCall->Call()->OnSentPacket({packet_id, now_ms});
+            }
+          }
+        })));
+  }
+  return true;
+}
+
+// webrtc::Transport callback for outgoing sender (SR) RTCP.
+bool WebrtcAudioConduit::SendSenderRtcp(const uint8_t* aData, size_t aLength) {
+  CSFLogVerbose(
+      LOGTAG,
+      "AudioConduit %p: Sending RTCP SR Packet, len %zu, SSRC %u (0x%x)", this,
+      aLength, (uint32_t)ntohl(*((uint32_t*)&aData[4])),
+      (uint32_t)ntohl(*((uint32_t*)&aData[4])));
+
+  if (!mTransportActive) {
+    CSFLogError(LOGTAG, "%s RTCP SR Packet Send Failed ", __FUNCTION__);
+    return false;
+  }
+
+  MediaPacket packet;
+  packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION);
+  packet.SetType(MediaPacket::RTCP);
+  mSenderRtcpSendEvent.Notify(std::move(packet));
+  return true;
+}
+
+// webrtc::Transport callback for outgoing receiver (RR) RTCP.
+bool WebrtcAudioConduit::SendReceiverRtcp(const uint8_t* aData,
+                                          size_t aLength) {
+  CSFLogVerbose(
+      LOGTAG,
+      "AudioConduit %p: Sending RTCP RR Packet, len %zu, SSRC %u (0x%x)", this,
+      aLength, (uint32_t)ntohl(*((uint32_t*)&aData[4])),
+      (uint32_t)ntohl(*((uint32_t*)&aData[4])));
+
+  if (!mTransportActive) {
+    CSFLogError(LOGTAG, "AudioConduit %p: RTCP RR Packet Send Failed", this);
+    return false;
+  }
+
+  MediaPacket packet;
+  packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION);
+  packet.SetType(MediaPacket::RTCP);
+  mReceiverRtcpSendEvent.Notify(std::move(packet));
+  return true;
+}
+
+/**
+ * Supported Sampling Frequencies.
+ * A frequency is supported iff we know its 10 ms block length.
+ */
+bool WebrtcAudioConduit::IsSamplingFreqSupported(int freq) const {
+  return GetNum10msSamplesForFrequency(freq) != 0;
+}
+
+// Snapshot of upstream RTP sources (CSRC/SSRC contributors) from the recv
+// stream. Main-thread only; takes the read lock to guard mRecvStream.
+std::vector<webrtc::RtpSource> WebrtcAudioConduit::GetUpstreamRtpSources()
+    const {
+  MOZ_ASSERT(NS_IsMainThread());
+  std::vector<webrtc::RtpSource> sources;
+  {
+    AutoReadLock lock(mLock);
+    if (mRecvStream) {
+      sources = mRecvStream->GetSources();
+    }
+  }
+  return sources;
+}
+
+/* Return block-length of 10 ms audio frame in number of samples */
+unsigned int WebrtcAudioConduit::GetNum10msSamplesForFrequency(
+    int samplingFreqHz) const {
+  switch (samplingFreqHz) {
+    case 16000:
+      return 160;  // 160 samples
+    case 32000:
+      return 320;  // 320 samples
+    case 44100:
+      return 441;  // 441 samples
+    case 48000:
+      return 480;  // 480 samples
+    default:
+      return 0;  // invalid or unsupported
+  }
+}
+
+/**
+ * Perform validation on the codecConfig to be applied.
+ * Verifies if the codec is already applied.
+ * Checks only that the name is non-empty and channels are mono/stereo; the
+ * |send| flag is currently unused here.
+ */
+MediaConduitErrorCode WebrtcAudioConduit::ValidateCodecConfig(
+    const AudioCodecConfig& codecInfo, bool send) {
+  if (codecInfo.mName.empty()) {
+    CSFLogError(LOGTAG, "%s Empty Payload Name ", __FUNCTION__);
+    return kMediaConduitMalformedArgument;
+  }
+
+  // Only mono or stereo channels supported
+  if ((codecInfo.mChannels != 1) && (codecInfo.mChannels != 2)) {
+    CSFLogError(LOGTAG, "%s Channel Unsupported ", __FUNCTION__);
+    return kMediaConduitMalformedArgument;
+  }
+
+  return kMediaConduitNoError;
+}
+
+// Keeps only the RTP header extensions this audio conduit supports for the
+// given direction: ssrc-audio-level (both), csrc-audio-levels (recv only),
+// MID (send only — recv MID is TODO, bug 1727211).
+RtpExtList WebrtcAudioConduit::FilterExtensions(LocalDirection aDirection,
+                                                const RtpExtList& aExtensions) {
+  const bool isSend = aDirection == LocalDirection::kSend;
+  RtpExtList filteredExtensions;
+
+  for (const auto& extension : aExtensions) {
+    // ssrc-audio-level RTP header extension
+    if (extension.uri == webrtc::RtpExtension::kAudioLevelUri) {
+      filteredExtensions.push_back(
+          webrtc::RtpExtension(extension.uri, extension.id));
+    }
+
+    // csrc-audio-level RTP header extension
+    if (extension.uri == webrtc::RtpExtension::kCsrcAudioLevelsUri) {
+      if (isSend) {
+        continue;
+      }
+      filteredExtensions.push_back(
+          webrtc::RtpExtension(extension.uri, extension.id));
+    }
+
+    // MID RTP header extension
+    if (extension.uri == webrtc::RtpExtension::kMidUri) {
+      if (!isSend) {
+        // TODO: recv mid support, see also bug 1727211
+        continue;
+      }
+      filteredExtensions.push_back(
+          webrtc::RtpExtension(extension.uri, extension.id));
+    }
+  }
+
+  return filteredExtensions;
+}
+
+// Translates our AudioCodecConfig into libwebrtc's SdpAudioFormat. Only Opus
+// carries fmtp-style parameters (stereo, FEC, DTX, playback/bitrate caps,
+// ptime bounds, CBR); other codecs map to name/rate/channels only.
+webrtc::SdpAudioFormat WebrtcAudioConduit::CodecConfigToLibwebrtcFormat(
+    const AudioCodecConfig& aConfig) {
+  webrtc::SdpAudioFormat::Parameters parameters;
+  if (aConfig.mName == kOpusCodecName) {
+    if (aConfig.mChannels == 2) {
+      parameters[kCodecParamStereo] = kParamValueTrue;
+    }
+    if (aConfig.mFECEnabled) {
+      parameters[kCodecParamUseInbandFec] = kParamValueTrue;
+    }
+    if (aConfig.mDTXEnabled) {
+      parameters[kCodecParamUseDtx] = kParamValueTrue;
+    }
+    if (aConfig.mMaxPlaybackRate) {
+      parameters[kCodecParamMaxPlaybackRate] =
+          std::to_string(aConfig.mMaxPlaybackRate);
+    }
+    if (aConfig.mMaxAverageBitrate) {
+      parameters[kCodecParamMaxAverageBitrate] =
+          std::to_string(aConfig.mMaxAverageBitrate);
+    }
+    if (aConfig.mFrameSizeMs) {
+      parameters[kCodecParamPTime] = std::to_string(aConfig.mFrameSizeMs);
+    }
+    if (aConfig.mMinFrameSizeMs) {
+      parameters[kCodecParamMinPTime] = std::to_string(aConfig.mMinFrameSizeMs);
+    }
+    if (aConfig.mMaxFrameSizeMs) {
+      parameters[kCodecParamMaxPTime] = std::to_string(aConfig.mMaxFrameSizeMs);
+    }
+    if (aConfig.mCbrEnabled) {
+      parameters[kCodecParamCbr] = kParamValueTrue;
+    }
+  }
+
+  return webrtc::SdpAudioFormat(aConfig.mName, aConfig.mFreq, aConfig.mChannels,
+                                parameters);
+}
+
+// Destroys the send stream (if any) and clears per-SSRC base sequence
+// numbers. Requires the write lock (asserted) since mSendStream is read
+// under the read lock on other threads.
+void WebrtcAudioConduit::DeleteSendStream() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mLock.LockedForWritingByCurrentThread());
+
+  if (!mSendStream) {
+    return;
+  }
+
+  mCall->Call()->DestroyAudioSendStream(mSendStream);
+  mSendStreamRunning = false;
+  mSendStream = nullptr;
+
+  // Reset base_seqs in case ssrcs get re-used.
+  mRtpSendBaseSeqs.clear();
+}
+
+// Creates the send stream from the current mSendStreamConfig. No-op if one
+// already exists. Write lock required (asserted).
+void WebrtcAudioConduit::CreateSendStream() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mLock.LockedForWritingByCurrentThread());
+
+  if (mSendStream) {
+    return;
+  }
+
+  mSendStream = mCall->Call()->CreateAudioSendStream(mSendStreamConfig);
+}
+
+// Destroys the recv stream (if any). Write lock required (asserted).
+void WebrtcAudioConduit::DeleteRecvStream() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mLock.LockedForWritingByCurrentThread());
+
+  if (!mRecvStream) {
+    return;
+  }
+
+  mCall->Call()->DestroyAudioReceiveStream(mRecvStream);
+  mRecvStreamRunning = false;
+  mRecvStream = nullptr;
+}
+
+// Creates the recv stream from the current mRecvStreamConfig. No-op if one
+// already exists. Write lock required (asserted).
+void WebrtcAudioConduit::CreateRecvStream() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+  MOZ_ASSERT(mLock.LockedForWritingByCurrentThread());
+
+  if (mRecvStream) {
+    return;
+  }
+
+  mRecvStream = mCall->Call()->CreateAudioReceiveStream(mRecvStreamConfig);
+}
+
+// Hands an incoming RTP/RTCP packet to the libwebrtc Call's packet receiver.
+// Delivery failures are logged but otherwise dropped. |type| is used only
+// for logging.
+void WebrtcAudioConduit::DeliverPacket(rtc::CopyOnWriteBuffer packet,
+                                       PacketType type) {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  if (!mCall->Call()) {
+    return;
+  }
+
+  // Bug 1499796 - we need to get passed the time the packet was received
+  webrtc::PacketReceiver::DeliveryStatus status =
+      mCall->Call()->Receiver()->DeliverPacket(webrtc::MediaType::AUDIO,
+                                               std::move(packet), -1);
+
+  if (status != webrtc::PacketReceiver::DELIVERY_OK) {
+    CSFLogError(LOGTAG, "%s DeliverPacket Failed for %s packet, %d",
+                __FUNCTION__, type == PacketType::RTP ? "RTP" : "RTCP", status);
+  }
+}
+
+// Payload type currently used by the send stream, from sender stats;
+// Nothing() if there is no stream or no codec stat yet.
+Maybe<int> WebrtcAudioConduit::ActiveSendPayloadType() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  auto stats = GetSenderStats();
+  if (!stats) {
+    return Nothing();
+  }
+
+  if (!stats->codec_payload_type) {
+    return Nothing();
+  }
+
+  return Some(*stats->codec_payload_type);
+}
+
+// Payload type currently decoded by the recv stream, from receiver stats;
+// Nothing() if there is no stream or no codec stat yet.
+Maybe<int> WebrtcAudioConduit::ActiveRecvPayloadType() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  auto stats = GetReceiverStats();
+  if (!stats) {
+    return Nothing();
+  }
+
+  if (!stats->codec_payload_type) {
+    return Nothing();
+  }
+
+  return Some(*stats->codec_payload_type);
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/AudioConduit.h b/dom/media/webrtc/libwebrtcglue/AudioConduit.h
new file mode 100644
index 0000000000..630b22b297
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/AudioConduit.h
@@ -0,0 +1,315 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef AUDIO_SESSION_H_
+#define AUDIO_SESSION_H_
+
+#include "mozilla/Attributes.h"
+#include "mozilla/ReentrantMonitor.h"
+#include "mozilla/RWLock.h"
+#include "mozilla/StateMirroring.h"
+#include "mozilla/TimeStamp.h"
+
+#include "MediaConduitInterface.h"
+#include "common/MediaEngineWrapper.h"
+
+/**
+ * This file hosts several structures identifying different aspects of a RTP
+ * Session.
+ */
+namespace mozilla {
+
+struct DtmfEvent;
+
+/**
+ * Concrete class for Audio session. Hooks up
+ * - media-source and target to external transport
+ */
+class WebrtcAudioConduit : public AudioSessionConduit,
+ public webrtc::RtcpEventObserver {
+ public:
+ Maybe<int> ActiveSendPayloadType() const override;
+ Maybe<int> ActiveRecvPayloadType() const override;
+
+ void OnRtpReceived(MediaPacket&& aPacket, webrtc::RTPHeader&& aHeader);
+ void OnRtcpReceived(MediaPacket&& aPacket);
+
+ void OnRtcpBye() override;
+ void OnRtcpTimeout() override;
+
+ void SetTransportActive(bool aActive) override {
+ mTransportActive = aActive;
+ if (!aActive) {  // Deactivating also disconnects all packet listeners.
+ mReceiverRtpEventListener.DisconnectIfExists();
+ mReceiverRtcpEventListener.DisconnectIfExists();
+ mSenderRtcpEventListener.DisconnectIfExists();
+ }
+ }
+ MediaEventSourceExc<MediaPacket>& SenderRtpSendEvent() override {
+ return mSenderRtpSendEvent;
+ }
+ MediaEventSourceExc<MediaPacket>& SenderRtcpSendEvent() override {
+ return mSenderRtcpSendEvent;
+ }
+ MediaEventSourceExc<MediaPacket>& ReceiverRtcpSendEvent() override {
+ return mReceiverRtcpSendEvent;
+ }
+ void ConnectReceiverRtpEvent(
+ MediaEventSourceExc<MediaPacket, webrtc::RTPHeader>& aEvent) override {
+ // Hold a strong-ref to `this` for safety, since we'll be disconnecting
+ // off-target.
+ mReceiverRtpEventListener = aEvent.Connect(
+ mCallThread, [this, self = RefPtr<WebrtcAudioConduit>(this)](
+ MediaPacket aPacket, webrtc::RTPHeader aHeader) {
+ OnRtpReceived(std::move(aPacket), std::move(aHeader));
+ });
+ }
+ void ConnectReceiverRtcpEvent(
+ MediaEventSourceExc<MediaPacket>& aEvent) override {
+ // Hold a strong-ref to `this` for safety, since we'll be disconnecting
+ // off-target.
+ mReceiverRtcpEventListener = aEvent.Connect(
+ mCallThread,
+ [this, self = RefPtr<WebrtcAudioConduit>(this)](MediaPacket aPacket) {
+ OnRtcpReceived(std::move(aPacket));
+ });
+ }
+ void ConnectSenderRtcpEvent(
+ MediaEventSourceExc<MediaPacket>& aEvent) override {
+ // Hold a strong-ref to `this` for safety, since we'll be disconnecting
+ // off-target.
+ mSenderRtcpEventListener = aEvent.Connect(
+ mCallThread,
+ [this, self = RefPtr<WebrtcAudioConduit>(this)](MediaPacket aPacket) {
+ OnRtcpReceived(std::move(aPacket));
+ });
+ }
+
+ Maybe<uint16_t> RtpSendBaseSeqFor(uint32_t aSsrc) const override;
+
+ const dom::RTCStatsTimestampMaker& GetTimestampMaker() const override;
+
+ void StopTransmitting();
+ void StartTransmitting();
+ void StopReceiving();
+ void StartReceiving();
+
+ /**
+ * Function to deliver externally captured audio sample for encoding and
+ * transport
+ * @param frame [in]: AudioFrame in upstream's format for forwarding to the
+ * send stream. Ownership is passed along.
+ * NOTE: ConfigureSendMediaCodec() SHOULD be called before this function can
+ * be invoked. This ensures the inserted audio-samples can be transmitted by
+ * the conduit.
+ */
+ MediaConduitErrorCode SendAudioFrame(
+ std::unique_ptr<webrtc::AudioFrame> frame) override;
+
+ /**
+ * Function to grab a decoded audio-sample from the media engine for
+ * rendering / playout of length 10 milliseconds.
+ *
+ * @param samplingFreqHz [in]: Frequency of the sampling for playback in
+ * Hertz (16000, 32000,..)
+ * @param frame [in/out]: Pointer to an AudioFrame to which audio data will be
+ * copied
+ * NOTE: This function should be invoked every 10 milliseconds for the best
+ * performance
+ * NOTE: ConfigureRecvMediaCodec() SHOULD be called before this function can
+ * be invoked
+ * This ensures the decoded samples are ready for reading and playout is
+ * enabled.
+ */
+ MediaConduitErrorCode GetAudioFrame(int32_t samplingFreqHz,
+ webrtc::AudioFrame* frame) override;
+
+ bool SendRtp(const uint8_t* aData, size_t aLength,
+ const webrtc::PacketOptions& aOptions) override;
+ bool SendSenderRtcp(const uint8_t* aData, size_t aLength) override;
+ bool SendReceiverRtcp(const uint8_t* aData, size_t aLength) override;
+
+ bool HasCodecPluginID(uint64_t aPluginID) const override { return false; }  // Audio conduits never use codec plugins.
+
+ void DeliverPacket(rtc::CopyOnWriteBuffer packet, PacketType type) override;
+
+ RefPtr<GenericPromise> Shutdown() override;
+
+ WebrtcAudioConduit(RefPtr<WebrtcCallWrapper> aCall,
+ nsCOMPtr<nsISerialEventTarget> aStsThread);
+
+ virtual ~WebrtcAudioConduit();
+
+ // Call thread.
+ void InitControl(AudioConduitControlInterface* aControl) override;
+
+ // Handle a DTMF event from mControl.mOnDtmfEventListener.
+ void OnDtmfEvent(const DtmfEvent& aEvent);
+
+ // Called when a parameter in mControl has changed. Call thread.
+ void OnControlConfigChange();
+
+ Ssrcs GetLocalSSRCs() const override;
+ Maybe<Ssrc> GetRemoteSSRC() const override;
+
+ void DisableSsrcChanges() override {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mAllowSsrcChange = false;
+ }
+
+ private:
+ /**
+ * Override the remote ssrc configured on mRecvStreamConfig.
+ *
+ * Recreates and restarts the recv stream if needed. The overridden value is
+ * overwritten the next time the mControl.mRemoteSsrc mirror changes value.
+ *
+ * Call thread only.
+ */
+ bool OverrideRemoteSSRC(uint32_t aSsrc);
+
+ public:
+ void UnsetRemoteSSRC(uint32_t aSsrc) override {}  // Intentional no-op for audio.
+
+ Maybe<webrtc::AudioReceiveStreamInterface::Stats> GetReceiverStats()
+ const override;
+ Maybe<webrtc::AudioSendStream::Stats> GetSenderStats() const override;
+ Maybe<webrtc::CallBasicStats> GetCallStats() const override;
+
+ bool IsSamplingFreqSupported(int freq) const override;
+
+ MediaEventSource<void>& RtcpByeEvent() override { return mRtcpByeEvent; }
+ MediaEventSource<void>& RtcpTimeoutEvent() override {
+ return mRtcpTimeoutEvent;
+ }
+
+ std::vector<webrtc::RtpSource> GetUpstreamRtpSources() const override;
+
+ private:
+ WebrtcAudioConduit(const WebrtcAudioConduit& other) = delete;
+ void operator=(const WebrtcAudioConduit& other) = delete;
+
+ // Generate block size in sample length for a given sampling frequency
+ unsigned int GetNum10msSamplesForFrequency(int samplingFreqHz) const;
+
+ // Checks the codec to be applied
+ static MediaConduitErrorCode ValidateCodecConfig(
+ const AudioCodecConfig& codecInfo, bool send);
+ /**
+ * Of all extensions in aExtensions, returns a list of supported extensions.
+ */
+ static RtpExtList FilterExtensions(
+ MediaSessionConduitLocalDirection aDirection,
+ const RtpExtList& aExtensions);
+ static webrtc::SdpAudioFormat CodecConfigToLibwebrtcFormat(
+ const AudioCodecConfig& aConfig);
+
+ void CreateSendStream();
+ void DeleteSendStream();
+ void CreateRecvStream();
+ void DeleteRecvStream();
+
+ // Are SSRC changes without signaling allowed or not.
+ // Call thread only.
+ bool mAllowSsrcChange = true;
+
+ // Const so can be accessed on any thread. Most methods are called on the Call
+ // thread.
+ const RefPtr<WebrtcCallWrapper> mCall;
+
+ // Set up in the ctor and then not touched. Called through by the streams on
+ // any thread.
+ WebrtcSendTransport mSendTransport;
+ WebrtcReceiveTransport mRecvTransport;
+
+ // Accessed only on the Call thread.
+ webrtc::AudioReceiveStreamInterface::Config mRecvStreamConfig;
+
+ // Written only on the Call thread. Guarded by mLock, except for reads on the
+ // Call thread.
+ webrtc::AudioReceiveStreamInterface* mRecvStream;
+
+ // Accessed only on the Call thread.
+ webrtc::AudioSendStream::Config mSendStreamConfig;
+
+ // Written only on the Call thread. Guarded by mLock, except for reads on the
+ // Call thread.
+ webrtc::AudioSendStream* mSendStream;
+
+ // If true => mSendStream started and not stopped
+ // Written only on the Call thread.
+ Atomic<bool> mSendStreamRunning;
+ // If true => mRecvStream started and not stopped
+ // Written only on the Call thread.
+ Atomic<bool> mRecvStreamRunning;
+
+ // Accessed only on the Call thread.
+ bool mDtmfEnabled;
+
+ mutable RWLock mLock MOZ_UNANNOTATED;  // Guards mRecvStream/mSendStream for off-Call-thread readers.
+
+ // Call worker thread. All access to mCall->Call() happens here.
+ const RefPtr<AbstractThread> mCallThread;
+
+ // Socket transport service thread. Any thread.
+ const nsCOMPtr<nsISerialEventTarget> mStsThread;
+
+ struct Control {
+ // Mirrors and events that map to AudioConduitControlInterface for control.
+ // Call thread only.
+ Mirror<bool> mReceiving;
+ Mirror<bool> mTransmitting;
+ Mirror<Ssrcs> mLocalSsrcs;
+ Mirror<std::string> mLocalCname;
+ Mirror<std::string> mMid;
+ Mirror<Ssrc> mRemoteSsrc;
+ Mirror<std::string> mSyncGroup;
+ Mirror<RtpExtList> mLocalRecvRtpExtensions;
+ Mirror<RtpExtList> mLocalSendRtpExtensions;
+ Mirror<Maybe<AudioCodecConfig>> mSendCodec;
+ Mirror<std::vector<AudioCodecConfig>> mRecvCodecs;
+ MediaEventListener mOnDtmfEventListener;
+
+ // For caching mRemoteSsrc, since another caller may change the remote ssrc
+ // in the stream config directly.
+ Ssrc mConfiguredRemoteSsrc = 0;
+ // For tracking changes to mSendCodec.
+ Maybe<AudioCodecConfig> mConfiguredSendCodec;
+ // For tracking changes to mRecvCodecs.
+ std::vector<AudioCodecConfig> mConfiguredRecvCodecs;
+
+ Control() = delete;
+ explicit Control(const RefPtr<AbstractThread>& aCallThread);
+ } mControl;
+
+ // WatchManager allowing Mirrors to trigger functions that will update the
+ // webrtc.org configuration.
+ WatchManager<WebrtcAudioConduit> mWatchManager;
+
+ // Accessed from mStsThread. Last successfully polled RTT
+ Maybe<DOMHighResTimeStamp> mRttSec;
+
+ // Call thread only. ssrc -> base_seq
+ std::map<uint32_t, uint16_t> mRtpSendBaseSeqs;
+ // libwebrtc network thread only. ssrc -> base_seq.
+ // To track changes needed to mRtpSendBaseSeqs.
+ std::map<uint32_t, uint16_t> mRtpSendBaseSeqs_n;
+
+ // Thread safe
+ Atomic<bool> mTransportActive = Atomic<bool>(false);
+ MediaEventProducer<void> mRtcpByeEvent;
+ MediaEventProducer<void> mRtcpTimeoutEvent;
+ MediaEventProducerExc<MediaPacket> mSenderRtpSendEvent;
+ MediaEventProducerExc<MediaPacket> mSenderRtcpSendEvent;
+ MediaEventProducerExc<MediaPacket> mReceiverRtcpSendEvent;
+
+ // Assigned and revoked on mStsThread. Listeners for receiving packets.
+ MediaEventListener mSenderRtcpEventListener; // Rtp-transmitting pipeline
+ MediaEventListener mReceiverRtcpEventListener; // Rtp-receiving pipeline
+ MediaEventListener mReceiverRtpEventListener; // Rtp-receiving pipeline
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/CallWorkerThread.h b/dom/media/webrtc/libwebrtcglue/CallWorkerThread.h
new file mode 100644
index 0000000000..12d21fbee4
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/CallWorkerThread.h
@@ -0,0 +1,116 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_CALLWORKERTHREAD_H_
+#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_CALLWORKERTHREAD_H_
+
+#include "mozilla/AbstractThread.h"
+#include "nsIDirectTaskDispatcher.h"
+#include "TaskQueueWrapper.h"
+
+namespace mozilla {
+
+// Implements AbstractThread for running things on the webrtc TaskQueue.
+// Webrtc TaskQueues are not refcounted so cannot implement AbstractThread
+// directly.
+class CallWorkerThread final : public AbstractThread,
+ public nsIDirectTaskDispatcher {
+ public:
+ NS_DECL_THREADSAFE_ISUPPORTS
+ NS_DECL_NSIDIRECTTASKDISPATCHER
+
+ explicit CallWorkerThread(
+ UniquePtr<TaskQueueWrapper<DeletionPolicy::NonBlocking>> aWebrtcTaskQueue)
+ : AbstractThread(aWebrtcTaskQueue->mTaskQueue->SupportsTailDispatch()),
+ mWebrtcTaskQueue(std::move(aWebrtcTaskQueue)) {}  // Takes sole ownership of the wrapper.
+
+ // AbstractThread overrides
+ nsresult Dispatch(already_AddRefed<nsIRunnable> aRunnable,
+ DispatchReason aReason) override;
+ bool IsCurrentThreadIn() const override;
+ TaskDispatcher& TailDispatcher() override;
+ nsIEventTarget* AsEventTarget() override;
+ NS_IMETHOD
+ DelayedDispatch(already_AddRefed<nsIRunnable> aEvent,
+ uint32_t aDelayMs) override;
+
+ NS_IMETHOD RegisterShutdownTask(nsITargetShutdownTask* aTask) override;
+ NS_IMETHOD UnregisterShutdownTask(nsITargetShutdownTask* aTask) override;
+
+ // The wrapped webrtc task queue; all overrides below forward to it.
+ const UniquePtr<TaskQueueWrapper<DeletionPolicy::NonBlocking>>
+ mWebrtcTaskQueue;
+
+ protected:
+ ~CallWorkerThread() = default;
+};
+
+NS_IMPL_ISUPPORTS(CallWorkerThread, nsIDirectTaskDispatcher,
+ nsISerialEventTarget, nsIEventTarget);  // Implements the refcounting declared by NS_DECL_THREADSAFE_ISUPPORTS.
+
+//-----------------------------------------------------------------------------
+// AbstractThread
+//-----------------------------------------------------------------------------
+
+nsresult CallWorkerThread::Dispatch(already_AddRefed<nsIRunnable> aRunnable,
+ DispatchReason aReason) {  // Wraps the runnable so it runs inside the webrtc task-queue context.
+ RefPtr<nsIRunnable> runnable = aRunnable;
+ return mWebrtcTaskQueue->mTaskQueue->Dispatch(
+ mWebrtcTaskQueue->CreateTaskRunner(std::move(runnable)), aReason);
+}
+
+bool CallWorkerThread::IsCurrentThreadIn() const {  // True only when on the TaskQueue AND inside the wrapper's context.
+ return mWebrtcTaskQueue->mTaskQueue->IsOnCurrentThreadInfallible() &&
+ mWebrtcTaskQueue->IsCurrent();
+}
+
+TaskDispatcher& CallWorkerThread::TailDispatcher() {  // Forwarded to the wrapped TaskQueue.
+ return mWebrtcTaskQueue->mTaskQueue->TailDispatcher();
+}
+
+nsIEventTarget* CallWorkerThread::AsEventTarget() {  // Forwarded to the wrapped TaskQueue.
+ return mWebrtcTaskQueue->mTaskQueue->AsEventTarget();
+}
+
+NS_IMETHODIMP
+CallWorkerThread::DelayedDispatch(already_AddRefed<nsIRunnable> aEvent,
+ uint32_t aDelayMs) {  // Like Dispatch(), but deferred by aDelayMs; runnable is wrapped the same way.
+ RefPtr<nsIRunnable> event = aEvent;
+ return mWebrtcTaskQueue->mTaskQueue->DelayedDispatch(
+ mWebrtcTaskQueue->CreateTaskRunner(std::move(event)), aDelayMs);
+}
+
+NS_IMETHODIMP CallWorkerThread::RegisterShutdownTask(
+ nsITargetShutdownTask* aTask) {  // Forwarded to the wrapped TaskQueue.
+ return mWebrtcTaskQueue->mTaskQueue->RegisterShutdownTask(aTask);
+}
+
+NS_IMETHODIMP CallWorkerThread::UnregisterShutdownTask(
+ nsITargetShutdownTask* aTask) {  // Forwarded to the wrapped TaskQueue.
+ return mWebrtcTaskQueue->mTaskQueue->UnregisterShutdownTask(aTask);
+}
+
+//-----------------------------------------------------------------------------
+// nsIDirectTaskDispatcher
+//-----------------------------------------------------------------------------
+
+NS_IMETHODIMP
+CallWorkerThread::DispatchDirectTask(already_AddRefed<nsIRunnable> aEvent) {  // Direct task is also wrapped for the webrtc context.
+ nsCOMPtr<nsIRunnable> event = aEvent;
+ return mWebrtcTaskQueue->mTaskQueue->DispatchDirectTask(
+ mWebrtcTaskQueue->CreateTaskRunner(std::move(event)));
+}
+
+NS_IMETHODIMP CallWorkerThread::DrainDirectTasks() {  // Forwarded to the wrapped TaskQueue.
+ return mWebrtcTaskQueue->mTaskQueue->DrainDirectTasks();
+}
+
+NS_IMETHODIMP CallWorkerThread::HaveDirectTasks(bool* aValue) {  // Forwarded to the wrapped TaskQueue.
+ return mWebrtcTaskQueue->mTaskQueue->HaveDirectTasks(aValue);
+}
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/CodecConfig.h b/dom/media/webrtc/libwebrtcglue/CodecConfig.h
new file mode 100644
index 0000000000..023ea98783
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/CodecConfig.h
@@ -0,0 +1,237 @@
+
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef CODEC_CONFIG_H_
+#define CODEC_CONFIG_H_
+
+#include <string>
+#include <vector>
+
+#include "common/EncodingConstraints.h"
+
+namespace mozilla {
+
+/**
+ * Minimalistic Audio Codec Config Params
+ */
+struct AudioCodecConfig {
+ /*
+ * The data-types for these properties mimic the
+ * corresponding webrtc::CodecInst data-types.
+ */
+ int mType;
+ std::string mName;
+ int mFreq;
+ int mChannels;
+
+ bool mFECEnabled;
+ bool mDtmfEnabled;
+ uint32_t mFrameSizeMs;
+ uint32_t mMaxFrameSizeMs;
+ uint32_t mMinFrameSizeMs;
+
+ // OPUS-specific
+ bool mDTXEnabled;
+ uint32_t mMaxAverageBitrate;
+ int mMaxPlaybackRate;
+ bool mCbrEnabled;
+
+ AudioCodecConfig(int type, std::string name, int freq, int channels,
+ bool FECEnabled)
+ : mType(type),
+ mName(name),
+ mFreq(freq),
+ mChannels(channels),
+ mFECEnabled(FECEnabled),
+ mDtmfEnabled(false),
+ mFrameSizeMs(0),
+ mMaxFrameSizeMs(0),
+ mMinFrameSizeMs(0),
+ mDTXEnabled(false),
+ mMaxAverageBitrate(0),
+ mMaxPlaybackRate(0),
+ mCbrEnabled(false) {}  // Remaining fields default to disabled/0; set explicitly after construction.
+
+ // Structural equality over every field.
+ bool operator==(const AudioCodecConfig& aOther) const {
+ return mType == aOther.mType && mName == aOther.mName &&
+ mFreq == aOther.mFreq && mChannels == aOther.mChannels &&
+ mFECEnabled == aOther.mFECEnabled &&
+ mDtmfEnabled == aOther.mDtmfEnabled &&
+ mFrameSizeMs == aOther.mFrameSizeMs &&
+ mMaxFrameSizeMs == aOther.mMaxFrameSizeMs &&
+ mMinFrameSizeMs == aOther.mMinFrameSizeMs &&
+ mDTXEnabled == aOther.mDTXEnabled &&
+ mMaxAverageBitrate == aOther.mMaxAverageBitrate &&
+ mMaxPlaybackRate == aOther.mMaxPlaybackRate &&
+ mCbrEnabled == aOther.mCbrEnabled;
+ }
+};
+
+/*
+ * Minimalistic video codec configuration
+ * More to be added later depending on the use-case
+ */
+
+#define MAX_SPROP_LEN 128
+
+// used for holding SDP negotiation results
+struct VideoCodecConfigH264 {
+ char sprop_parameter_sets[MAX_SPROP_LEN];  // Compared with strncmp over MAX_SPROP_LEN below.
+ int packetization_mode;
+ int profile_level_id;  // 24-bit profile/constraints/level; unpacked in the VideoCodecConfig ctor.
+ int tias_bw;
+
+ // Structural equality over every field.
+ bool operator==(const VideoCodecConfigH264& aOther) const {
+ return strncmp(sprop_parameter_sets, aOther.sprop_parameter_sets,
+ MAX_SPROP_LEN) == 0 &&
+ packetization_mode == aOther.packetization_mode &&
+ profile_level_id == aOther.profile_level_id &&
+ tias_bw == aOther.tias_bw;
+ }
+};
+
+// class so the std::strings can get freed more easily/reliably
+class VideoCodecConfig {
+ public:
+ /*
+ * The data-types for these properties mimic the
+ * corresponding webrtc::VideoCodec data-types.
+ */
+ int mType; // payload type
+ std::string mName;
+
+ std::vector<std::string> mAckFbTypes;
+ std::vector<std::string> mNackFbTypes;
+ std::vector<std::string> mCcmFbTypes;
+ // Don't pass mOtherFbTypes from JsepVideoCodecDescription because we'd have
+ // to drag SdpRtcpFbAttributeList::Feedback along too.
+ bool mRembFbSet;
+ bool mFECFbSet;
+ bool mTransportCCFbSet;
+
+ int mULPFECPayloadType;
+ int mREDPayloadType;
+ int mREDRTXPayloadType;
+ int mRTXPayloadType;
+
+ uint32_t mTias;
+ EncodingConstraints mEncodingConstraints;
+ struct Encoding {
+ std::string rid;
+ EncodingConstraints constraints;
+ bool active = true;
+ // TODO(bug 1744116): Use = default here
+ bool operator==(const Encoding& aOther) const {
+ return rid == aOther.rid && constraints == aOther.constraints &&
+ active == aOther.active;
+ }
+ };
+ std::vector<Encoding> mEncodings;
+ std::string mSpropParameterSets;
+ uint8_t mProfile;  // H.264 profile_idc; top byte of profile_level_id (see ctor).
+ uint8_t mConstraints;  // H.264 constraint flags; middle byte of profile_level_id.
+ uint8_t mLevel;  // H.264 level_idc; low byte of profile_level_id.
+ uint8_t mPacketizationMode;
+ // TODO: add external negotiated SPS/PPS
+
+ // TODO(bug 1744116): Use = default here
+ bool operator==(const VideoCodecConfig& aRhs) const {
+ return mType == aRhs.mType && mName == aRhs.mName &&
+ mAckFbTypes == aRhs.mAckFbTypes &&
+ mNackFbTypes == aRhs.mNackFbTypes &&
+ mCcmFbTypes == aRhs.mCcmFbTypes && mRembFbSet == aRhs.mRembFbSet &&
+ mFECFbSet == aRhs.mFECFbSet &&
+ mTransportCCFbSet == aRhs.mTransportCCFbSet &&
+ mULPFECPayloadType == aRhs.mULPFECPayloadType &&
+ mREDPayloadType == aRhs.mREDPayloadType &&
+ mREDRTXPayloadType == aRhs.mREDRTXPayloadType &&
+ mRTXPayloadType == aRhs.mRTXPayloadType && mTias == aRhs.mTias &&
+ mEncodingConstraints == aRhs.mEncodingConstraints &&
+ mEncodings == aRhs.mEncodings &&
+ mSpropParameterSets == aRhs.mSpropParameterSets &&
+ mProfile == aRhs.mProfile && mConstraints == aRhs.mConstraints &&
+ mLevel == aRhs.mLevel &&
+ mPacketizationMode == aRhs.mPacketizationMode;
+ }
+
+ VideoCodecConfig(int type, std::string name,
+ const EncodingConstraints& constraints,
+ const struct VideoCodecConfigH264* h264 = nullptr)
+ : mType(type),
+ mName(name),
+ mRembFbSet(false),
+ mFECFbSet(false),
+ mTransportCCFbSet(false),
+ mULPFECPayloadType(-1),  // -1 throughout means "payload type not negotiated".
+ mREDPayloadType(-1),
+ mREDRTXPayloadType(-1),
+ mRTXPayloadType(-1),
+ mTias(0),
+ mEncodingConstraints(constraints),
+ mProfile(0x42),  // Defaults used only when no h264 params are supplied.
+ mConstraints(0xE0),
+ mLevel(0x0C),
+ mPacketizationMode(1) {
+ if (h264) {
+ mProfile = (h264->profile_level_id & 0x00FF0000) >> 16;
+ mConstraints = (h264->profile_level_id & 0x0000FF00) >> 8;
+ mLevel = (h264->profile_level_id & 0x000000FF);
+ mPacketizationMode = h264->packetization_mode;
+ mSpropParameterSets = h264->sprop_parameter_sets;
+ }
+ }
+
+ // True when both configs have the same number of encodings and each pair of
+ // encodings agrees on resolution constraints.
+ bool ResolutionEquals(const VideoCodecConfig& aConfig) const {
+ if (mEncodings.size() != aConfig.mEncodings.size()) {
+ return false;
+ }
+ for (size_t i = 0; i < mEncodings.size(); ++i) {
+ if (!mEncodings[i].constraints.ResolutionEquals(
+ aConfig.mEncodings[i].constraints)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ // Nothing seems to use this right now. Do we intend to support this
+ // someday?
+ bool RtcpFbAckIsSet(const std::string& type) const {
+ for (auto i = mAckFbTypes.begin(); i != mAckFbTypes.end(); ++i) {
+ if (*i == type) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ bool RtcpFbNackIsSet(const std::string& type) const {
+ for (auto i = mNackFbTypes.begin(); i != mNackFbTypes.end(); ++i) {
+ if (*i == type) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ bool RtcpFbCcmIsSet(const std::string& type) const {
+ for (auto i = mCcmFbTypes.begin(); i != mCcmFbTypes.end(); ++i) {
+ if (*i == type) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ bool RtcpFbRembIsSet() const { return mRembFbSet; }
+
+ bool RtcpFbFECIsSet() const { return mFECFbSet; }
+
+ bool RtcpFbTransportCCIsSet() const { return mTransportCCFbSet; }
+
+ bool RtxPayloadTypeIsSet() const { return mRTXPayloadType != -1; }
+};
+} // namespace mozilla
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp
new file mode 100644
index 0000000000..ccadd846e2
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp
@@ -0,0 +1,22 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebrtcGmpVideoCodec.h"
+#include "GmpVideoCodec.h"
+
+namespace mozilla {
+
+WebrtcVideoEncoder* GmpVideoCodec::CreateEncoder(
+ const webrtc::SdpVideoFormat& aFormat, std::string aPCHandle) {  // Returns a new proxy wrapping a GMP-backed encoder.
+ return new WebrtcVideoEncoderProxy(
+ new WebrtcGmpVideoEncoder(aFormat, std::move(aPCHandle)));
+}
+
+WebrtcVideoDecoder* GmpVideoCodec::CreateDecoder(std::string aPCHandle,
+ TrackingId aTrackingId) {  // Returns a new GMP-backed decoder proxy.
+ return new WebrtcVideoDecoderProxy(std::move(aPCHandle),
+ std::move(aTrackingId));
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h
new file mode 100644
index 0000000000..caf125c809
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h
@@ -0,0 +1,27 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef GMPVIDEOCODEC_H_
+#define GMPVIDEOCODEC_H_
+
+#include <string>
+
+#include "PerformanceRecorder.h"
+
+namespace mozilla {
+
+class WebrtcVideoDecoder;
+class WebrtcVideoEncoder;
+
+class GmpVideoCodec {  // Static factory for GMP-backed video encoder/decoder proxies.
+ public:
+ static WebrtcVideoEncoder* CreateEncoder(
+ const webrtc::SdpVideoFormat& aFormat, std::string aPCHandle);
+ static WebrtcVideoDecoder* CreateDecoder(std::string aPCHandle,
+ TrackingId aTrackingId);
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h b/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h
new file mode 100644
index 0000000000..892b9f958e
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h
@@ -0,0 +1,68 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_MEDIACONDUITCONTROL_H_
+#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_MEDIACONDUITCONTROL_H_
+
+#include "jsapi/RTCDTMFSender.h"
+#include "MediaConduitInterface.h"
+#include "mozilla/StateMirroring.h"
+#include "RtpRtcpConfig.h"
+
+namespace mozilla {
+
+/**
+ * These are the interfaces used to control the async conduits. Some parameters
+ * are common, and some are tied to the conduit type. See
+ * MediaSessionConduit::InitConduitControl for how they are used.
+ *
+ * Put simply, the implementer of the interfaces below may set its canonicals on
+ * any thread, and the conduits will react to those changes accordingly, on
+ * their dedicated worker thread. One instance of these interfaces could control
+ * multiple conduits as each canonical can connect to any number of mirrors.
+ */
+
+class MediaConduitControlInterface {  // Control parameters common to audio and video conduits.
+ public:
+ virtual AbstractCanonical<bool>* CanonicalReceiving() = 0;
+ virtual AbstractCanonical<bool>* CanonicalTransmitting() = 0;
+ virtual AbstractCanonical<Ssrcs>* CanonicalLocalSsrcs() = 0;
+ virtual AbstractCanonical<std::string>* CanonicalLocalCname() = 0;
+ virtual AbstractCanonical<std::string>* CanonicalMid() = 0;
+ virtual AbstractCanonical<Ssrc>* CanonicalRemoteSsrc() = 0;
+ virtual AbstractCanonical<std::string>* CanonicalSyncGroup() = 0;
+ virtual AbstractCanonical<RtpExtList>* CanonicalLocalRecvRtpExtensions() = 0;
+ virtual AbstractCanonical<RtpExtList>* CanonicalLocalSendRtpExtensions() = 0;
+};
+
+class AudioConduitControlInterface : public MediaConduitControlInterface {  // Adds audio codec config and DTMF control.
+ public:
+ virtual AbstractCanonical<Maybe<AudioCodecConfig>>*
+ CanonicalAudioSendCodec() = 0;
+ virtual AbstractCanonical<std::vector<AudioCodecConfig>>*
+ CanonicalAudioRecvCodecs() = 0;
+ virtual MediaEventSource<DtmfEvent>& OnDtmfEvent() = 0;
+};
+
+class VideoConduitControlInterface : public MediaConduitControlInterface {  // Adds video codec, RTX and RTP/RTCP config control.
+ public:
+ virtual AbstractCanonical<Ssrcs>* CanonicalLocalVideoRtxSsrcs() = 0;
+ virtual AbstractCanonical<Ssrc>* CanonicalRemoteVideoRtxSsrc() = 0;
+ virtual AbstractCanonical<Maybe<VideoCodecConfig>>*
+ CanonicalVideoSendCodec() = 0;
+ virtual AbstractCanonical<Maybe<RtpRtcpConfig>>*
+ CanonicalVideoSendRtpRtcpConfig() = 0;
+ virtual AbstractCanonical<std::vector<VideoCodecConfig>>*
+ CanonicalVideoRecvCodecs() = 0;
+ virtual AbstractCanonical<Maybe<RtpRtcpConfig>>*
+ CanonicalVideoRecvRtpRtcpConfig() = 0;
+ virtual AbstractCanonical<webrtc::VideoCodecMode>*
+ CanonicalVideoCodecMode() = 0;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h b/dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h
new file mode 100644
index 0000000000..34487d77a0
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h
@@ -0,0 +1,46 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MEDIA_SESSION_ERRORS_H_
+#define MEDIA_SESSION_ERRORS_H_
+
+namespace mozilla {
+enum MediaConduitErrorCode {
+ kMediaConduitNoError = 0, // 0 for success; greater than 0 implies error
+ kMediaConduitSessionNotInited =
+ 10100, // Session not initialized.10100 serves as
+ // base for the conduit errors
+ kMediaConduitMalformedArgument, // Malformed input to Conduit API
+ kMediaConduitCaptureError, // WebRTC capture APIs failed
+ kMediaConduitInvalidSendCodec, // Wrong Send codec
+ kMediaConduitInvalidReceiveCodec, // Wrong Recv Codec
+ kMediaConduitCodecInUse, // Already applied Codec
+ kMediaConduitInvalidRenderer, // Null or Wrong Renderer object
+ kMediaConduitRendererFail, // Add Render called multiple times
+ kMediaConduitSendingAlready, // Engine already transmitting
+ kMediaConduitReceivingAlready, // Engine already receiving
+ kMediaConduitTransportRegistrationFail, // Null or wrong transport interface
+ kMediaConduitInvalidTransport, // Null or wrong transport interface
+ kMediaConduitChannelError, // Configuration Error
+ kMediaConduitSocketError, // Media Engine transport socket error
+ kMediaConduitRTPRTCPModuleError, // Couldn't start RTP/RTCP processing
+ kMediaConduitRTPProcessingFailed, // Processing incoming RTP frame failed
+ kMediaConduitUnknownError, // More information can be found in logs
+ kMediaConduitExternalRecordingError, // Couldn't start external recording
+ kMediaConduitRecordingError, // Runtime recording error
+ kMediaConduitExternalPlayoutError, // Couldn't start external playout
+ kMediaConduitPlayoutError, // Runtime playout error
+ kMediaConduitMTUError, // Can't set MTU
+ kMediaConduitRTCPStatusError, // Can't set RTCP mode
+ kMediaConduitKeyFrameRequestError, // Can't set KeyFrameRequest mode
+ kMediaConduitNACKStatusError, // Can't set NACK mode
+ kMediaConduitTMMBRStatusError, // Can't set TMMBR mode
+ kMediaConduitFECStatusError, // Can't set FEC mode
+ kMediaConduitHybridNACKFECStatusError, // Can't set Hybrid NACK / FEC mode
+ kMediaConduitVideoSendStreamError // WebRTC video send stream failure
+};
+
+}
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp
new file mode 100644
index 0000000000..c0718739e9
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp
@@ -0,0 +1,151 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaConduitInterface.h"
+
+#include "nsTArray.h"
+#include "mozilla/Assertions.h"
+#include "MainThreadUtils.h"
+#include "SystemTime.h"
+
+#include "system_wrappers/include/clock.h"
+
+namespace mozilla {
+
+void MediaSessionConduit::GetRtpSources(
+ nsTArray<dom::RTCRtpSourceEntry>& outSources) const {
+ MOZ_ASSERT(NS_IsMainThread());
+ if (mSourcesUpdateNeeded) {
+ UpdateRtpSources(GetUpstreamRtpSources());
+ OnSourcesUpdated();
+ }
+ outSources.Clear();
+ for (auto& [key, entry] : mSourcesCache) {
+ (void)key;
+ outSources.AppendElement(entry);
+ }
+
+ struct TimestampComparator {
+ bool LessThan(const dom::RTCRtpSourceEntry& aLhs,
+ const dom::RTCRtpSourceEntry& aRhs) const {
+ // Sort descending!
+ return aLhs.mTimestamp > aRhs.mTimestamp;
+ }
+
+ bool Equals(const dom::RTCRtpSourceEntry& aLhs,
+ const dom::RTCRtpSourceEntry& aRhs) const {
+ return aLhs.mTimestamp == aRhs.mTimestamp;
+ }
+ };
+
+ // *sigh* We have to re-sort this by JS timestamp; we can run into cases
+ // where the libwebrtc timestamps are not in exactly the same order as JS
+ // timestamps due to clock differences (wibbly-wobbly, timey-wimey stuff)
+ outSources.Sort(TimestampComparator());
+}
+
+static double rtpToDomAudioLevel(uint8_t aAudioLevel) {
+ if (aAudioLevel == 127) {
+ // Spec indicates that a value of 127 should be set to 0
+ return 0;
+ }
+
+ // All other values are calculated as 10^(-rfc_level/20)
+ return std::pow(10, -aAudioLevel / 20.0);
+}
+
+void MediaSessionConduit::UpdateRtpSources(
+ const std::vector<webrtc::RtpSource>& aSources) const {
+ MOZ_ASSERT(NS_IsMainThread());
+ // Empty out the cache; we'll copy things back as needed
+ auto cache = std::move(mSourcesCache);
+
+ for (const auto& source : aSources) {
+ SourceKey key(source);
+ auto it = cache.find(key);
+ if (it != cache.end()) {
+ // This source entry was already in the cache, and should continue to be
+ // present in exactly the same form as before. This means we do _not_
+ // want to perform the timestamp adjustment again, since it might yield a
+ // slightly different result. This is why we copy this entry from the old
+ // cache instead of simply rebuilding it, and is also why we key the
+ // cache based on timestamp (keying the cache based on timestamp also
+ // gets us the ordering we want, conveniently).
+ mSourcesCache[key] = it->second;
+ continue;
+ }
+
+ // This is something we did not already have in the cache.
+ dom::RTCRtpSourceEntry domEntry;
+ domEntry.mSource = source.source_id();
+ switch (source.source_type()) {
+ case webrtc::RtpSourceType::SSRC:
+ domEntry.mSourceType = dom::RTCRtpSourceEntryType::Synchronization;
+ break;
+ case webrtc::RtpSourceType::CSRC:
+ domEntry.mSourceType = dom::RTCRtpSourceEntryType::Contributing;
+ break;
+ default:
+ MOZ_CRASH("Unexpected RTCRtpSourceEntryType");
+ }
+
+ if (source.audio_level()) {
+ domEntry.mAudioLevel.Construct(rtpToDomAudioLevel(*source.audio_level()));
+ }
+
+ // These timestamps are always **rounded** to milliseconds. That means they
+ // can jump up to half a millisecond into the future. We compensate for that
+ // here so that things seem consistent to js.
+ domEntry.mTimestamp = GetTimestampMaker().ReduceRealtimePrecision(
+ webrtc::Timestamp::Millis(source.timestamp_ms()) -
+ webrtc::TimeDelta::Micros(500));
+ domEntry.mRtpTimestamp = source.rtp_timestamp();
+ mSourcesCache[key] = domEntry;
+ }
+}
+
+void MediaSessionConduit::OnSourcesUpdated() const {
+ MOZ_ASSERT(NS_IsMainThread());
+ MOZ_ASSERT(mSourcesUpdateNeeded);
+ mSourcesUpdateNeeded = false;
+ // Reset the updateNeeded flag and clear the cache in a direct task, i.e.,
+ // as soon as the current task has finished.
+ AbstractThread::GetCurrent()->TailDispatcher().AddDirectTask(
+ NS_NewRunnableFunction(
+ __func__, [this, self = RefPtr<const MediaSessionConduit>(this)] {
+ mSourcesUpdateNeeded = true;
+ mSourcesCache.clear();
+ }));
+}
+
+void MediaSessionConduit::InsertAudioLevelForContributingSource(
+ const uint32_t aCsrcSource, const int64_t aTimestamp,
+ const uint32_t aRtpTimestamp, const bool aHasAudioLevel,
+ const uint8_t aAudioLevel) {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ if (mSourcesUpdateNeeded) {
+ OnSourcesUpdated();
+ }
+
+ dom::RTCRtpSourceEntry domEntry;
+ domEntry.mSource = aCsrcSource;
+ domEntry.mSourceType = dom::RTCRtpSourceEntryType::Contributing;
+ domEntry.mTimestamp = aTimestamp;
+ domEntry.mRtpTimestamp = aRtpTimestamp;
+ if (aHasAudioLevel) {
+ domEntry.mAudioLevel.Construct(rtpToDomAudioLevel(aAudioLevel));
+ }
+
+ webrtc::Timestamp libwebrtcNow = GetTimestampMaker().GetNowRealtime();
+ double jsNow = GetTimestampMaker().ReduceRealtimePrecision(libwebrtcNow);
+ double ago = jsNow - aTimestamp;
+ webrtc::Timestamp convertedTimestamp =
+ libwebrtcNow - webrtc::TimeDelta::Millis(ago);
+
+ SourceKey key(convertedTimestamp.ms<uint32_t>(), aCsrcSource);
+ mSourcesCache[key] = domEntry;
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h
new file mode 100644
index 0000000000..e7a9d70054
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h
@@ -0,0 +1,493 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MEDIA_CONDUIT_ABSTRACTION_
+#define MEDIA_CONDUIT_ABSTRACTION_
+
+#include <vector>
+#include <functional>
+#include <map>
+
+#include "CodecConfig.h"
+#include "ImageContainer.h"
+#include "jsapi/RTCStatsReport.h"
+#include "MediaConduitErrors.h"
+#include "mozilla/media/MediaUtils.h"
+#include "mozilla/MozPromise.h"
+#include "VideoTypes.h"
+#include "WebrtcVideoCodecFactory.h"
+#include "nsTArray.h"
+#include "mozilla/dom/RTCRtpSourcesBinding.h"
+#include "PerformanceRecorder.h"
+#include "transport/mediapacket.h"
+
+// libwebrtc includes
+#include "api/audio/audio_frame.h"
+#include "api/call/transport.h"
+#include "api/rtp_headers.h"
+#include "api/rtp_parameters.h"
+#include "api/transport/rtp/rtp_source.h"
+#include "api/video/video_frame_buffer.h"
+#include "call/audio_receive_stream.h"
+#include "call/audio_send_stream.h"
+#include "call/call_basic_stats.h"
+#include "call/video_receive_stream.h"
+#include "call/video_send_stream.h"
+#include "rtc_base/copy_on_write_buffer.h"
+
+namespace webrtc {
+class VideoFrame;
+}
+
+namespace mozilla {
+namespace dom {
+struct RTCRtpSourceEntry;
+}
+
+namespace dom {
+struct RTCRtpSourceEntry;
+}
+
+enum class MediaSessionConduitLocalDirection : int { kSend, kRecv };
+
+class VideoConduitControlInterface;
+class AudioConduitControlInterface;
+class VideoSessionConduit;
+class AudioSessionConduit;
+class WebrtcCallWrapper;
+
+using RtpExtList = std::vector<webrtc::RtpExtension>;
+using Ssrc = uint32_t;
+using Ssrcs = std::vector<uint32_t>;
+
+/**
+ * 1. Abstract renderer for video data
+ * 2. This class acts as abstract interface between the video-engine and
+ * video-engine agnostic renderer implementation.
+ * 3. Concrete implementation of this interface is responsible for
+ * processing and/or rendering the obtained raw video frame to appropriate
+ * output, say, <video>
+ */
+class VideoRenderer {
+ protected:
+ virtual ~VideoRenderer() {}
+
+ public:
+ /**
+ * Callback Function reporting any change in the video-frame dimensions
+ * @param width: current width of the video @ decoder
+ * @param height: current height of the video @ decoder
+ */
+ virtual void FrameSizeChange(unsigned int width, unsigned int height) = 0;
+
+ /**
+ * Callback Function reporting decoded frame for processing.
+ * @param buffer: reference to decoded video frame
+ * @param buffer_size: size of the decoded frame
+ * @param time_stamp: Decoder timestamp, typically 90KHz as per RTP
+ * @param render_time: Wall-clock time at the decoder for synchronization
+ * purposes in milliseconds
+ * NOTE: If decoded video frame is passed through buffer, it is the
+ * responsibility of the concrete implementations of this class to own copy
+ * of the frame if needed for time longer than scope of this callback.
+ * Such implementations should be quick in processing the frames and return
+ * immediately.
+ */
+ virtual void RenderVideoFrame(const webrtc::VideoFrameBuffer& buffer,
+ uint32_t time_stamp, int64_t render_time) = 0;
+
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VideoRenderer)
+};
+
+/**
+ * Generic Interface for representing Audio/Video Session
+ * MediaSession conduit is identified by 2 main components
+ * 1. Attached Transport Interface (through events) for inbound and outbound RTP
+ * transport
+ * 2. Attached Renderer Interface for rendering media data off the network
+ * This class hides specifics of Media-Engine implementation from the consumers
+ * of this interface.
+ * Also provides codec configuration API for the media sent and received
+ */
+class MediaSessionConduit {
+ protected:
+ virtual ~MediaSessionConduit() {}
+
+ public:
+ enum Type { AUDIO, VIDEO };
+ enum class PacketType { RTP, RTCP };
+
+ static std::string LocalDirectionToString(
+ const MediaSessionConduitLocalDirection aDirection) {
+ return aDirection == MediaSessionConduitLocalDirection::kSend ? "send"
+ : "receive";
+ }
+
+ virtual Type type() const = 0;
+
+ // Call thread only
+ virtual Maybe<int> ActiveSendPayloadType() const = 0;
+ virtual Maybe<int> ActiveRecvPayloadType() const = 0;
+
+ // Whether transport is currently sending and receiving packets
+ virtual void SetTransportActive(bool aActive) = 0;
+
+ // Sending packets
+ virtual MediaEventSourceExc<MediaPacket>& SenderRtpSendEvent() = 0;
+ virtual MediaEventSourceExc<MediaPacket>& SenderRtcpSendEvent() = 0;
+ virtual MediaEventSourceExc<MediaPacket>& ReceiverRtcpSendEvent() = 0;
+
+ // Receiving packets...
+ // from an rtp-receiving pipeline
+ virtual void ConnectReceiverRtpEvent(
+ MediaEventSourceExc<MediaPacket, webrtc::RTPHeader>& aEvent) = 0;
+ // from an rtp-receiving pipeline
+ virtual void ConnectReceiverRtcpEvent(
+ MediaEventSourceExc<MediaPacket>& aEvent) = 0;
+ // from an rtp-transmitting pipeline
+ virtual void ConnectSenderRtcpEvent(
+ MediaEventSourceExc<MediaPacket>& aEvent) = 0;
+
+ // Sts thread only.
+ virtual Maybe<uint16_t> RtpSendBaseSeqFor(uint32_t aSsrc) const = 0;
+
+ // Any thread.
+ virtual const dom::RTCStatsTimestampMaker& GetTimestampMaker() const = 0;
+
+ virtual Ssrcs GetLocalSSRCs() const = 0;
+
+ virtual Maybe<Ssrc> GetRemoteSSRC() const = 0;
+ virtual void UnsetRemoteSSRC(Ssrc aSsrc) = 0;
+
+ virtual void DisableSsrcChanges() = 0;
+
+ virtual bool HasCodecPluginID(uint64_t aPluginID) const = 0;
+
+ virtual MediaEventSource<void>& RtcpByeEvent() = 0;
+ virtual MediaEventSource<void>& RtcpTimeoutEvent() = 0;
+
+ virtual bool SendRtp(const uint8_t* aData, size_t aLength,
+ const webrtc::PacketOptions& aOptions) = 0;
+ virtual bool SendSenderRtcp(const uint8_t* aData, size_t aLength) = 0;
+ virtual bool SendReceiverRtcp(const uint8_t* aData, size_t aLength) = 0;
+
+ virtual void DeliverPacket(rtc::CopyOnWriteBuffer packet,
+ PacketType type) = 0;
+
+ virtual RefPtr<GenericPromise> Shutdown() = 0;
+
+ virtual Maybe<RefPtr<AudioSessionConduit>> AsAudioSessionConduit() = 0;
+ virtual Maybe<RefPtr<VideoSessionConduit>> AsVideoSessionConduit() = 0;
+
+ virtual Maybe<webrtc::CallBasicStats> GetCallStats() const = 0;
+
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaSessionConduit)
+
+ void GetRtpSources(nsTArray<dom::RTCRtpSourceEntry>& outSources) const;
+
+ // test-only: inserts fake CSRCs and audio level data.
+ // NB: fake data is only valid during the current main thread task.
+ void InsertAudioLevelForContributingSource(const uint32_t aCsrcSource,
+ const int64_t aTimestamp,
+ const uint32_t aRtpTimestamp,
+ const bool aHasAudioLevel,
+ const uint8_t aAudioLevel);
+
+ protected:
+ virtual std::vector<webrtc::RtpSource> GetUpstreamRtpSources() const = 0;
+
+ private:
+ void UpdateRtpSources(const std::vector<webrtc::RtpSource>& aSources) const;
+
+ // Marks the cache as having been updated in the current task, and keeps it
+ // stable until the current task is finished.
+ void OnSourcesUpdated() const;
+
+ // Accessed only on main thread. This exists for a couple of reasons:
+ // 1. The webrtc spec says that source stats are updated using a queued task;
+ // libwebrtc's internal representation of these stats is updated without
+ // any task queueing, which means we need a mainthread-only cache.
+ // 2. libwebrtc uses its own clock that is not consistent with the one we
+ // need to use for stats (the so-called JS timestamps), which means we need
+ // to adjust the timestamps. Since timestamp adjustment is inexact and will
+ // not necessarily yield exactly the same result if performed again later, we
+ // need to avoid performing it more than once for each entry, which means we
+ // need to remember both the JS timestamp (in dom::RTCRtpSourceEntry) and the
+ // libwebrtc timestamp (in SourceKey::mLibwebrtcTimestampMs).
+ class SourceKey {
+ public:
+ explicit SourceKey(const webrtc::RtpSource& aSource)
+ : SourceKey(aSource.timestamp_ms(), aSource.source_id()) {}
+
+ SourceKey(uint32_t aTimestamp, uint32_t aSrc)
+ : mLibwebrtcTimestampMs(aTimestamp), mSrc(aSrc) {}
+
+ // TODO: Once we support = default for this in our toolchain, do so
+ auto operator>(const SourceKey& aRhs) const {
+ if (mLibwebrtcTimestampMs == aRhs.mLibwebrtcTimestampMs) {
+ return mSrc > aRhs.mSrc;
+ }
+ return mLibwebrtcTimestampMs > aRhs.mLibwebrtcTimestampMs;
+ }
+
+ private:
+ uint32_t mLibwebrtcTimestampMs;
+ uint32_t mSrc;
+ };
+ mutable std::map<SourceKey, dom::RTCRtpSourceEntry, std::greater<SourceKey>>
+ mSourcesCache;
+ // Accessed only on main thread. A flag saying whether mSourcesCache needs
+ // updating. Ensures that get*Sources() appear stable from javascript
+ // throughout a main thread task, even though we don't follow the spec to the
+ // letter (dispatch a task to update the sources).
+ mutable bool mSourcesUpdateNeeded = true;
+};
+
+class WebrtcSendTransport : public webrtc::Transport {
+ // WeakRef to the owning conduit
+ MediaSessionConduit* mConduit;
+
+ public:
+ explicit WebrtcSendTransport(MediaSessionConduit* aConduit)
+ : mConduit(aConduit) {}
+ bool SendRtp(const uint8_t* aPacket, size_t aLength,
+ const webrtc::PacketOptions& aOptions) override {
+ return mConduit->SendRtp(aPacket, aLength, aOptions);
+ }
+ bool SendRtcp(const uint8_t* aPacket, size_t aLength) override {
+ return mConduit->SendSenderRtcp(aPacket, aLength);
+ }
+};
+
+class WebrtcReceiveTransport : public webrtc::Transport {
+ // WeakRef to the owning conduit
+ MediaSessionConduit* mConduit;
+
+ public:
+ explicit WebrtcReceiveTransport(MediaSessionConduit* aConduit)
+ : mConduit(aConduit) {}
+ bool SendRtp(const uint8_t* aPacket, size_t aLength,
+ const webrtc::PacketOptions& aOptions) override {
+ MOZ_CRASH("Unexpected RTP packet");
+ }
+ bool SendRtcp(const uint8_t* aPacket, size_t aLength) override {
+ return mConduit->SendReceiverRtcp(aPacket, aLength);
+ }
+};
+
+// Abstract base classes for external encoder/decoder.
+
+// Interface to help signal PluginIDs
+class CodecPluginID {
+ public:
+ virtual MediaEventSource<uint64_t>* InitPluginEvent() { return nullptr; }
+ virtual MediaEventSource<uint64_t>* ReleasePluginEvent() { return nullptr; }
+ virtual ~CodecPluginID() {}
+};
+
+class VideoEncoder : public CodecPluginID {
+ public:
+ virtual ~VideoEncoder() {}
+};
+
+class VideoDecoder : public CodecPluginID {
+ public:
+ virtual ~VideoDecoder() {}
+};
+
+/**
+ * MediaSessionConduit for video
+ * Refer to the comments on MediaSessionConduit above for overall
+ * information
+ */
+class VideoSessionConduit : public MediaSessionConduit {
+ public:
+ struct Options {
+ bool mVideoLatencyTestEnable = false;
+ // All in bps.
+ int mMinBitrate = 0;
+ int mStartBitrate = 0;
+ int mPrefMaxBitrate = 0;
+ int mMinBitrateEstimate = 0;
+ bool mDenoising = false;
+ bool mLockScaling = false;
+ uint8_t mSpatialLayers = 1;
+ uint8_t mTemporalLayers = 1;
+ };
+
+ /**
+ * Factory function to create and initialize a Video Conduit Session
+ * @param webrtc::Call instance shared by paired audio and video
+ * media conduits
+ * @param aOptions are a number of options, typically from prefs, used to
+ * configure the created VideoConduit.
+ * @param aPCHandle is a string representing the RTCPeerConnection that is
+ * creating this VideoConduit. This is used when reporting GMP plugin
+ * crashes.
+ * @result Concrete VideoSessionConduitObject or nullptr in the case
+ * of failure
+ */
+ static RefPtr<VideoSessionConduit> Create(
+ RefPtr<WebrtcCallWrapper> aCall,
+ nsCOMPtr<nsISerialEventTarget> aStsThread, Options aOptions,
+ std::string aPCHandle, const TrackingId& aRecvTrackingId);
+
+ enum FrameRequestType {
+ FrameRequestNone,
+ FrameRequestFir,
+ FrameRequestPli,
+ FrameRequestUnknown
+ };
+
+ VideoSessionConduit()
+ : mFrameRequestMethod(FrameRequestNone),
+ mUsingNackBasic(false),
+ mUsingTmmbr(false),
+ mUsingFEC(false) {}
+
+ virtual ~VideoSessionConduit() {}
+
+ Type type() const override { return VIDEO; }
+
+ Maybe<RefPtr<AudioSessionConduit>> AsAudioSessionConduit() override {
+ return Nothing();
+ }
+
+ Maybe<RefPtr<VideoSessionConduit>> AsVideoSessionConduit() override {
+ return Some(RefPtr<VideoSessionConduit>(this));
+ }
+
+ /**
+ * Hooks up mControl Mirrors with aControl Canonicals, and sets up
+ * mWatchManager to react on Mirror changes.
+ */
+ virtual void InitControl(VideoConduitControlInterface* aControl) = 0;
+
+ /**
+ * Function to attach Renderer end-point of the Media-Video conduit.
+ * @param aRenderer : Reference to the concrete Video renderer implementation
+ * Note: Multiple invocations of this API shall remove an existing renderer
+ * and attaches the new to the Conduit.
+ */
+ virtual MediaConduitErrorCode AttachRenderer(
+ RefPtr<mozilla::VideoRenderer> aRenderer) = 0;
+ virtual void DetachRenderer() = 0;
+
+ /**
+ * Function to deliver a capture video frame for encoding and transport.
+ * If the frame's timestamp is 0, it will be automatically generated.
+ *
+ * NOTE: ConfigureSendMediaCodec() must be called before this function can
+ * be invoked. This ensures the inserted video-frames can be
+ * transmitted by the conduit.
+ */
+ virtual MediaConduitErrorCode SendVideoFrame(webrtc::VideoFrame aFrame) = 0;
+
+ /**
+ * These methods allow unit tests to double-check that the
+ * rtcp-fb settings are as expected.
+ */
+ FrameRequestType FrameRequestMethod() const { return mFrameRequestMethod; }
+
+ bool UsingNackBasic() const { return mUsingNackBasic; }
+
+ bool UsingTmmbr() const { return mUsingTmmbr; }
+
+ bool UsingFEC() const { return mUsingFEC; }
+
+ virtual Maybe<webrtc::VideoReceiveStreamInterface::Stats> GetReceiverStats()
+ const = 0;
+ virtual Maybe<webrtc::VideoSendStream::Stats> GetSenderStats() const = 0;
+
+ virtual void CollectTelemetryData() = 0;
+
+ virtual bool AddFrameHistory(
+ dom::Sequence<dom::RTCVideoFrameHistoryInternal>* outHistories) const = 0;
+
+ protected:
+ /* RTCP feedback settings, for unit testing purposes */
+ FrameRequestType mFrameRequestMethod;
+ bool mUsingNackBasic;
+ bool mUsingTmmbr;
+ bool mUsingFEC;
+};
+
+/**
+ * MediaSessionConduit for audio
+ * Refer to the comments on MediaSessionConduit above for overall
+ * information
+ */
+class AudioSessionConduit : public MediaSessionConduit {
+ public:
+ /**
+ * Factory function to create and initialize an Audio Conduit Session
+ * @param webrtc::Call instance shared by paired audio and video
+ * media conduits
+ * @result Concrete AudioSessionConduitObject or nullptr in the case
+ * of failure
+ */
+ static RefPtr<AudioSessionConduit> Create(
+ RefPtr<WebrtcCallWrapper> aCall,
+ nsCOMPtr<nsISerialEventTarget> aStsThread);
+
+ virtual ~AudioSessionConduit() {}
+
+ Type type() const override { return AUDIO; }
+
+ Maybe<RefPtr<AudioSessionConduit>> AsAudioSessionConduit() override {
+ return Some(this);
+ }
+
+ Maybe<RefPtr<VideoSessionConduit>> AsVideoSessionConduit() override {
+ return Nothing();
+ }
+
+ /**
+ * Hooks up mControl Mirrors with aControl Canonicals, and sets up
+ * mWatchManager to react on Mirror changes.
+ */
+ virtual void InitControl(AudioConduitControlInterface* aControl) = 0;
+
+ /**
+ * Function to deliver externally captured audio sample for encoding and
+ * transport
+ * @param frame [in]: AudioFrame in upstream's format for forwarding to the
+ * send stream. Ownership is passed along.
+ * NOTE: ConfigureSendMediaCodec() SHOULD be called before this function can
+ * be invoked. This ensures the inserted audio-samples can be transmitted by
+ * the conduit.
+ */
+ virtual MediaConduitErrorCode SendAudioFrame(
+ std::unique_ptr<webrtc::AudioFrame> frame) = 0;
+
+ /**
+ * Function to grab a decoded audio-sample from the media engine for
+ * rendering / playout of length 10 milliseconds.
+ *
+ * @param samplingFreqHz [in]: Frequency of the sampling for playback in
+ * Hertz (16000, 32000,..)
+ * @param frame [in/out]: Pointer to an AudioFrame to which audio data will be
+ * copied
+ * NOTE: This function should be invoked every 10 milliseconds for the best
+ * performance
+ * NOTE: ConfigureRecvMediaCodec() SHOULD be called before this function can
+ * be invoked
+ * This ensures the decoded samples are ready for reading and playout is
+ * enabled.
+ */
+ virtual MediaConduitErrorCode GetAudioFrame(int32_t samplingFreqHz,
+ webrtc::AudioFrame* frame) = 0;
+
+ /**
+ * Checks if given sampling frequency is supported
+ * @param freq: Sampling rate (in Hz) to check
+ */
+ virtual bool IsSamplingFreqSupported(int freq) const = 0;
+
+ virtual Maybe<webrtc::AudioReceiveStreamInterface::Stats> GetReceiverStats()
+ const = 0;
+ virtual Maybe<webrtc::AudioSendStream::Stats> GetSenderStats() const = 0;
+};
+} // namespace mozilla
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/MediaDataCodec.cpp b/dom/media/webrtc/libwebrtcglue/MediaDataCodec.cpp
new file mode 100644
index 0000000000..895fcdc432
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaDataCodec.cpp
@@ -0,0 +1,70 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaDataCodec.h"
+
+#include "PDMFactory.h"
+#include "WebrtcGmpVideoCodec.h"
+#include "WebrtcMediaDataDecoderCodec.h"
+#include "WebrtcMediaDataEncoderCodec.h"
+#include "mozilla/StaticPrefs_media.h"
+
+namespace mozilla {
+
+/* static */
+WebrtcVideoEncoder* MediaDataCodec::CreateEncoder(
+ const webrtc::SdpVideoFormat& aFormat) {
+ if (!StaticPrefs::media_webrtc_platformencoder()) {
+ return nullptr;
+ }
+ if (!WebrtcMediaDataEncoder::CanCreate(
+ webrtc::PayloadStringToCodecType(aFormat.name))) {
+ return nullptr;
+ }
+
+ return new WebrtcVideoEncoderProxy(new WebrtcMediaDataEncoder(aFormat));
+}
+
+/* static */
+WebrtcVideoDecoder* MediaDataCodec::CreateDecoder(
+ webrtc::VideoCodecType aCodecType, TrackingId aTrackingId) {
+ switch (aCodecType) {
+ case webrtc::VideoCodecType::kVideoCodecVP8:
+ case webrtc::VideoCodecType::kVideoCodecVP9:
+ if (!StaticPrefs::media_navigator_mediadatadecoder_vpx_enabled()) {
+ return nullptr;
+ }
+ break;
+ case webrtc::VideoCodecType::kVideoCodecH264:
+ if (!StaticPrefs::media_navigator_mediadatadecoder_h264_enabled()) {
+ return nullptr;
+ }
+ break;
+ default:
+ return nullptr;
+ }
+
+ nsAutoCString codec;
+ switch (aCodecType) {
+ case webrtc::VideoCodecType::kVideoCodecVP8:
+ codec = "video/vp8";
+ break;
+ case webrtc::VideoCodecType::kVideoCodecVP9:
+ codec = "video/vp9";
+ break;
+ case webrtc::VideoCodecType::kVideoCodecH264:
+ codec = "video/avc";
+ break;
+ default:
+ return nullptr;
+ }
+ RefPtr<PDMFactory> pdm = new PDMFactory();
+ if (pdm->SupportsMimeType(codec) == media::DecodeSupport::Unsupported) {
+ return nullptr;
+ }
+
+ return new WebrtcMediaDataDecoder(codec, aTrackingId);
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/MediaDataCodec.h b/dom/media/webrtc/libwebrtcglue/MediaDataCodec.h
new file mode 100644
index 0000000000..b885d6ae0c
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaDataCodec.h
@@ -0,0 +1,32 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MEDIA_DATA_CODEC_H_
+#define MEDIA_DATA_CODEC_H_
+
+#include "MediaConduitInterface.h"
+
+namespace mozilla {
+
+class WebrtcVideoDecoder;
+class WebrtcVideoEncoder;
+class MediaDataCodec {
+ public:
+ /**
+ * Create encoder object for codec format |aFormat|. Return |nullptr| when
+ * failed.
+ */
+ static WebrtcVideoEncoder* CreateEncoder(
+ const webrtc::SdpVideoFormat& aFormat);
+
+ /**
+ * Create decoder object for codec type |aCodecType|. Return |nullptr| when
+ * failed.
+ */
+ static WebrtcVideoDecoder* CreateDecoder(webrtc::VideoCodecType aCodecType,
+ TrackingId aTrackingId);
+};
+} // namespace mozilla
+
+#endif // MEDIA_DATA_CODEC_H_
diff --git a/dom/media/webrtc/libwebrtcglue/RtpRtcpConfig.h b/dom/media/webrtc/libwebrtcglue/RtpRtcpConfig.h
new file mode 100644
index 0000000000..03a774ec3b
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/RtpRtcpConfig.h
@@ -0,0 +1,24 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef __RTPRTCP_CONFIG_H__
+#define __RTPRTCP_CONFIG_H__
+#include "api/rtp_headers.h"
+
+namespace mozilla {
+class RtpRtcpConfig {
+ public:
+ RtpRtcpConfig() = delete;
+ explicit RtpRtcpConfig(const webrtc::RtcpMode aMode) : mRtcpMode(aMode) {}
+ webrtc::RtcpMode GetRtcpMode() const { return mRtcpMode; }
+
+ bool operator==(const RtpRtcpConfig& aOther) const {
+ return mRtcpMode == aOther.mRtcpMode;
+ }
+
+ private:
+ webrtc::RtcpMode mRtcpMode;
+};
+} // namespace mozilla
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/RunningStat.h b/dom/media/webrtc/libwebrtcglue/RunningStat.h
new file mode 100644
index 0000000000..7a0e88f193
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/RunningStat.h
@@ -0,0 +1,48 @@
+/* Any copyright is dedicated to the Public Domain.
+ http://creativecommons.org/publicdomain/zero/1.0/ */
+/* Adapted from "Accurately computing running variance - John D. Cook"
+ http://www.johndcook.com/standard_deviation.html */
+
+#ifndef RUNNING_STAT_H_
+#define RUNNING_STAT_H_
+#include <math.h>
+
+namespace mozilla {
+
+class RunningStat {
+ public:
+ RunningStat() : mN(0), mOldM(0.0), mNewM(0.0), mOldS(0.0), mNewS(0.0) {}
+
+ void Clear() { mN = 0; }
+
+ void Push(double x) {
+ mN++;
+
+ // See Knuth TAOCP vol 2, 3rd edition, page 232
+ if (mN == 1) {
+ mOldM = mNewM = x;
+ mOldS = 0.0;
+ } else {
+ mNewM = mOldM + (x - mOldM) / mN;
+ mNewS = mOldS + (x - mOldM) * (x - mNewM);
+
+ // set up for next iteration
+ mOldM = mNewM;
+ mOldS = mNewS;
+ }
+ }
+
+ int NumDataValues() const { return mN; }
+
+ double Mean() const { return (mN > 0) ? mNewM : 0.0; }
+
+ double Variance() const { return (mN > 1) ? mNewS / (mN - 1) : 0.0; }
+
+ double StandardDeviation() const { return sqrt(Variance()); }
+
+ private:
+ int mN;
+ double mOldM, mNewM, mOldS, mNewS;
+};
+} // namespace mozilla
+#endif // RUNNING_STAT_H_
diff --git a/dom/media/webrtc/libwebrtcglue/SystemTime.cpp b/dom/media/webrtc/libwebrtcglue/SystemTime.cpp
new file mode 100644
index 0000000000..4b6bdd7cc2
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/SystemTime.cpp
@@ -0,0 +1,67 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "SystemTime.h"
+
+#include "TimeUnits.h"
+
+namespace mozilla {
+
+RTCStatsTimestampMakerRealtimeClock::RTCStatsTimestampMakerRealtimeClock(
+ const dom::RTCStatsTimestampMaker& aTimestampMaker)
+ : mTimestampMaker(aTimestampMaker) {}
+
+webrtc::Timestamp RTCStatsTimestampMakerRealtimeClock::CurrentTime() {
+ return mTimestampMaker.GetNowRealtime();
+}
+
+webrtc::NtpTime RTCStatsTimestampMakerRealtimeClock::ConvertTimestampToNtpTime(
+ webrtc::Timestamp aRealtime) {
+ return CreateNtp(mTimestampMaker.ConvertRealtimeTo1Jan1970(aRealtime) +
+ webrtc::TimeDelta::Seconds(webrtc::kNtpJan1970));
+}
+
+static TimeStamp CalculateBaseOffset(TimeStamp aNow) {
+ uint32_t offset = 24 * 60 * 60;
+ // If `base` has underflowed it is capped at 0, which is an invalid
+ // timestamp. Reduce the offset in case that happens.
+ TimeStamp base;
+ do {
+ base = aNow - TimeDuration::FromSeconds(offset);
+ offset /= 2;
+ } while (!base);
+ return base;
+}
+
+TimeStamp WebrtcSystemTimeBase() {
+ static TimeStamp now = TimeStamp::Now();
+ // Make it obvious that these timestamps use a different base than
+ // RTCStatsTimestampMakerRealtimeClock::CurrentTime.
+ static TimeStamp base = CalculateBaseOffset(now);
+ return base;
+}
+
+webrtc::Timestamp WebrtcSystemTime() {
+ const TimeStamp base = WebrtcSystemTimeBase();
+ const TimeStamp now = TimeStamp::Now();
+ return webrtc::Timestamp::Micros((now - base).ToMicroseconds());
+}
+
+webrtc::NtpTime CreateNtp(webrtc::Timestamp aTime) {
+ const int64_t timeNtpUs = aTime.us();
+ const uint32_t seconds = static_cast<uint32_t>(timeNtpUs / USECS_PER_S);
+
+ constexpr int64_t fractionsPerSec = 1LL << 32;
+ const int64_t fractionsUs = timeNtpUs % USECS_PER_S;
+ const uint32_t fractions = (fractionsUs * fractionsPerSec) / USECS_PER_S;
+
+ return webrtc::NtpTime(seconds, fractions);
+}
+} // namespace mozilla
+
+namespace rtc {
+int64_t SystemTimeNanos() { return mozilla::WebrtcSystemTime().us() * 1000; }
+} // namespace rtc
diff --git a/dom/media/webrtc/libwebrtcglue/SystemTime.h b/dom/media/webrtc/libwebrtcglue/SystemTime.h
new file mode 100644
index 0000000000..f2f7aba25a
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/SystemTime.h
@@ -0,0 +1,47 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_SYSTEMTIMENANOS_H_
+#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_SYSTEMTIMENANOS_H_
+
+#include "jsapi/RTCStatsReport.h"
+#include "mozilla/TimeStamp.h"
+#include "system_wrappers/include/clock.h"
+
+namespace mozilla {
+class RTCStatsTimestampMakerRealtimeClock : public webrtc::Clock {
+ public:
+ explicit RTCStatsTimestampMakerRealtimeClock(
+ const dom::RTCStatsTimestampMaker& aTimestampMaker);
+
+ webrtc::Timestamp CurrentTime() override;
+
+ // Upstream, this method depends on rtc::TimeUTCMicros for converting the
+ // monotonic system clock to Ntp, if only for the first call when deciding the
+ // Ntp offset.
+ // We override this to be able to use our own clock instead of
+ // rtc::TimeUTCMicros for ntp timestamps.
+ webrtc::NtpTime ConvertTimestampToNtpTime(
+ webrtc::Timestamp aRealtime) override;
+
+ const dom::RTCStatsTimestampMaker mTimestampMaker;
+
+ private:
+ webrtc::NtpTime WebrtcSystemTimeToNtp(TimeDuration aSystemTime);
+};
+
+// The time base used for WebrtcSystemTime(). Completely arbitrary. Constant.
+TimeStamp WebrtcSystemTimeBase();
+
+// The returned timestamp denotes the monotonic time passed since
+// WebrtcSystemTimeBase(). Libwebrtc uses this to track how time advances from a
+// specific point in time. It adds an offset to make the timestamps absolute.
+webrtc::Timestamp WebrtcSystemTime();
+
+webrtc::NtpTime CreateNtp(webrtc::Timestamp aTime);
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h b/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h
new file mode 100644
index 0000000000..8af40ab626
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h
@@ -0,0 +1,181 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_TASKQUEUEWRAPPER_H_
+#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_TASKQUEUEWRAPPER_H_
+
+#include "api/task_queue/task_queue_factory.h"
+#include "mozilla/DataMutex.h"
+#include "mozilla/ProfilerRunnable.h"
+#include "mozilla/TaskQueue.h"
+#include "VideoUtils.h"
+#include "mozilla/media/MediaUtils.h" // For media::Await
+
+namespace mozilla {
+
+enum class DeletionPolicy : uint8_t { Blocking, NonBlocking };
+
+/**
+ * A wrapper around Mozilla TaskQueues in the shape of a libwebrtc TaskQueue.
+ *
+ * Allows libwebrtc to use Mozilla threads where tooling, e.g. profiling, is set
+ * up and just works.
+ *
+ * Mozilla APIs like Runnables, MozPromise, etc. can also be used with the
+ * wrapped TaskQueue to run things on the right thread when interacting with
+ * libwebrtc.
+ */
+template <DeletionPolicy Deletion>
+class TaskQueueWrapper : public webrtc::TaskQueueBase {
+ public:
+  // Takes shared ownership of the underlying Mozilla TaskQueue. aName is used
+  // for profiler labels on dispatched runnables.
+  TaskQueueWrapper(RefPtr<TaskQueue> aTaskQueue, nsCString aName)
+      : mTaskQueue(std::move(aTaskQueue)), mName(std::move(aName)) {}
+  ~TaskQueueWrapper() = default;
+
+  // webrtc::TaskQueueBase deletion entry point. Marks the wrapper as shut
+  // down (so no further queued task bodies run), shuts down the underlying
+  // TaskQueue, and `delete this` once shutdown has completed. With
+  // DeletionPolicy::Blocking this method synchronously awaits that chain.
+  void Delete() override {
+    {
+      // Scope this to make sure it does not race against the promise chain we
+      // set up below.
+      auto hasShutdown = mHasShutdown.Lock();
+      *hasShutdown = true;
+    }
+
+    // Blocking deletion from a task on this very queue would deadlock.
+    MOZ_RELEASE_ASSERT(Deletion == DeletionPolicy::NonBlocking ||
+                       !mTaskQueue->IsOnCurrentThread());
+
+    nsCOMPtr<nsISerialEventTarget> backgroundTaskQueue;
+    NS_CreateBackgroundTaskQueue(__func__, getter_AddRefs(backgroundTaskQueue));
+    if (NS_WARN_IF(!backgroundTaskQueue)) {
+      // Ok... that's pretty broken. Try main instead.
+      MOZ_ASSERT(false);
+      backgroundTaskQueue = GetMainThreadSerialEventTarget();
+    }
+
+    RefPtr<GenericPromise> shutdownPromise = mTaskQueue->BeginShutdown()->Then(
+        backgroundTaskQueue, __func__, [this] {
+          // Wait until shutdown is complete, then delete for real. Although we
+          // prevent queued tasks from executing with mHasShutdown, that is a
+          // member variable, which means we still need to ensure that the
+          // queue is done executing tasks before destroying it.
+          delete this;
+          return GenericPromise::CreateAndResolve(true, __func__);
+        });
+    if constexpr (Deletion == DeletionPolicy::Blocking) {
+      media::Await(backgroundTaskQueue.forget(), shutdownPromise);
+    } else {
+      Unused << shutdownPromise;
+    }
+  }
+
+  // Wraps a libwebrtc task in a Runnable that sets the current-task-queue
+  // marker, skips execution after shutdown, and labels itself for profiling.
+  already_AddRefed<Runnable> CreateTaskRunner(
+      absl::AnyInvocable<void() &&> aTask) {
+    return NS_NewRunnableFunction(
+        "TaskQueueWrapper::CreateTaskRunner",
+        [this, task = std::move(aTask),
+         name = nsPrintfCString("TQ %s: webrtc::QueuedTask",
+                                mName.get())]() mutable {
+          CurrentTaskQueueSetter current(this);
+          // Hold the shutdown mutex while the task runs; see mHasShutdown.
+          auto hasShutdown = mHasShutdown.Lock();
+          if (*hasShutdown) {
+            return;
+          }
+          AUTO_PROFILE_FOLLOWING_RUNNABLE(name);
+          std::move(task)();
+        });
+  }
+
+  // Same as above, but for an existing nsIRunnable.
+  already_AddRefed<Runnable> CreateTaskRunner(nsCOMPtr<nsIRunnable> aRunnable) {
+    return NS_NewRunnableFunction(
+        "TaskQueueWrapper::CreateTaskRunner",
+        [this, runnable = std::move(aRunnable)]() mutable {
+          CurrentTaskQueueSetter current(this);
+          auto hasShutdown = mHasShutdown.Lock();
+          if (*hasShutdown) {
+            return;
+          }
+          AUTO_PROFILE_FOLLOWING_RUNNABLE(runnable);
+          runnable->Run();
+        });
+  }
+
+  void PostTask(absl::AnyInvocable<void() &&> aTask) override {
+    MOZ_ALWAYS_SUCCEEDS(
+        mTaskQueue->Dispatch(CreateTaskRunner(std::move(aTask))));
+  }
+
+  void PostDelayedTask(absl::AnyInvocable<void() &&> aTask,
+                       webrtc::TimeDelta aDelay) override {
+    if (aDelay.ms() == 0) {
+      // AbstractThread::DelayedDispatch doesn't support delay 0
+      PostTask(std::move(aTask));
+      return;
+    }
+    MOZ_ALWAYS_SUCCEEDS(mTaskQueue->DelayedDispatch(
+        CreateTaskRunner(std::move(aTask)), aDelay.ms()));
+  }
+
+  // No high-precision timer support; falls back to the regular delayed path.
+  void PostDelayedHighPrecisionTask(absl::AnyInvocable<void() &&> aTask,
+                                    webrtc::TimeDelta aDelay) override {
+    PostDelayedTask(std::move(aTask), aDelay);
+  }
+
+  const RefPtr<TaskQueue> mTaskQueue;
+  const nsCString mName;
+
+  // This is a recursive mutex because a TaskRunner holding this mutex while
+  // running its runnable may end up running other - tail dispatched - runnables
+  // too, and they'll again try to grab the mutex.
+  // The mutex must be held while running the runnable since otherwise there'd
+  // be a race between shutting down the underlying task queue and the runnable
+  // dispatching to that task queue (and we assert it succeeds in e.g.,
+  // PostTask()).
+  DataMutexBase<bool, RecursiveMutex> mHasShutdown{
+      false, "TaskQueueWrapper::mHasShutdown"};
+};
+
+// Routes UniquePtr<TaskQueueWrapper<...>> destruction through
+// webrtc::TaskQueueDeleter so the wrapper is torn down via Delete() (which
+// performs the shutdown dance) rather than by a plain destructor call.
+template <DeletionPolicy Deletion>
+class DefaultDelete<TaskQueueWrapper<Deletion>>
+    : public webrtc::TaskQueueDeleter {
+ public:
+  void operator()(TaskQueueWrapper<Deletion>* aWrapper) const {
+    // Forward to the libwebrtc deleter, which invokes Delete().
+    webrtc::TaskQueueDeleter::operator()(aWrapper);
+  }
+};
+
+// A webrtc::TaskQueueFactory that backs the task queues it hands out with
+// Mozilla TaskQueues running on the shared media thread pool.
+class SharedThreadPoolWebRtcTaskQueueFactory : public webrtc::TaskQueueFactory {
+ public:
+  SharedThreadPoolWebRtcTaskQueueFactory() = default;
+
+  // Creates a TaskQueueWrapper with the given name and deletion policy,
+  // backed by the shared pool for aThreadType.
+  template <DeletionPolicy Deletion>
+  UniquePtr<TaskQueueWrapper<Deletion>> CreateTaskQueueWrapper(
+      absl::string_view aName, bool aSupportTailDispatch, Priority aPriority,
+      MediaThreadType aThreadType = MediaThreadType::WEBRTC_WORKER) const {
+    // XXX Do something with aPriority
+    nsCString name(aName.data(), aName.size());
+    auto taskQueue = TaskQueue::Create(GetMediaThreadPool(aThreadType),
+                                       name.get(), aSupportTailDispatch);
+    return MakeUnique<TaskQueueWrapper<Deletion>>(std::move(taskQueue),
+                                                  std::move(name));
+  }
+
+  std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter>
+  CreateTaskQueue(absl::string_view aName, Priority aPriority) const override {
+    // libwebrtc will dispatch some tasks sync, i.e., block the origin thread
+    // until they've run, and that doesn't play nice with tail dispatching since
+    // there will never be a tail.
+    // DeletionPolicy::Blocking because this is for libwebrtc use and that's
+    // what they expect.
+    constexpr bool supportTailDispatch = false;
+    // absl::string_view is a trivially-copyable view, so it is passed by
+    // value; std::move on it would be meaningless.
+    return std::unique_ptr<webrtc::TaskQueueBase, webrtc::TaskQueueDeleter>(
+        CreateTaskQueueWrapper<DeletionPolicy::Blocking>(
+            aName, supportTailDispatch, aPriority)
+            .release(),
+        webrtc::TaskQueueDeleter());
+  }
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/VideoConduit.cpp b/dom/media/webrtc/libwebrtcglue/VideoConduit.cpp
new file mode 100644
index 0000000000..e2b4aae50d
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/VideoConduit.cpp
@@ -0,0 +1,1849 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "VideoConduit.h"
+
+#include <algorithm>
+#include <cinttypes>
+#include <cmath>
+
+#include "common/browser_logging/CSFLog.h"
+#include "common/YuvStamper.h"
+#include "GmpVideoCodec.h"
+#include "MediaConduitControl.h"
+#include "MediaDataCodec.h"
+#include "mozilla/dom/RTCRtpSourcesBinding.h"
+#include "mozilla/media/MediaUtils.h"
+#include "mozilla/StaticPrefs_media.h"
+#include "mozilla/TemplateLib.h"
+#include "nsIGfxInfo.h"
+#include "nsIPrefBranch.h"
+#include "nsIPrefService.h"
+#include "nsServiceManagerUtils.h"
+#include "RtpRtcpConfig.h"
+#include "transport/SrtpFlow.h" // For SRTP_MAX_EXPANSION
+#include "Tracing.h"
+#include "VideoStreamFactory.h"
+#include "WebrtcCallWrapper.h"
+#include "WebrtcGmpVideoCodec.h"
+
+// libwebrtc includes
+#include "api/transport/bitrate_settings.h"
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "api/video_codecs/video_codec.h"
+#include "media/base/media_constants.h"
+#include "media/engine/encoder_simulcast_proxy.h"
+#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+#ifdef MOZ_WIDGET_ANDROID
+# include "VideoEngine.h"
+#endif
+
+// for ntohs
+#ifdef _MSC_VER
+# include "Winsock2.h"
+#else
+# include <netinet/in.h>
+#endif
+
+#define INVALID_RTP_PAYLOAD 255 // valid payload types are 0 to 127
+
+namespace mozilla {
+
+namespace {
+
+const char* vcLogTag = "WebrtcVideoSessionConduit";
+#ifdef LOGTAG
+# undef LOGTAG
+#endif
+#define LOGTAG vcLogTag
+
+using namespace cricket;
+using LocalDirection = MediaSessionConduitLocalDirection;
+
+const int kNullPayloadType = -1;
+const char kRtcpFbCcmParamTmmbr[] = "tmmbr";
+
+// The number of frame buffers WebrtcVideoConduit may create before returning
+// errors.
+// Sometimes these are released synchronously but they can be forwarded all the
+// way to the encoder for asynchronous encoding. With a pool size of 5,
+// we allow 1 buffer for the current conversion, and 4 buffers to be queued at
+// the encoder.
+#define SCALER_BUFFER_POOL_SIZE 5
+
+// The pixel alignment to use for the highest resolution layer when simulcast
+// is active and one or more layers are being scaled.
+#define SIMULCAST_RESOLUTION_ALIGNMENT 16
+
+// Scales *width x *height down by the smallest integer divisor that divides
+// both dimensions exactly and brings width*height/(d*d) within max_fs. If no
+// such divisor exists, both dimensions are set to 0 to signal failure.
+// We could try to pick a better starting divisor, but it won't make any real
+// performance difference.
+template <class t>
+void ConstrainPreservingAspectRatioExact(uint32_t max_fs, t* width, t* height) {
+  const t smallestSide = std::min(*width, *height);
+  for (size_t divisor = 1; divisor < smallestSide; ++divisor) {
+    const bool dividesBoth =
+        (*width % divisor == 0) && (*height % divisor == 0);
+    if (!dividesBoth) {
+      continue;
+    }
+    if ((*width * *height) / (divisor * divisor) <= max_fs) {
+      *width /= divisor;
+      *height /= divisor;
+      return;
+    }
+  }
+
+  // No exact divisor satisfied the constraint.
+  *width = 0;
+  *height = 0;
+}
+
+/**
+ * Perform validation on the codecConfig to be applied
+ */
+MediaConduitErrorCode ValidateCodecConfig(const VideoCodecConfig& codecInfo) {
+  // A codec without a payload name cannot be configured; everything else is
+  // accepted here.
+  if (!codecInfo.mName.empty()) {
+    return kMediaConduitNoError;
+  }
+
+  CSFLogError(LOGTAG, "%s Empty Payload Name ", __FUNCTION__);
+  return kMediaConduitMalformedArgument;
+}
+
+// Returns aType unchanged when it is a codec type this conduit handles
+// (VP8, VP9, H264); anything else maps to kVideoCodecGeneric, which callers
+// treat as unsupported.
+webrtc::VideoCodecType SupportedCodecType(webrtc::VideoCodecType aType) {
+  const bool supported =
+      aType == webrtc::VideoCodecType::kVideoCodecVP8 ||
+      aType == webrtc::VideoCodecType::kVideoCodecVP9 ||
+      aType == webrtc::VideoCodecType::kVideoCodecH264;
+  return supported ? aType : webrtc::VideoCodecType::kVideoCodecGeneric;
+}
+
+// Call thread only.
+// Builds codec-specific encoder settings (VP8/VP9) and fills aParameters with
+// SDP format parameters (H264). Returns nullptr for codecs that use libwebrtc
+// defaults (e.g., H264).
+rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
+ConfigureVideoEncoderSettings(const VideoCodecConfig& aConfig,
+                              const WebrtcVideoConduit* aConduit,
+                              webrtc::SdpVideoFormat::Parameters& aParameters) {
+  bool is_screencast =
+      aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing;
+  // No automatic resizing when using simulcast or screencast.
+  bool automatic_resize = !is_screencast && aConfig.mEncodings.size() <= 1;
+  bool denoising;
+  bool codec_default_denoising = false;
+  if (is_screencast) {
+    denoising = false;
+  } else {
+    // Use codec default if video_noise_reduction is unset.
+    denoising = aConduit->Denoising();
+    codec_default_denoising = !denoising;
+  }
+
+  if (aConfig.mName == kH264CodecName) {
+    aParameters[kH264FmtpPacketizationMode] =
+        std::to_string(aConfig.mPacketizationMode);
+    {
+      // profile-level-id is six hex digits: two each for the profile,
+      // constraints and level bytes from the negotiated config.
+      std::stringstream ss;
+      ss << std::hex << std::setfill('0');
+      ss << std::setw(2) << static_cast<uint32_t>(aConfig.mProfile);
+      ss << std::setw(2) << static_cast<uint32_t>(aConfig.mConstraints);
+      ss << std::setw(2) << static_cast<uint32_t>(aConfig.mLevel);
+      std::string profileLevelId = ss.str();
+      auto parsedProfileLevelId =
+          webrtc::ParseH264ProfileLevelId(profileLevelId.c_str());
+      MOZ_DIAGNOSTIC_ASSERT(parsedProfileLevelId);
+      // Only pass the id on when libwebrtc can parse it back.
+      if (parsedProfileLevelId) {
+        aParameters[kH264FmtpProfileLevelId] = profileLevelId;
+      }
+    }
+    aParameters[kH264FmtpSpropParameterSets] = aConfig.mSpropParameterSets;
+  }
+  if (aConfig.mName == kVp8CodecName) {
+    webrtc::VideoCodecVP8 vp8_settings =
+        webrtc::VideoEncoder::GetDefaultVp8Settings();
+    vp8_settings.automaticResizeOn = automatic_resize;
+    // VP8 denoising is enabled by default.
+    vp8_settings.denoisingOn = codec_default_denoising ? true : denoising;
+    return rtc::scoped_refptr<
+        webrtc::VideoEncoderConfig::EncoderSpecificSettings>(
+        new rtc::RefCountedObject<
+            webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(
+            vp8_settings));
+  }
+  if (aConfig.mName == kVp9CodecName) {
+    webrtc::VideoCodecVP9 vp9_settings =
+        webrtc::VideoEncoder::GetDefaultVp9Settings();
+    if (!is_screencast) {
+      // Always configure only 1 spatial layer for screencapture as libwebrtc
+      // has some special requirements when SVC is active. For non-screencapture
+      // the spatial layers are experimentally configurable via a pref.
+      vp9_settings.numberOfSpatialLayers = aConduit->SpatialLayers();
+    }
+    // VP9 denoising is disabled by default.
+    vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
+    return rtc::scoped_refptr<
+        webrtc::VideoEncoderConfig::EncoderSpecificSettings>(
+        new rtc::RefCountedObject<
+            webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(
+            vp9_settings));
+  }
+  // No codec-specific settings for this codec.
+  return nullptr;
+}
+
+// Produces a cryptographically random, nonzero SSRC. Zero is rejected because
+// webrtc.org code has fits if you select an SSRC of 0.
+uint32_t GenerateRandomSSRC() {
+  uint32_t ssrc = 0;
+  while (ssrc == 0) {
+    SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc),
+                                       sizeof(ssrc));
+    MOZ_RELEASE_ASSERT(rv == SECSuccess);
+  }
+  return ssrc;
+}
+
+// TODO: Make this a defaulted operator when we have c++20 (bug 1731036).
+bool operator==(const rtc::VideoSinkWants& aThis,
+                const rtc::VideoSinkWants& aOther) {
+  // Compare only the members this file makes use of; expand this should we
+  // start consuming more of rtc::VideoSinkWants.
+  if (aThis.max_pixel_count != aOther.max_pixel_count) {
+    return false;
+  }
+  if (aThis.max_framerate_fps != aOther.max_framerate_fps) {
+    return false;
+  }
+  return aThis.resolution_alignment == aOther.resolution_alignment;
+}
+
+// TODO: Make this a defaulted operator when we have c++20 (bug 1731036).
+bool operator!=(const rtc::VideoSinkWants& aThis,
+                const rtc::VideoSinkWants& aOther) {
+  // Negation of the equality operator defined above.
+  return !(aThis == aOther);
+}
+
+// TODO: Make this a defaulted operator when we have c++20 (bug 1731036).
+// Field-wise inequality for the receive-stream RTP config. Used to detect
+// whether an updated config actually differs before applying it.
+bool operator!=(
+    const webrtc::VideoReceiveStreamInterface::Config::Rtp& aThis,
+    const webrtc::VideoReceiveStreamInterface::Config::Rtp& aOther) {
+  return aThis.remote_ssrc != aOther.remote_ssrc ||
+         aThis.local_ssrc != aOther.local_ssrc ||
+         aThis.rtcp_mode != aOther.rtcp_mode ||
+         aThis.rtcp_xr.receiver_reference_time_report !=
+             aOther.rtcp_xr.receiver_reference_time_report ||
+         aThis.transport_cc != aOther.transport_cc ||
+         aThis.remb != aOther.remb || aThis.tmmbr != aOther.tmmbr ||
+         aThis.keyframe_method != aOther.keyframe_method ||
+         aThis.lntf.enabled != aOther.lntf.enabled ||
+         aThis.nack.rtp_history_ms != aOther.nack.rtp_history_ms ||
+         aThis.ulpfec_payload_type != aOther.ulpfec_payload_type ||
+         aThis.red_payload_type != aOther.red_payload_type ||
+         aThis.rtx_ssrc != aOther.rtx_ssrc ||
+         aThis.protected_by_flexfec != aOther.protected_by_flexfec ||
+         aThis.rtx_associated_payload_types !=
+             aOther.rtx_associated_payload_types ||
+         aThis.raw_payload_types != aOther.raw_payload_types ||
+         aThis.extensions != aOther.extensions;
+}
+
+#ifdef DEBUG
+// TODO: Make this a defaulted operator when we have c++20 (bug 1731036).
+// Debug-only equality; needed for the MOZ_ASSERT sanity check after copying
+// the rtp config in OnControlConfigChange.
+bool operator==(
+    const webrtc::VideoReceiveStreamInterface::Config::Rtp& aThis,
+    const webrtc::VideoReceiveStreamInterface::Config::Rtp& aOther) {
+  return !(aThis != aOther);
+}
+#endif
+
+// TODO: Make this a defaulted operator when we have c++20 (bug 1731036).
+// Field-wise inequality for the send-stream RTP config. Used to decide
+// whether the send stream needs to be recreated after a config change.
+bool operator!=(const webrtc::RtpConfig& aThis,
+                const webrtc::RtpConfig& aOther) {
+  return aThis.ssrcs != aOther.ssrcs || aThis.rids != aOther.rids ||
+         aThis.mid != aOther.mid || aThis.rtcp_mode != aOther.rtcp_mode ||
+         aThis.max_packet_size != aOther.max_packet_size ||
+         aThis.extmap_allow_mixed != aOther.extmap_allow_mixed ||
+         aThis.extensions != aOther.extensions ||
+         aThis.payload_name != aOther.payload_name ||
+         aThis.payload_type != aOther.payload_type ||
+         aThis.raw_payload != aOther.raw_payload ||
+         aThis.lntf.enabled != aOther.lntf.enabled ||
+         aThis.nack.rtp_history_ms != aOther.nack.rtp_history_ms ||
+         !(aThis.ulpfec == aOther.ulpfec) ||
+         aThis.flexfec.payload_type != aOther.flexfec.payload_type ||
+         aThis.flexfec.ssrc != aOther.flexfec.ssrc ||
+         aThis.flexfec.protected_media_ssrcs !=
+             aOther.flexfec.protected_media_ssrcs ||
+         aThis.rtx.ssrcs != aOther.rtx.ssrcs ||
+         aThis.rtx.payload_type != aOther.rtx.payload_type ||
+         aThis.c_name != aOther.c_name;
+}
+
+#ifdef DEBUG
+// TODO: Make this a defaulted operator when we have c++20 (bug 1731036).
+// Debug-only equality; needed for the MOZ_ASSERT sanity check after copying
+// the rtp config in OnControlConfigChange.
+bool operator==(const webrtc::RtpConfig& aThis,
+                const webrtc::RtpConfig& aOther) {
+  return !(aThis != aOther);
+}
+#endif
+
+} // namespace
+
+/**
+ * Factory Method for VideoConduit
+ */
+RefPtr<VideoSessionConduit> VideoSessionConduit::Create(
+    RefPtr<WebrtcCallWrapper> aCall, nsCOMPtr<nsISerialEventTarget> aStsThread,
+    Options aOptions, std::string aPCHandle,
+    const TrackingId& aRecvTrackingId) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(aCall, "missing required parameter: aCall");
+  CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
+
+  // Release-build guard; the assert above compiles out.
+  if (!aCall) {
+    return nullptr;
+  }
+
+  RefPtr<WebrtcVideoConduit> conduit = MakeRefPtr<WebrtcVideoConduit>(
+      std::move(aCall), std::move(aStsThread), std::move(aOptions),
+      std::move(aPCHandle), aRecvTrackingId);
+  if (conduit->Init() != kMediaConduitNoError) {
+    CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
+    return nullptr;
+  }
+  CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
+  return conduit.forget();
+}
+
+// Initializes a Mirror member bound to the call thread, with a debug name
+// derived from the member's identifier.
+#define INIT_MIRROR(name, val) \
+  name(aCallThread, val, "WebrtcVideoConduit::Control::" #name " (Mirror)")
+WebrtcVideoConduit::Control::Control(const RefPtr<AbstractThread>& aCallThread)
+    : INIT_MIRROR(mReceiving, false),
+      INIT_MIRROR(mTransmitting, false),
+      INIT_MIRROR(mLocalSsrcs, Ssrcs()),
+      INIT_MIRROR(mLocalRtxSsrcs, Ssrcs()),
+      INIT_MIRROR(mLocalCname, std::string()),
+      INIT_MIRROR(mMid, std::string()),
+      INIT_MIRROR(mRemoteSsrc, 0),
+      INIT_MIRROR(mRemoteRtxSsrc, 0),
+      INIT_MIRROR(mSyncGroup, std::string()),
+      INIT_MIRROR(mLocalRecvRtpExtensions, RtpExtList()),
+      INIT_MIRROR(mLocalSendRtpExtensions, RtpExtList()),
+      INIT_MIRROR(mSendCodec, Nothing()),
+      INIT_MIRROR(mSendRtpRtcpConfig, Nothing()),
+      INIT_MIRROR(mRecvCodecs, std::vector<VideoCodecConfig>()),
+      INIT_MIRROR(mRecvRtpRtcpConfig, Nothing()),
+      INIT_MIRROR(mCodecMode, webrtc::VideoCodecMode::kRealtimeVideo) {}
+#undef INIT_MIRROR
+
+// Member-init only; stream creation happens later (see the DeleteStreams
+// requirement asserted in the destructor). Threads, factories and stream
+// configs are all derived from the Call wrapper and the provided options.
+WebrtcVideoConduit::WebrtcVideoConduit(
+    RefPtr<WebrtcCallWrapper> aCall, nsCOMPtr<nsISerialEventTarget> aStsThread,
+    Options aOptions, std::string aPCHandle, const TrackingId& aRecvTrackingId)
+    : mRendererMonitor("WebrtcVideoConduit::mRendererMonitor"),
+      mCallThread(aCall->mCallThread),
+      mStsThread(std::move(aStsThread)),
+      mControl(aCall->mCallThread),
+      mWatchManager(this, aCall->mCallThread),
+      mMutex("WebrtcVideoConduit::mMutex"),
+      mDecoderFactory(MakeUnique<WebrtcVideoDecoderFactory>(
+          mCallThread.get(), aPCHandle, aRecvTrackingId)),
+      mEncoderFactory(MakeUnique<WebrtcVideoEncoderFactory>(
+          mCallThread.get(), std::move(aPCHandle))),
+      mBufferPool(false, SCALER_BUFFER_POOL_SIZE),
+      mEngineTransmitting(false),
+      mEngineReceiving(false),
+      mVideoLatencyTestEnable(aOptions.mVideoLatencyTestEnable),
+      mMinBitrate(aOptions.mMinBitrate),
+      mStartBitrate(aOptions.mStartBitrate),
+      mPrefMaxBitrate(aOptions.mPrefMaxBitrate),
+      mMinBitrateEstimate(aOptions.mMinBitrateEstimate),
+      mDenoising(aOptions.mDenoising),
+      mLockScaling(aOptions.mLockScaling),
+      mSpatialLayers(aOptions.mSpatialLayers),
+      mTemporalLayers(aOptions.mTemporalLayers),
+      mCall(std::move(aCall)),
+      mSendTransport(this),
+      mRecvTransport(this),
+      mSendStreamConfig(&mSendTransport),
+      mVideoStreamFactory("WebrtcVideoConduit::mVideoStreamFactory"),
+      mRecvStreamConfig(&mRecvTransport) {
+  // This conduit observes RTCP events for its receive stream.
+  mRecvStreamConfig.rtp.rtcp_event_observer = this;
+}
+
+WebrtcVideoConduit::~WebrtcVideoConduit() {
+  CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
+
+  // Streams must be torn down explicitly before destruction.
+  MOZ_ASSERT(!mSendStream && !mRecvStream,
+             "Call DeleteStreams prior to ~WebrtcVideoConduit.");
+}
+
+// Connects a canonical value from the control interface to our local mirror
+// and registers a watch so that any change re-runs OnControlConfigChange.
+#define CONNECT(aCanonical, aMirror)                                          \
+  do {                                                                        \
+    (aMirror).Connect(aCanonical);                                            \
+    mWatchManager.Watch(aMirror, &WebrtcVideoConduit::OnControlConfigChange); \
+  } while (0)
+
+void WebrtcVideoConduit::InitControl(VideoConduitControlInterface* aControl) {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  CONNECT(aControl->CanonicalReceiving(), mControl.mReceiving);
+  CONNECT(aControl->CanonicalTransmitting(), mControl.mTransmitting);
+  CONNECT(aControl->CanonicalLocalSsrcs(), mControl.mLocalSsrcs);
+  CONNECT(aControl->CanonicalLocalVideoRtxSsrcs(), mControl.mLocalRtxSsrcs);
+  CONNECT(aControl->CanonicalLocalCname(), mControl.mLocalCname);
+  CONNECT(aControl->CanonicalMid(), mControl.mMid);
+  CONNECT(aControl->CanonicalRemoteSsrc(), mControl.mRemoteSsrc);
+  CONNECT(aControl->CanonicalRemoteVideoRtxSsrc(), mControl.mRemoteRtxSsrc);
+  CONNECT(aControl->CanonicalSyncGroup(), mControl.mSyncGroup);
+  CONNECT(aControl->CanonicalLocalRecvRtpExtensions(),
+          mControl.mLocalRecvRtpExtensions);
+  CONNECT(aControl->CanonicalLocalSendRtpExtensions(),
+          mControl.mLocalSendRtpExtensions);
+  CONNECT(aControl->CanonicalVideoSendCodec(), mControl.mSendCodec);
+  CONNECT(aControl->CanonicalVideoSendRtpRtcpConfig(),
+          mControl.mSendRtpRtcpConfig);
+  CONNECT(aControl->CanonicalVideoRecvCodecs(), mControl.mRecvCodecs);
+  CONNECT(aControl->CanonicalVideoRecvRtpRtcpConfig(),
+          mControl.mRecvRtpRtcpConfig);
+  CONNECT(aControl->CanonicalVideoCodecMode(), mControl.mCodecMode);
+}
+
+#undef CONNECT
+
+void WebrtcVideoConduit::OnControlConfigChange() {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+ bool encoderReconfigureNeeded = false;
+ bool remoteSsrcUpdateNeeded = false;
+ bool sendStreamRecreationNeeded = false;
+
+ if (mControl.mRemoteSsrc.Ref() != mControl.mConfiguredRemoteSsrc) {
+ mControl.mConfiguredRemoteSsrc = mControl.mRemoteSsrc;
+ remoteSsrcUpdateNeeded = true;
+ }
+
+ if (mControl.mRemoteRtxSsrc.Ref() != mControl.mConfiguredRemoteRtxSsrc) {
+ mControl.mConfiguredRemoteRtxSsrc = mControl.mRemoteRtxSsrc;
+ remoteSsrcUpdateNeeded = true;
+ }
+
+ if (mControl.mSyncGroup.Ref() != mRecvStreamConfig.sync_group) {
+ mRecvStreamConfig.sync_group = mControl.mSyncGroup;
+ }
+
+ if (mControl.mLocalRecvRtpExtensions.Ref() !=
+ mRecvStreamConfig.rtp.extensions) {
+ mRecvStreamConfig.rtp.extensions = mControl.mLocalRecvRtpExtensions;
+ }
+
+ if (const auto [codecConfigList, rtpRtcpConfig] = std::make_pair(
+ mControl.mRecvCodecs.Ref(), mControl.mRecvRtpRtcpConfig.Ref());
+ !codecConfigList.empty() && rtpRtcpConfig.isSome() &&
+ (codecConfigList != mControl.mConfiguredRecvCodecs ||
+ rtpRtcpConfig != mControl.mConfiguredRecvRtpRtcpConfig)) {
+ mControl.mConfiguredRecvCodecs = codecConfigList;
+ mControl.mConfiguredRecvRtpRtcpConfig = rtpRtcpConfig;
+
+ webrtc::VideoReceiveStreamInterface::Config::Rtp newRtp(
+ mRecvStreamConfig.rtp);
+ MOZ_ASSERT(newRtp == mRecvStreamConfig.rtp);
+ newRtp.rtx_associated_payload_types.clear();
+ newRtp.rtcp_mode = rtpRtcpConfig->GetRtcpMode();
+ newRtp.nack.rtp_history_ms = 0;
+ newRtp.remb = false;
+ newRtp.transport_cc = false;
+ newRtp.tmmbr = false;
+ newRtp.keyframe_method = webrtc::KeyFrameReqMethod::kNone;
+ newRtp.ulpfec_payload_type = kNullPayloadType;
+ newRtp.red_payload_type = kNullPayloadType;
+ bool use_fec = false;
+ bool configuredH264 = false;
+ std::vector<webrtc::VideoReceiveStreamInterface::Decoder> recv_codecs;
+
+ // Try Applying the codecs in the list
+ // we treat as success if at least one codec was applied and reception was
+ // started successfully.
+ for (const auto& codec_config : codecConfigList) {
+ if (auto condError = ValidateCodecConfig(codec_config);
+ condError != kMediaConduitNoError) {
+ CSFLogError(LOGTAG, "Invalid recv codec config for %s decoder: %i",
+ codec_config.mName.c_str(), condError);
+ continue;
+ }
+
+ if (codec_config.mName == kH264CodecName) {
+ // TODO(bug 1200768): We can only handle configuring one recv H264 codec
+ if (configuredH264) {
+ continue;
+ }
+ configuredH264 = true;
+ }
+
+ if (codec_config.mName == kUlpfecCodecName) {
+ newRtp.ulpfec_payload_type = codec_config.mType;
+ continue;
+ }
+
+ if (codec_config.mName == kRedCodecName) {
+ newRtp.red_payload_type = codec_config.mType;
+ continue;
+ }
+
+ if (SupportedCodecType(
+ webrtc::PayloadStringToCodecType(codec_config.mName)) ==
+ webrtc::VideoCodecType::kVideoCodecGeneric) {
+ CSFLogError(LOGTAG, "%s Unknown decoder type: %s", __FUNCTION__,
+ codec_config.mName.c_str());
+ continue;
+ }
+
+ // Check for the keyframe request type: PLI is preferred over FIR, and FIR
+ // is preferred over none.
+ if (codec_config.RtcpFbNackIsSet(kRtcpFbNackParamPli)) {
+ newRtp.keyframe_method = webrtc::KeyFrameReqMethod::kPliRtcp;
+ } else if (newRtp.keyframe_method !=
+ webrtc::KeyFrameReqMethod::kPliRtcp &&
+ codec_config.RtcpFbCcmIsSet(kRtcpFbCcmParamFir)) {
+ newRtp.keyframe_method = webrtc::KeyFrameReqMethod::kFirRtcp;
+ }
+
+ // What if codec A has Nack and REMB, and codec B has TMMBR, and codec C
+ // has none? In practice, that's not a useful configuration, and
+ // VideoReceiveStream::Config can't represent that, so simply union the
+ // (boolean) settings
+ if (codec_config.RtcpFbNackIsSet(kParamValueEmpty)) {
+ newRtp.nack.rtp_history_ms = 1000;
+ }
+ newRtp.tmmbr |= codec_config.RtcpFbCcmIsSet(kRtcpFbCcmParamTmmbr);
+ newRtp.remb |= codec_config.RtcpFbRembIsSet();
+ use_fec |= codec_config.RtcpFbFECIsSet();
+ newRtp.transport_cc |= codec_config.RtcpFbTransportCCIsSet();
+
+ if (codec_config.RtxPayloadTypeIsSet()) {
+ newRtp.rtx_associated_payload_types[codec_config.mRTXPayloadType] =
+ codec_config.mType;
+ }
+
+ auto& decoder = recv_codecs.emplace_back();
+ decoder.video_format = webrtc::SdpVideoFormat(codec_config.mName);
+ decoder.payload_type = codec_config.mType;
+ }
+
+ if (!use_fec) {
+ // Reset to defaults
+ newRtp.ulpfec_payload_type = kNullPayloadType;
+ newRtp.red_payload_type = kNullPayloadType;
+ }
+
+ // TODO: This would be simpler, but for some reason gives
+ // "error: invalid operands to binary expression
+ // ('webrtc::VideoReceiveStreamInterface::Decoder' and
+ // 'webrtc::VideoReceiveStreamInterface::Decoder')"
+ // if (recv_codecs != mRecvStreamConfig.decoders) {
+ if (!std::equal(recv_codecs.begin(), recv_codecs.end(),
+ mRecvStreamConfig.decoders.begin(),
+ mRecvStreamConfig.decoders.end(),
+ [](const auto& aLeft, const auto& aRight) {
+ return aLeft == aRight;
+ })) {
+ if (recv_codecs.empty()) {
+ CSFLogError(LOGTAG, "%s Found no valid receive codecs", __FUNCTION__);
+ }
+ mRecvStreamConfig.decoders = std::move(recv_codecs);
+ }
+
+ if (mRecvStreamConfig.rtp != newRtp) {
+ mRecvStreamConfig.rtp = newRtp;
+ }
+ }
+
+ {
+ // mSendStreamConfig and other members need the lock
+ MutexAutoLock lock(mMutex);
+ if (mControl.mLocalSsrcs.Ref() != mSendStreamConfig.rtp.ssrcs) {
+ mSendStreamConfig.rtp.ssrcs = mControl.mLocalSsrcs;
+ sendStreamRecreationNeeded = true;
+
+ const uint32_t localSsrc = mSendStreamConfig.rtp.ssrcs.empty()
+ ? 0
+ : mSendStreamConfig.rtp.ssrcs.front();
+ if (localSsrc != mRecvStreamConfig.rtp.local_ssrc) {
+ mRecvStreamConfig.rtp.local_ssrc = localSsrc;
+ }
+ }
+
+ {
+ Ssrcs localRtxSsrcs = mControl.mLocalRtxSsrcs.Ref();
+ if (!mControl.mSendCodec.Ref()
+ .map([](const auto& aCodec) {
+ return aCodec.RtxPayloadTypeIsSet();
+ })
+ .valueOr(false)) {
+ localRtxSsrcs.clear();
+ }
+ if (localRtxSsrcs != mSendStreamConfig.rtp.rtx.ssrcs) {
+ mSendStreamConfig.rtp.rtx.ssrcs = localRtxSsrcs;
+ sendStreamRecreationNeeded = true;
+ }
+ }
+
+ if (mControl.mLocalCname.Ref() != mSendStreamConfig.rtp.c_name) {
+ mSendStreamConfig.rtp.c_name = mControl.mLocalCname;
+ sendStreamRecreationNeeded = true;
+ }
+
+ if (mControl.mMid.Ref() != mSendStreamConfig.rtp.mid) {
+ mSendStreamConfig.rtp.mid = mControl.mMid;
+ sendStreamRecreationNeeded = true;
+ }
+
+ if (mControl.mLocalSendRtpExtensions.Ref() !=
+ mSendStreamConfig.rtp.extensions) {
+ mSendStreamConfig.rtp.extensions = mControl.mLocalSendRtpExtensions;
+ sendStreamRecreationNeeded = true;
+ }
+
+ if (const auto [codecConfig, rtpRtcpConfig] = std::make_pair(
+ mControl.mSendCodec.Ref(), mControl.mSendRtpRtcpConfig.Ref());
+ codecConfig.isSome() && rtpRtcpConfig.isSome() &&
+ (codecConfig != mControl.mConfiguredSendCodec ||
+ rtpRtcpConfig != mControl.mConfiguredSendRtpRtcpConfig)) {
+ CSFLogDebug(LOGTAG, "Configuring codec %s", codecConfig->mName.c_str());
+ mControl.mConfiguredSendCodec = codecConfig;
+ mControl.mConfiguredSendRtpRtcpConfig = rtpRtcpConfig;
+
+ if (ValidateCodecConfig(*codecConfig) == kMediaConduitNoError) {
+ encoderReconfigureNeeded = true;
+
+ mCurSendCodecConfig = codecConfig;
+
+ size_t streamCount = std::min(codecConfig->mEncodings.size(),
+ (size_t)webrtc::kMaxSimulcastStreams);
+ size_t highestResolutionIndex = 0;
+ for (size_t i = 1; i < streamCount; ++i) {
+ if (codecConfig->mEncodings[i].constraints.scaleDownBy <
+ codecConfig->mEncodings[highestResolutionIndex]
+ .constraints.scaleDownBy) {
+ highestResolutionIndex = i;
+ }
+ }
+ MOZ_RELEASE_ASSERT(streamCount >= 1,
+ "streamCount should be at least one");
+
+ CSFLogDebug(LOGTAG,
+ "Updating send codec for VideoConduit:%p stream count:%zu",
+ this, streamCount);
+
+ // So we can comply with b=TIAS/b=AS/maxbr=X when input resolution
+ // changes
+ MOZ_ASSERT(codecConfig->mTias < INT_MAX);
+ mNegotiatedMaxBitrate = static_cast<int>(codecConfig->mTias);
+
+ if (mLastWidth == 0 && mMinBitrateEstimate != 0) {
+ // Only do this at the start; use "have we sent a frame" as a
+ // reasonable stand-in. min <= start <= max (but all three parameters
+ // are optional)
+ webrtc::BitrateSettings settings;
+ settings.min_bitrate_bps = mMinBitrateEstimate;
+ settings.start_bitrate_bps = mMinBitrateEstimate;
+ mCall->Call()->SetClientBitratePreferences(settings);
+ }
+
+ // XXX parse the encoded SPS/PPS data and set
+ // spsData/spsLen/ppsData/ppsLen
+ mEncoderConfig.video_format =
+ webrtc::SdpVideoFormat(codecConfig->mName);
+ mEncoderConfig.encoder_specific_settings =
+ ConfigureVideoEncoderSettings(
+ *codecConfig, this, mEncoderConfig.video_format.parameters);
+
+ mEncoderConfig.codec_type = SupportedCodecType(
+ webrtc::PayloadStringToCodecType(codecConfig->mName));
+ MOZ_RELEASE_ASSERT(mEncoderConfig.codec_type !=
+ webrtc::VideoCodecType::kVideoCodecGeneric);
+
+ mEncoderConfig.content_type =
+ mControl.mCodecMode.Ref() == webrtc::VideoCodecMode::kRealtimeVideo
+ ? webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo
+ : webrtc::VideoEncoderConfig::ContentType::kScreen;
+
+ mEncoderConfig.frame_drop_enabled =
+ mControl.mCodecMode.Ref() != webrtc::VideoCodecMode::kScreensharing;
+
+ mEncoderConfig.min_transmit_bitrate_bps = mMinBitrate;
+
+ // Set the max bitrate, defaulting to 10Mbps, checking:
+ // - pref
+ // - b=TIAS
+ // - codec constraints
+ // - encoding parameter if there's a single stream
+ int maxBps = KBPS(10000);
+ maxBps = MinIgnoreZero(maxBps, mPrefMaxBitrate);
+ maxBps = MinIgnoreZero(maxBps, mNegotiatedMaxBitrate);
+ maxBps = MinIgnoreZero(
+ maxBps, static_cast<int>(codecConfig->mEncodingConstraints.maxBr));
+ if (codecConfig->mEncodings.size() == 1) {
+ maxBps = MinIgnoreZero(
+ maxBps,
+ static_cast<int>(codecConfig->mEncodings[0].constraints.maxBr));
+ }
+ mEncoderConfig.max_bitrate_bps = maxBps;
+
+ // TODO this is for webrtc-priority, but needs plumbing bits
+ mEncoderConfig.bitrate_priority = 1.0;
+
+ // Expected max number of encodings
+ mEncoderConfig.number_of_streams = streamCount;
+
+ // libwebrtc disables this by default.
+ mSendStreamConfig.suspend_below_min_bitrate = false;
+
+ webrtc::RtpConfig newRtp = mSendStreamConfig.rtp;
+ MOZ_ASSERT(newRtp == mSendStreamConfig.rtp);
+ newRtp.payload_name = codecConfig->mName;
+ newRtp.payload_type = codecConfig->mType;
+ newRtp.rtcp_mode = rtpRtcpConfig->GetRtcpMode();
+ newRtp.max_packet_size = kVideoMtu;
+ newRtp.rtx.payload_type = codecConfig->RtxPayloadTypeIsSet()
+ ? codecConfig->mRTXPayloadType
+ : kNullPayloadType;
+
+ {
+ // See Bug 1297058, enabling FEC when basic NACK is to be enabled in
+ // H.264 is problematic
+ const bool useFECDefaults =
+ !codecConfig->RtcpFbFECIsSet() ||
+ (codecConfig->mName == kH264CodecName &&
+ codecConfig->RtcpFbNackIsSet(kParamValueEmpty));
+ newRtp.ulpfec.ulpfec_payload_type =
+ useFECDefaults ? kNullPayloadType
+ : codecConfig->mULPFECPayloadType;
+ newRtp.ulpfec.red_payload_type =
+ useFECDefaults ? kNullPayloadType : codecConfig->mREDPayloadType;
+ newRtp.ulpfec.red_rtx_payload_type =
+ useFECDefaults ? kNullPayloadType
+ : codecConfig->mREDRTXPayloadType;
+ }
+
+ newRtp.nack.rtp_history_ms =
+ codecConfig->RtcpFbNackIsSet(kParamValueEmpty) ? 1000 : 0;
+
+ {
+ newRtp.rids.clear();
+ bool has_rid = false;
+ for (size_t idx = 0; idx < streamCount; idx++) {
+ const auto& encoding = codecConfig->mEncodings[idx];
+ if (encoding.rid[0]) {
+ has_rid = true;
+ break;
+ }
+ }
+ if (has_rid) {
+ for (size_t idx = streamCount; idx > 0; idx--) {
+ const auto& encoding = codecConfig->mEncodings[idx - 1];
+ newRtp.rids.push_back(encoding.rid);
+ }
+ }
+ }
+ if (mSendStreamConfig.rtp != newRtp) {
+ mSendStreamConfig.rtp = newRtp;
+ sendStreamRecreationNeeded = true;
+ }
+
+ mEncoderConfig.video_stream_factory = CreateVideoStreamFactory();
+ }
+ }
+
+ {
+ const auto& mode = mControl.mCodecMode.Ref();
+ MOZ_ASSERT(mode == webrtc::VideoCodecMode::kRealtimeVideo ||
+ mode == webrtc::VideoCodecMode::kScreensharing);
+
+ auto contentType =
+ mode == webrtc::VideoCodecMode::kRealtimeVideo
+ ? webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo
+ : webrtc::VideoEncoderConfig::ContentType::kScreen;
+
+ if (contentType != mEncoderConfig.content_type) {
+ mEncoderConfig.video_stream_factory = CreateVideoStreamFactory();
+ encoderReconfigureNeeded = true;
+ }
+ }
+
+ if (remoteSsrcUpdateNeeded) {
+ SetRemoteSSRCConfig(mControl.mConfiguredRemoteSsrc,
+ mControl.mConfiguredRemoteRtxSsrc);
+ }
+
+ // Handle un-signalled SSRCs by creating random ones and then when they
+ // actually get set, we'll destroy and recreate.
+ if (mControl.mReceiving || mControl.mTransmitting) {
+ const auto remoteSsrc = mRecvStreamConfig.rtp.remote_ssrc;
+ const auto localSsrc = mRecvStreamConfig.rtp.local_ssrc;
+ const auto localSsrcs = mSendStreamConfig.rtp.ssrcs;
+ EnsureLocalSSRC();
+ if (mControl.mReceiving) {
+ EnsureRemoteSSRC();
+ }
+ if (localSsrc != mRecvStreamConfig.rtp.local_ssrc ||
+ remoteSsrc != mRecvStreamConfig.rtp.remote_ssrc) {
+ }
+ if (localSsrcs != mSendStreamConfig.rtp.ssrcs) {
+ sendStreamRecreationNeeded = true;
+ }
+ }
+
+ // Recreate receiving streams
+ if (mControl.mReceiving) {
+ DeleteRecvStream();
+ CreateRecvStream();
+ }
+ if (sendStreamRecreationNeeded) {
+ DeleteSendStream();
+ }
+ if (mControl.mTransmitting) {
+ CreateSendStream();
+ }
+ }
+
+ // We make sure to not hold the lock while stopping/starting/reconfiguring
+ // streams, so as to not cause deadlocks. These methods can cause our platform
+ // codecs to dispatch sync runnables to main, and main may grab the lock.
+
+ if (mSendStream && encoderReconfigureNeeded) {
+ MOZ_DIAGNOSTIC_ASSERT(
+ mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.number_of_streams,
+ "Each video substream must have a corresponding ssrc.");
+ mSendStream->ReconfigureVideoEncoder(mEncoderConfig.Copy());
+ }
+
+ if (!mControl.mReceiving) {
+ StopReceiving();
+ }
+ if (!mControl.mTransmitting) {
+ StopTransmitting();
+ }
+
+ if (mControl.mReceiving) {
+ StartReceiving();
+ }
+ if (mControl.mTransmitting) {
+ StartTransmitting();
+ }
+}
+
+std::vector<unsigned int> WebrtcVideoConduit::GetLocalSSRCs() const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ return mSendStreamConfig.rtp.ssrcs;
+}
+
+void WebrtcVideoConduit::DeleteSendStream() {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mMutex.AssertCurrentThreadOwns();
+
+ if (!mSendStream) {
+ return;
+ }
+
+ mCall->Call()->DestroyVideoSendStream(mSendStream);
+ mEngineTransmitting = false;
+ mSendStream = nullptr;
+
+ // Reset base_seqs in case ssrcs get re-used.
+ mRtpSendBaseSeqs.clear();
+}
+
// Creates the libwebrtc video send stream from the current mSendStreamConfig
// and mEncoderConfig, if one does not already exist. Requires the call thread
// and mMutex held. Does not start the stream; see StartTransmitting().
void WebrtcVideoConduit::CreateSendStream() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  mMutex.AssertCurrentThreadOwns();

  if (mSendStream) {
    return;
  }

  // Record which send codec is in use for telemetry.
  nsAutoString codecName;
  codecName.AssignASCII(mSendStreamConfig.rtp.payload_name.c_str());
  Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_VIDEO_SEND_CODEC_USED,
                       codecName, 1);

  // Raw pointers into factories we own; the stream must not outlive us.
  mSendStreamConfig.encoder_settings.encoder_factory = mEncoderFactory.get();
  mSendStreamConfig.encoder_settings.bitrate_allocator_factory =
      mCall->mVideoBitrateAllocatorFactory.get();

  MOZ_DIAGNOSTIC_ASSERT(
      mSendStreamConfig.rtp.ssrcs.size() == mEncoderConfig.number_of_streams,
      "Each video substream must have a corresponding ssrc.");

  mSendStream = mCall->Call()->CreateVideoSendStream(mSendStreamConfig.Copy(),
                                                     mEncoderConfig.Copy());

  // We are the frame source (rtc::VideoSourceInterface) for this stream.
  mSendStream->SetSource(this, webrtc::DegradationPreference::BALANCED);
}
+
+void WebrtcVideoConduit::DeleteRecvStream() {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mMutex.AssertCurrentThreadOwns();
+
+ if (!mRecvStream) {
+ return;
+ }
+
+ mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
+ mEngineReceiving = false;
+ mRecvStream = nullptr;
+}
+
// Creates the libwebrtc video receive stream from the current
// mRecvStreamConfig, if one does not already exist. Requires the call thread
// and mMutex held. Does not start the stream; see StartReceiving().
void WebrtcVideoConduit::CreateRecvStream() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  mMutex.AssertCurrentThreadOwns();

  if (mRecvStream) {
    return;
  }

  // Decoded frames will be delivered to our OnFrame().
  mRecvStreamConfig.renderer = this;

  // Record each negotiated receive codec for telemetry.
  for (auto& decoder : mRecvStreamConfig.decoders) {
    nsAutoString codecName;
    codecName.AssignASCII(decoder.video_format.name.c_str());
    Telemetry::ScalarAdd(Telemetry::ScalarID::WEBRTC_VIDEO_RECV_CODEC_USED,
                         codecName, 1);
  }

  mRecvStreamConfig.decoder_factory = mDecoderFactory.get();

  mRecvStream =
      mCall->Call()->CreateVideoReceiveStream(mRecvStreamConfig.Copy());

  CSFLogDebug(LOGTAG, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
              mRecvStream, mRecvStreamConfig.rtp.remote_ssrc,
              mRecvStreamConfig.rtp.remote_ssrc);
}
+
// Tells sibling conduits (via the shared call wrapper) to stop using our
// current remote SSRC. We unregister ourselves around the call so that we do
// not receive our own UnsetRemoteSSRC() notification — presumably to avoid
// needlessly regenerating the SSRC we are about to claim; TODO confirm.
void WebrtcVideoConduit::NotifyUnsetCurrentRemoteSSRC() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  CSFLogDebug(LOGTAG, "%s (%p): Unsetting SSRC %u in other conduits",
              __FUNCTION__, this, mRecvStreamConfig.rtp.remote_ssrc);
  mCall->UnregisterConduit(this);
  mCall->UnsetRemoteSSRC(mRecvStreamConfig.rtp.remote_ssrc);
  mCall->RegisterConduit(this);
}
+
// Updates the configured remote (and RTX) SSRC in mRecvStreamConfig without
// recreating any stream. If the primary SSRC actually changes, other conduits
// are told to release it via a direct task (dispatched so it runs outside the
// current call stack but still on the call thread).
void WebrtcVideoConduit::SetRemoteSSRCConfig(uint32_t aSsrc,
                                             uint32_t aRtxSsrc) {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());

  CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, aSsrc, aSsrc);

  if (mRecvStreamConfig.rtp.remote_ssrc != aSsrc) {
    nsCOMPtr<nsIDirectTaskDispatcher> dtd = do_QueryInterface(mCallThread);
    MOZ_ALWAYS_SUCCEEDS(dtd->DispatchDirectTask(NewRunnableMethod(
        "WebrtcVideoConduit::NotifyUnsetCurrentRemoteSSRC", this,
        &WebrtcVideoConduit::NotifyUnsetCurrentRemoteSSRC)));
  }

  // Keep the mirrored mRecvSSRC (readable off-thread) in sync with the config.
  mRecvSSRC = mRecvStreamConfig.rtp.remote_ssrc = aSsrc;
  mRecvStreamConfig.rtp.rtx_ssrc = aRtxSsrc;
}
+
// Applies a new remote (and RTX) SSRC, recreating the receive stream if one
// exists and restoring the previous started/stopped state afterwards.
// No-op when both SSRCs are already current.
void WebrtcVideoConduit::SetRemoteSSRCAndRestartAsNeeded(uint32_t aSsrc,
                                                         uint32_t aRtxSsrc) {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());

  if (mRecvStreamConfig.rtp.remote_ssrc == aSsrc &&
      mRecvStreamConfig.rtp.rtx_ssrc == aRtxSsrc) {
    return;
  }

  SetRemoteSSRCConfig(aSsrc, aRtxSsrc);

  // Snapshot state before stopping so we can restore it below.
  const bool wasReceiving = mEngineReceiving;
  const bool hadRecvStream = mRecvStream;

  StopReceiving();

  if (hadRecvStream) {
    MutexAutoLock lock(mMutex);
    DeleteRecvStream();
    CreateRecvStream();
  }

  if (wasReceiving) {
    StartReceiving();
  }
}
+
// Ensures the receive config has a usable remote SSRC: one that is non-zero
// and does not collide with any of our local send SSRCs. If the current one
// qualifies we keep it; otherwise generate a random non-colliding SSRC.
// Requires the call thread and mMutex held.
void WebrtcVideoConduit::EnsureRemoteSSRC() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  mMutex.AssertCurrentThreadOwns();

  const auto& ssrcs = mSendStreamConfig.rtp.ssrcs;
  // Current remote SSRC is set and distinct from all local SSRCs: keep it.
  if (mRecvStreamConfig.rtp.remote_ssrc != 0 &&
      std::find(ssrcs.begin(), ssrcs.end(),
                mRecvStreamConfig.rtp.remote_ssrc) == ssrcs.end()) {
    return;
  }

  uint32_t ssrc;
  do {
    ssrc = GenerateRandomSSRC();
  } while (
      NS_WARN_IF(std::find(ssrcs.begin(), ssrcs.end(), ssrc) != ssrcs.end()));
  CSFLogDebug(LOGTAG, "VideoConduit %p: Generated remote SSRC %u", this, ssrc);
  SetRemoteSSRCConfig(ssrc, 0);
}
+
// Ensures every local send SSRC is non-zero, unique within the list, and
// distinct from the configured remote SSRC, regenerating offenders in place.
// Also mirrors the first local SSRC into the receive config. Requires the
// call thread and mMutex held.
void WebrtcVideoConduit::EnsureLocalSSRC() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  mMutex.AssertCurrentThreadOwns();

  auto& ssrcs = mSendStreamConfig.rtp.ssrcs;
  if (ssrcs.empty()) {
    ssrcs.push_back(GenerateRandomSSRC());
  }

  // Reverse-iterating here so that the first dupe in `ssrcs` always wins.
  for (auto& ssrc : Reversed(ssrcs)) {
    // Keep an ssrc that is non-zero, not the remote ssrc, and unique in-list.
    if (ssrc != 0 && ssrc != mRecvStreamConfig.rtp.remote_ssrc &&
        std::count(ssrcs.begin(), ssrcs.end(), ssrc) == 1) {
      continue;
    }
    // Regenerate (mutating the list element) until all constraints hold.
    do {
      ssrc = GenerateRandomSSRC();
    } while (NS_WARN_IF(ssrc == mRecvStreamConfig.rtp.remote_ssrc) ||
             NS_WARN_IF(std::count(ssrcs.begin(), ssrcs.end(), ssrc) > 1));
    CSFLogDebug(LOGTAG, "%s (%p): Generated local SSRC %u", __FUNCTION__, this,
                ssrc);
  }
  mRecvStreamConfig.rtp.local_ssrc = ssrcs[0];
}
+
// Called when another conduit claims aSsrc. If our remote (or RTX) SSRC
// matches, pick a fresh random remote SSRC — one that is neither aSsrc nor
// any of our local send SSRCs — and restart the receive side as needed.
void WebrtcVideoConduit::UnsetRemoteSSRC(uint32_t aSsrc) {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  mMutex.AssertNotCurrentThreadOwns();

  if (mRecvStreamConfig.rtp.remote_ssrc != aSsrc &&
      mRecvStreamConfig.rtp.rtx_ssrc != aSsrc) {
    return;
  }

  const auto& ssrcs = mSendStreamConfig.rtp.ssrcs;
  uint32_t our_ssrc = 0;
  do {
    our_ssrc = GenerateRandomSSRC();
  } while (NS_WARN_IF(our_ssrc == aSsrc) ||
           NS_WARN_IF(std::find(ssrcs.begin(), ssrcs.end(), our_ssrc) !=
                      ssrcs.end()));

  CSFLogDebug(LOGTAG, "%s (%p): Generated remote SSRC %u", __FUNCTION__, this,
              our_ssrc);

  // There is a (tiny) chance that this new random ssrc will collide with some
  // other conduit's remote ssrc, in which case that conduit will choose a new
  // one.
  SetRemoteSSRCAndRestartAsNeeded(our_ssrc, 0);
}
+
+/*static*/
+unsigned WebrtcVideoConduit::ToLibwebrtcMaxFramerate(
+ const Maybe<double>& aMaxFramerate) {
+ Maybe<unsigned> negotiatedMaxFps;
+ if (aMaxFramerate.isSome()) {
+ // libwebrtc does not handle non-integer max framerate.
+ unsigned integerMaxFps = static_cast<unsigned>(std::round(*aMaxFramerate));
+ // libwebrtc crashes with a max framerate of 0, even though the
+ // spec says this is valid. For now, we treat this as no limit.
+ if (integerMaxFps) {
+ negotiatedMaxFps = Some(integerMaxFps);
+ }
+ }
+ // We do not use DEFAULT_VIDEO_MAX_FRAMERATE here; that is used at the very
+ // end in VideoStreamFactory, once codec-wide and per-encoding limits are
+ // known.
+ return negotiatedMaxFps.refOr(std::numeric_limits<unsigned int>::max());
+}
+
+Maybe<Ssrc> WebrtcVideoConduit::GetRemoteSSRC() const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ // libwebrtc uses 0 to mean a lack of SSRC. That is not to spec.
+ return mRecvStreamConfig.rtp.remote_ssrc == 0
+ ? Nothing()
+ : Some(mRecvStreamConfig.rtp.remote_ssrc);
+}
+
+Maybe<webrtc::VideoReceiveStreamInterface::Stats>
+WebrtcVideoConduit::GetReceiverStats() const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ if (!mRecvStream) {
+ return Nothing();
+ }
+ return Some(mRecvStream->GetStats());
+}
+
+Maybe<webrtc::VideoSendStream::Stats> WebrtcVideoConduit::GetSenderStats()
+ const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ if (!mSendStream) {
+ return Nothing();
+ }
+ return Some(mSendStream->GetStats());
+}
+
+Maybe<webrtc::Call::Stats> WebrtcVideoConduit::GetCallStats() const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ if (!mCall->Call()) {
+ return Nothing();
+ }
+ return Some(mCall->Call()->GetStats());
+}
+
// Main-thread initialization: wires up GMP plugin lifecycle listeners on the
// encoder/decoder factories and registers this conduit with the shared call
// on the call thread. Returns kMediaConduitNoError on success.
MediaConduitErrorCode WebrtcVideoConduit::Init() {
  MOZ_ASSERT(NS_IsMainThread());

  CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);

#ifdef MOZ_WIDGET_ANDROID
  if (mozilla::camera::VideoEngine::SetAndroidObjects() != 0) {
    CSFLogError(LOGTAG, "%s: could not set Android objects", __FUNCTION__);
    return kMediaConduitSessionNotInited;
  }
#endif  // MOZ_WIDGET_ANDROID

  // Track GMP plugin ids so they can be reported/torn down later. Raw `this`
  // capture: the listeners are disconnected in Shutdown() before destruction.
  mSendPluginCreated = mEncoderFactory->CreatedGmpPluginEvent().Connect(
      GetMainThreadSerialEventTarget(),
      [self = detail::RawPtr(this)](uint64_t aPluginID) {
        self.get()->mSendCodecPluginIDs.AppendElement(aPluginID);
      });
  mSendPluginReleased = mEncoderFactory->ReleasedGmpPluginEvent().Connect(
      GetMainThreadSerialEventTarget(),
      [self = detail::RawPtr(this)](uint64_t aPluginID) {
        self.get()->mSendCodecPluginIDs.RemoveElement(aPluginID);
      });
  mRecvPluginCreated = mDecoderFactory->CreatedGmpPluginEvent().Connect(
      GetMainThreadSerialEventTarget(),
      [self = detail::RawPtr(this)](uint64_t aPluginID) {
        self.get()->mRecvCodecPluginIDs.AppendElement(aPluginID);
      });
  mRecvPluginReleased = mDecoderFactory->ReleasedGmpPluginEvent().Connect(
      GetMainThreadSerialEventTarget(),
      [self = detail::RawPtr(this)](uint64_t aPluginID) {
        self.get()->mRecvCodecPluginIDs.RemoveElement(aPluginID);
      });

  // Registration must happen on the call thread; keep a strong ref alive.
  MOZ_ALWAYS_SUCCEEDS(mCallThread->Dispatch(NS_NewRunnableFunction(
      __func__, [this, self = RefPtr<WebrtcVideoConduit>(this)] {
        mCall->RegisterConduit(this);
      })));

  CSFLogDebug(LOGTAG, "%s Initialization Done", __FUNCTION__);
  return kMediaConduitNoError;
}
+
// Main-thread shutdown. Disconnects GMP listeners synchronously, then on the
// call thread: reports per-call telemetry, disconnects all control mirrors,
// unregisters from the shared call, and destroys both streams. Resolves the
// returned promise when call-thread teardown is complete.
RefPtr<GenericPromise> WebrtcVideoConduit::Shutdown() {
  MOZ_ASSERT(NS_IsMainThread());

  mSendPluginCreated.DisconnectIfExists();
  mSendPluginReleased.DisconnectIfExists();
  mRecvPluginCreated.DisconnectIfExists();
  mRecvPluginReleased.DisconnectIfExists();

  return InvokeAsync(
      mCallThread, __func__, [this, self = RefPtr<WebrtcVideoConduit>(this)] {
        using namespace Telemetry;
        // Flush accumulated send/receive bitrate and framerate stats to
        // telemetry; each RunningStat is cleared after reporting.
        if (mSendBitrate.NumDataValues() > 0) {
          Accumulate(WEBRTC_VIDEO_ENCODER_BITRATE_AVG_PER_CALL_KBPS,
                     static_cast<unsigned>(mSendBitrate.Mean() / 1000));
          Accumulate(
              WEBRTC_VIDEO_ENCODER_BITRATE_STD_DEV_PER_CALL_KBPS,
              static_cast<unsigned>(mSendBitrate.StandardDeviation() / 1000));
          mSendBitrate.Clear();
        }
        if (mSendFramerate.NumDataValues() > 0) {
          Accumulate(WEBRTC_VIDEO_ENCODER_FRAMERATE_AVG_PER_CALL,
                     static_cast<unsigned>(mSendFramerate.Mean()));
          Accumulate(
              WEBRTC_VIDEO_ENCODER_FRAMERATE_10X_STD_DEV_PER_CALL,
              static_cast<unsigned>(mSendFramerate.StandardDeviation() * 10));
          mSendFramerate.Clear();
        }

        if (mRecvBitrate.NumDataValues() > 0) {
          Accumulate(WEBRTC_VIDEO_DECODER_BITRATE_AVG_PER_CALL_KBPS,
                     static_cast<unsigned>(mRecvBitrate.Mean() / 1000));
          Accumulate(
              WEBRTC_VIDEO_DECODER_BITRATE_STD_DEV_PER_CALL_KBPS,
              static_cast<unsigned>(mRecvBitrate.StandardDeviation() / 1000));
          mRecvBitrate.Clear();
        }
        if (mRecvFramerate.NumDataValues() > 0) {
          Accumulate(WEBRTC_VIDEO_DECODER_FRAMERATE_AVG_PER_CALL,
                     static_cast<unsigned>(mRecvFramerate.Mean()));
          Accumulate(
              WEBRTC_VIDEO_DECODER_FRAMERATE_10X_STD_DEV_PER_CALL,
              static_cast<unsigned>(mRecvFramerate.StandardDeviation() * 10));
          mRecvFramerate.Clear();
        }

        // Disconnect every mirrored control value before shutting down the
        // watch manager so no further update callbacks can fire.
        mControl.mReceiving.DisconnectIfConnected();
        mControl.mTransmitting.DisconnectIfConnected();
        mControl.mLocalSsrcs.DisconnectIfConnected();
        mControl.mLocalRtxSsrcs.DisconnectIfConnected();
        mControl.mLocalCname.DisconnectIfConnected();
        mControl.mMid.DisconnectIfConnected();
        mControl.mRemoteSsrc.DisconnectIfConnected();
        mControl.mRemoteRtxSsrc.DisconnectIfConnected();
        mControl.mSyncGroup.DisconnectIfConnected();
        mControl.mLocalRecvRtpExtensions.DisconnectIfConnected();
        mControl.mLocalSendRtpExtensions.DisconnectIfConnected();
        mControl.mSendCodec.DisconnectIfConnected();
        mControl.mSendRtpRtcpConfig.DisconnectIfConnected();
        mControl.mRecvCodecs.DisconnectIfConnected();
        mControl.mRecvRtpRtcpConfig.DisconnectIfConnected();
        mControl.mCodecMode.DisconnectIfConnected();
        mWatchManager.Shutdown();

        mCall->UnregisterConduit(this);
        mDecoderFactory->DisconnectAll();
        mEncoderFactory->DisconnectAll();
        {
          // Stream deletion requires mMutex.
          MutexAutoLock lock(mMutex);
          DeleteSendStream();
          DeleteRecvStream();
        }

        return GenericPromise::CreateAndResolve(true, __func__);
      });
}
+
// Returns the currently mirrored codec mode (realtime vs. screensharing).
// Call-thread only.
webrtc::VideoCodecMode WebrtcVideoConduit::CodecMode() const {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  return mControl.mCodecMode;
}
+
+MediaConduitErrorCode WebrtcVideoConduit::AttachRenderer(
+ RefPtr<mozilla::VideoRenderer> aVideoRenderer) {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
+
+ // null renderer
+ if (!aVideoRenderer) {
+ CSFLogError(LOGTAG, "%s NULL Renderer", __FUNCTION__);
+ MOZ_ASSERT(false);
+ return kMediaConduitInvalidRenderer;
+ }
+
+ // This function is called only from main, so we only need to protect against
+ // modifying mRenderer while any webrtc.org code is trying to use it.
+ {
+ ReentrantMonitorAutoEnter enter(mRendererMonitor);
+ mRenderer = aVideoRenderer;
+ // Make sure the renderer knows the resolution
+ mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight);
+ }
+
+ return kMediaConduitNoError;
+}
+
+void WebrtcVideoConduit::DetachRenderer() {
+ MOZ_ASSERT(NS_IsMainThread());
+
+ ReentrantMonitorAutoEnter enter(mRendererMonitor);
+ if (mRenderer) {
+ mRenderer = nullptr;
+ }
+}
+
// Builds a fresh VideoStreamFactory from the current send codec config,
// bitrate limits and sink wants, publishes it in the data-mutex-protected
// mVideoStreamFactory slot, and returns a raw pointer to it. The returned
// pointer's lifetime is managed by the slot (and by mEncoderConfig, which
// typically takes a ref) — callers must not delete it.
rtc::RefCountedObject<mozilla::VideoStreamFactory>*
WebrtcVideoConduit::CreateVideoStreamFactory() {
  auto videoStreamFactory = mVideoStreamFactory.Lock();
  *videoStreamFactory = new rtc::RefCountedObject<VideoStreamFactory>(
      *mCurSendCodecConfig, mControl.mCodecMode, mMinBitrate, mStartBitrate,
      mPrefMaxBitrate, mNegotiatedMaxBitrate, mVideoBroadcaster.wants(),
      mLockScaling);
  return videoStreamFactory->get();
}
+
+void WebrtcVideoConduit::AddOrUpdateSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ if (!mRegisteredSinks.Contains(sink)) {
+ mRegisteredSinks.AppendElement(sink);
+ }
+ auto oldWants = mVideoBroadcaster.wants();
+ mVideoBroadcaster.AddOrUpdateSink(sink, wants);
+ if (oldWants != mVideoBroadcaster.wants()) {
+ mEncoderConfig.video_stream_factory = CreateVideoStreamFactory();
+ mSendStream->ReconfigureVideoEncoder(mEncoderConfig.Copy());
+ }
+}
+
+void WebrtcVideoConduit::RemoveSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+ mRegisteredSinks.RemoveElement(sink);
+ auto oldWants = mVideoBroadcaster.wants();
+ mVideoBroadcaster.RemoveSink(sink);
+ if (oldWants != mVideoBroadcaster.wants()) {
+ mEncoderConfig.video_stream_factory = CreateVideoStreamFactory();
+ mSendStream->ReconfigureVideoEncoder(mEncoderConfig.Copy());
+ }
+}
+
// Feeds one captured frame into the send pipeline. Silently drops the frame
// (returning kMediaConduitNoError) when sending is not fully configured, when
// no sink wants frames, when the frame-rate limiter says so, or when the
// frame has a zero dimension. Otherwise forwards it to mVideoBroadcaster.
MediaConduitErrorCode WebrtcVideoConduit::SendVideoFrame(
    webrtc::VideoFrame aFrame) {
  // XXX Google uses a "timestamp_aligner" to translate timestamps from the
  // camera via TranslateTimestamp(); we should look at doing the same. This
  // avoids sampling error when capturing frames, but google had to deal with
  // some broken cameras, include Logitech c920's IIRC.

  {
    MutexAutoLock lock(mMutex);
    // Not configured to send yet: drop quietly.
    if (mSendStreamConfig.rtp.ssrcs.empty()) {
      CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s No SSRC set", this,
                    __FUNCTION__);
      return kMediaConduitNoError;
    }
    if (!mCurSendCodecConfig) {
      CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s No send codec set", this,
                    __FUNCTION__);
      return kMediaConduitNoError;
    }

    // Workaround for bug in libwebrtc where all encodings are transmitted
    // if they are all inactive.
    bool anyActive = false;
    for (const auto& encoding : mCurSendCodecConfig->mEncodings) {
      if (encoding.active) {
        anyActive = true;
        break;
      }
    }
    if (!anyActive) {
      CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s No active encodings",
                    this, __FUNCTION__);
      return kMediaConduitNoError;
    }

    CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s (send SSRC %u (0x%x))",
                  this, __FUNCTION__, mSendStreamConfig.rtp.ssrcs.front(),
                  mSendStreamConfig.rtp.ssrcs.front());

    // Track the last seen input resolution; a change may require re-deriving
    // the send resolution downstream.
    if (aFrame.width() != mLastWidth || aFrame.height() != mLastHeight) {
      // See if we need to recalculate what we're sending.
      CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
                    __FUNCTION__, aFrame.width(), aFrame.height());
      MOZ_ASSERT(aFrame.width() != 0 && aFrame.height() != 0);
      // Note coverity will flag this since it thinks they can be 0
      MOZ_ASSERT(mCurSendCodecConfig);

      mLastWidth = aFrame.width();
      mLastHeight = aFrame.height();
    }

    // adapt input video to wants of sink
    if (!mVideoBroadcaster.frame_wanted()) {
      return kMediaConduitNoError;
    }

    // Check if we need to drop this frame to meet a requested FPS
    auto videoStreamFactory = mVideoStreamFactory.Lock();
    auto& videoStreamFactoryRef = videoStreamFactory.ref();
    if (videoStreamFactoryRef->ShouldDropFrame(aFrame)) {
      return kMediaConduitNoError;
    }
  }

  // If we have zero width or height, drop the frame here. Attempting to send
  // it will cause all sorts of problems in the webrtc.org code.
  if (aFrame.width() == 0 || aFrame.height() == 0) {
    return kMediaConduitNoError;
  }

  rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer =
      aFrame.video_frame_buffer();

  MOZ_ASSERT(!aFrame.color_space(), "Unexpected use of color space");
  MOZ_ASSERT(!aFrame.has_update_rect(), "Unexpected use of update rect");

#ifdef MOZ_REAL_TIME_TRACING
  // Profiler-only: annotate the trace with inter-frame timing and the ssrcs.
  if (profiler_is_active()) {
    MutexAutoLock lock(mMutex);
    nsAutoCStringN<256> ssrcsCommaSeparated;
    bool first = true;
    for (auto ssrc : mSendStreamConfig.rtp.ssrcs) {
      if (!first) {
        ssrcsCommaSeparated.AppendASCII(", ");
      } else {
        first = false;
      }
      ssrcsCommaSeparated.AppendInt(ssrc);
    }
    // The first frame has a delta of zero.
    uint64_t timestampDelta =
        mLastTimestampSendUs.isSome()
            ? aFrame.timestamp_us() - mLastTimestampSendUs.value()
            : 0;
    mLastTimestampSendUs = Some(aFrame.timestamp_us());
    TRACE_COMMENT("VideoConduit::SendVideoFrame", "t-delta=%.1fms, ssrcs=%s",
                  timestampDelta / 1000.f, ssrcsCommaSeparated.get());
  }
#endif

  mVideoBroadcaster.OnFrame(aFrame);

  return kMediaConduitNoError;
}
+
+// Transport Layer Callbacks
+
// Hands a received RTP/RTCP packet to the libwebrtc call's packet receiver.
// Drops silently if the call has already been torn down; logs on delivery
// failure. The `type` is only used for the error log.
void WebrtcVideoConduit::DeliverPacket(rtc::CopyOnWriteBuffer packet,
                                       PacketType type) {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());

  if (!mCall->Call()) {
    return;
  }

  // Bug 1499796 - we need to get passed the time the packet was received
  webrtc::PacketReceiver::DeliveryStatus status =
      mCall->Call()->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
                                               std::move(packet), -1);

  if (status != webrtc::PacketReceiver::DELIVERY_OK) {
    CSFLogError(LOGTAG, "%s DeliverPacket Failed for %s packet, %d",
                __FUNCTION__, type == PacketType::RTP ? "RTP" : "RTCP", status);
  }
}
+
// Entry point for incoming RTP. Handles SSRC switching for un-signalled
// SSRCs (when allowed, or when no remote SSRC is known yet), then forwards
// the packet to the libwebrtc call.
void WebrtcVideoConduit::OnRtpReceived(MediaPacket&& aPacket,
                                       webrtc::RTPHeader&& aHeader) {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());

  mRemoteSendSSRC = aHeader.ssrc;

  if (mAllowSsrcChange || mRecvStreamConfig.rtp.remote_ssrc == 0) {
    bool switchRequired = mRecvStreamConfig.rtp.remote_ssrc != aHeader.ssrc;
    if (switchRequired) {
      // Handle the unknown ssrc (and ssrc-not-signaled case).

      // We need to check that the newly received ssrc is not already
      // associated with ulpfec or rtx. This is how webrtc.org handles
      // things, see https://codereview.webrtc.org/1226093002.
      const webrtc::VideoReceiveStreamInterface::Config::Rtp& rtp =
          mRecvStreamConfig.rtp;
      switchRequired =
          rtp.rtx_associated_payload_types.find(aHeader.payloadType) ==
              rtp.rtx_associated_payload_types.end() &&
          rtp.ulpfec_payload_type != aHeader.payloadType;
    }

    if (switchRequired) {
      CSFLogInfo(LOGTAG, "VideoConduit %p: Switching remote SSRC from %u to %u",
                 this, mRecvStreamConfig.rtp.remote_ssrc, aHeader.ssrc);
      SetRemoteSSRCAndRestartAsNeeded(aHeader.ssrc, 0);
    }
  }

  // Log sequence number (bytes [2,4)) and SSRC (bytes [8,12)) straight from
  // the raw RTP header.
  CSFLogVerbose(
      LOGTAG,
      "VideoConduit %p: Received RTP packet, seq# %u, len %zu, SSRC %u (0x%x) ",
      this, (uint16_t)ntohs(((uint16_t*)aPacket.data())[1]), aPacket.len(),
      (uint32_t)ntohl(((uint32_t*)aPacket.data())[2]),
      (uint32_t)ntohl(((uint32_t*)aPacket.data())[2]));

  DeliverPacket(rtc::CopyOnWriteBuffer(aPacket.data(), aPacket.len()),
                PacketType::RTP);
}
+
+void WebrtcVideoConduit::OnRtcpReceived(MediaPacket&& aPacket) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+ CSFLogVerbose(LOGTAG, "VideoConduit %p: Received RTCP Packet, len %zu ", this,
+ aPacket.len());
+
+ DeliverPacket(rtc::CopyOnWriteBuffer(aPacket.data(), aPacket.len()),
+ PacketType::RTCP);
+}
+
+Maybe<uint16_t> WebrtcVideoConduit::RtpSendBaseSeqFor(uint32_t aSsrc) const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ auto it = mRtpSendBaseSeqs.find(aSsrc);
+ if (it == mRtpSendBaseSeqs.end()) {
+ return Nothing();
+ }
+ return Some(it->second);
+}
+
// Returns the stats-timestamp maker owned by the shared call wrapper.
const dom::RTCStatsTimestampMaker& WebrtcVideoConduit::GetTimestampMaker()
    const {
  return mCall->GetTimestampMaker();
}
+
+void WebrtcVideoConduit::StopTransmitting() {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mMutex.AssertNotCurrentThreadOwns();
+
+ if (!mEngineTransmitting) {
+ return;
+ }
+
+ if (mSendStream) {
+ CSFLogDebug(LOGTAG, "%s Stopping send stream", __FUNCTION__);
+ mSendStream->Stop();
+ }
+
+ mEngineTransmitting = false;
+}
+
// Starts the (already created) send stream if not already transmitting, and
// signals the network as up. Must not hold mMutex (see StopTransmitting).
void WebrtcVideoConduit::StartTransmitting() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  MOZ_ASSERT(mSendStream);
  mMutex.AssertNotCurrentThreadOwns();

  if (mEngineTransmitting) {
    return;
  }

  CSFLogDebug(LOGTAG, "%s Starting send stream", __FUNCTION__);

  mSendStream->Start();
  // XXX File a bug to consider hooking this up to the state of mtransport
  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO,
                                           webrtc::kNetworkUp);
  mEngineTransmitting = true;
}
+
+void WebrtcVideoConduit::StopReceiving() {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mMutex.AssertNotCurrentThreadOwns();
+
+ // Are we receiving already? If so, stop receiving and playout
+ // since we can't apply new recv codec when the engine is playing.
+ if (!mEngineReceiving) {
+ return;
+ }
+
+ if (mRecvStream) {
+ CSFLogDebug(LOGTAG, "%s Stopping receive stream", __FUNCTION__);
+ mRecvStream->Stop();
+ }
+
+ mEngineReceiving = false;
+}
+
// Starts the (already created) receive stream if not already receiving, and
// signals the network as up. Must not hold mMutex (see StopReceiving).
void WebrtcVideoConduit::StartReceiving() {
  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
  MOZ_ASSERT(mRecvStream);
  mMutex.AssertNotCurrentThreadOwns();

  if (mEngineReceiving) {
    return;
  }

  CSFLogDebug(LOGTAG, "%s Starting receive stream (SSRC %u (0x%x))",
              __FUNCTION__, mRecvStreamConfig.rtp.remote_ssrc,
              mRecvStreamConfig.rtp.remote_ssrc);
  // Start Receiving on the video engine
  mRecvStream->Start();

  // XXX File a bug to consider hooking this up to the state of mtransport
  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO,
                                           webrtc::kNetworkUp);
  mEngineReceiving = true;
}
+
// webrtc::Transport callback: ships an outgoing RTP packet to the transport
// layer via mSenderRtpSendEvent. Also records the first sequence number per
// SSRC (base_seq, needed for stats) and notifies libwebrtc that the packet
// was sent. Returns false if the transport is not active. Runs on a libwebrtc
// thread, not the call thread.
bool WebrtcVideoConduit::SendRtp(const uint8_t* aData, size_t aLength,
                                 const webrtc::PacketOptions& aOptions) {
  // Need at least a full fixed RTP header (12 bytes) to read seq# and SSRC.
  MOZ_ASSERT(aLength >= 12);
  const uint16_t seqno = ntohs(*((uint16_t*)&aData[2]));
  const uint32_t ssrc = ntohl(*((uint32_t*)&aData[8]));

  CSFLogVerbose(
      LOGTAG,
      "VideoConduit %p: Sending RTP Packet seq# %u, len %zu, SSRC %u (0x%x)",
      this, seqno, aLength, ssrc, ssrc);

  if (!mTransportActive) {
    CSFLogError(LOGTAG, "VideoConduit %p: RTP Packet Send Failed", this);
    return false;
  }

  // Copy with headroom so SRTP encryption can expand the packet in place.
  MediaPacket packet;
  packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION);
  packet.SetType(MediaPacket::RTP);
  mSenderRtpSendEvent.Notify(std::move(packet));

  // Parse the sequence number of the first rtp packet as base_seq.
  const auto inserted = mRtpSendBaseSeqs_n.insert({ssrc, seqno}).second;

  if (inserted || aOptions.packet_id >= 0) {
    int64_t now_ms = PR_Now() / 1000;
    // Mirror the base_seq onto the call thread (mRtpSendBaseSeqs is
    // call-thread-only) and report the send time back to libwebrtc.
    MOZ_ALWAYS_SUCCEEDS(mCallThread->Dispatch(NS_NewRunnableFunction(
        __func__, [this, self = RefPtr<WebrtcVideoConduit>(this),
                   packet_id = aOptions.packet_id, now_ms, ssrc, seqno] {
          mRtpSendBaseSeqs.insert({ssrc, seqno});
          if (packet_id >= 0) {
            if (mCall->Call()) {
              // TODO: This notification should ideally happen after the
              // transport layer has sent the packet on the wire.
              mCall->Call()->OnSentPacket({packet_id, now_ms});
            }
          }
        })));
  }
  return true;
}
+
+bool WebrtcVideoConduit::SendSenderRtcp(const uint8_t* aData, size_t aLength) {
+ CSFLogVerbose(
+ LOGTAG,
+ "VideoConduit %p: Sending RTCP SR Packet, len %zu, SSRC %u (0x%x)", this,
+ aLength, (uint32_t)ntohl(*((uint32_t*)&aData[4])),
+ (uint32_t)ntohl(*((uint32_t*)&aData[4])));
+
+ if (!mTransportActive) {
+ CSFLogError(LOGTAG, "VideoConduit %p: RTCP SR Packet Send Failed", this);
+ return false;
+ }
+
+ MediaPacket packet;
+ packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION);
+ packet.SetType(MediaPacket::RTCP);
+ mSenderRtcpSendEvent.Notify(std::move(packet));
+ return true;
+}
+
+bool WebrtcVideoConduit::SendReceiverRtcp(const uint8_t* aData,
+ size_t aLength) {
+ CSFLogVerbose(
+ LOGTAG,
+ "VideoConduit %p: Sending RTCP RR Packet, len %zu, SSRC %u (0x%x)", this,
+ aLength, (uint32_t)ntohl(*((uint32_t*)&aData[4])),
+ (uint32_t)ntohl(*((uint32_t*)&aData[4])));
+
+ if (!mTransportActive) {
+ CSFLogError(LOGTAG, "VideoConduit %p: RTCP RR Packet Send Failed", this);
+ return false;
+ }
+
+ MediaPacket packet;
+ packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION);
+ packet.SetType(MediaPacket::RTCP);
+ mReceiverRtcpSendEvent.Notify(std::move(packet));
+ return true;
+}
+
// rtc::VideoSinkInterface callback: a decoded frame is ready for rendering.
// Maintains the received-frame history (for about:webrtc stats), optionally
// decodes an embedded latency timestamp, then hands the frame to mRenderer.
// Runs under mRendererMonitor; called from a libwebrtc decode thread.
void WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame) {
  // Snapshot the atomically-readable SSRC mirrors once for this frame.
  const uint32_t localRecvSsrc = mRecvSSRC;
  const uint32_t remoteSendSsrc = mRemoteSendSSRC;

  CSFLogVerbose(
      LOGTAG,
      "VideoConduit %p: Rendering frame, Remote SSRC %u (0x%x), size %ux%u",
      this, static_cast<uint32_t>(remoteSendSsrc),
      static_cast<uint32_t>(remoteSendSsrc), video_frame.width(),
      video_frame.height());
  ReentrantMonitorAutoEnter enter(mRendererMonitor);

  if (!mRenderer) {
    CSFLogError(LOGTAG, "VideoConduit %p: Cannot render frame, no renderer",
                this);
    return;
  }

  // A new history entry is needed on the very first frame, on a resolution
  // change, or (checked below) on a rotation/SSRC change.
  bool needsNewHistoryElement = mReceivedFrameHistory.mEntries.IsEmpty();

  if (mReceivingWidth != video_frame.width() ||
      mReceivingHeight != video_frame.height()) {
    mReceivingWidth = video_frame.width();
    mReceivingHeight = video_frame.height();
    mRenderer->FrameSizeChange(mReceivingWidth, mReceivingHeight);
    needsNewHistoryElement = true;
  }

  if (!needsNewHistoryElement) {
    auto& currentEntry = mReceivedFrameHistory.mEntries.LastElement();
    needsNewHistoryElement =
        currentEntry.mRotationAngle !=
            static_cast<unsigned long>(video_frame.rotation()) ||
        currentEntry.mLocalSsrc != localRecvSsrc ||
        currentEntry.mRemoteSsrc != remoteSendSsrc;
  }

  // Record frame history
  const auto historyNow = mCall->GetTimestampMaker().GetNow();
  if (needsNewHistoryElement) {
    dom::RTCVideoFrameHistoryEntryInternal frameHistoryElement;
    frameHistoryElement.mConsecutiveFrames = 0;
    frameHistoryElement.mWidth = video_frame.width();
    frameHistoryElement.mHeight = video_frame.height();
    frameHistoryElement.mRotationAngle =
        static_cast<unsigned long>(video_frame.rotation());
    frameHistoryElement.mFirstFrameTimestamp = historyNow;
    frameHistoryElement.mLocalSsrc = localRecvSsrc;
    frameHistoryElement.mRemoteSsrc = remoteSendSsrc;
    if (!mReceivedFrameHistory.mEntries.AppendElement(frameHistoryElement,
                                                      fallible)) {
      mozalloc_handle_oom(0);
    }
  }
  auto& currentEntry = mReceivedFrameHistory.mEntries.LastElement();

  currentEntry.mConsecutiveFrames++;
  currentEntry.mLastFrameTimestamp = historyNow;
  // Attempt to retrieve an timestamp encoded in the image pixels if enabled.
  if (mVideoLatencyTestEnable && mReceivingWidth && mReceivingHeight) {
    uint64_t now = PR_Now();
    uint64_t timestamp = 0;
    uint8_t* data = const_cast<uint8_t*>(
        video_frame.video_frame_buffer()->GetI420()->DataY());
    bool ok = YuvStamper::Decode(
        mReceivingWidth, mReceivingHeight, mReceivingWidth, data,
        reinterpret_cast<unsigned char*>(&timestamp), sizeof(timestamp), 0, 0);
    if (ok) {
      VideoLatencyUpdate(now - timestamp);
    }
  }
#ifdef MOZ_REAL_TIME_TRACING
  // Profiler-only: annotate the trace with inter-frame RTP timestamp deltas.
  if (profiler_is_active()) {
    MutexAutoLock lock(mMutex);
    // The first frame has a delta of zero.
    uint32_t rtpTimestamp = video_frame.timestamp();
    uint32_t timestampDelta =
        mLastRTPTimestampReceive.isSome()
            ? rtpTimestamp - mLastRTPTimestampReceive.value()
            : 0;
    mLastRTPTimestampReceive = Some(rtpTimestamp);
    TRACE_COMMENT("VideoConduit::OnFrame", "t-delta=%.1fms, ssrc=%u",
                  timestampDelta * 1000.f / webrtc::kVideoPayloadTypeFrequency,
                  localRecvSsrc);
  }
#endif

  mRenderer->RenderVideoFrame(*video_frame.video_frame_buffer(),
                              video_frame.timestamp(),
                              video_frame.render_time_ms());
}
+
+// Appends a copy of the received-frame history to *outHistories. Safe to call
+// from any thread (takes mRendererMonitor). Returns false only if the fallible
+// append fails, after signaling the OOM handler.
+bool WebrtcVideoConduit::AddFrameHistory(
+    dom::Sequence<dom::RTCVideoFrameHistoryInternal>* outHistories) const {
+  ReentrantMonitorAutoEnter enter(mRendererMonitor);
+  if (!outHistories->AppendElement(mReceivedFrameHistory, fallible)) {
+    mozalloc_handle_oom(0);
+    return false;
+  }
+  return true;
+}
+
+// Debug aid: logs every configured receive codec (name, payload type and
+// encoding constraints) from mControl. Call thread only (asserted).
+void WebrtcVideoConduit::DumpCodecDB() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  for (const auto& entry : mControl.mConfiguredRecvCodecs) {
+    CSFLogDebug(LOGTAG, "Payload Name: %s", entry.mName.c_str());
+    CSFLogDebug(LOGTAG, "Payload Type: %d", entry.mType);
+    CSFLogDebug(LOGTAG, "Payload Max Frame Size: %d",
+                entry.mEncodingConstraints.maxFs);
+    // maxFps is optional; only log it when a value was negotiated/configured.
+    if (entry.mEncodingConstraints.maxFps.isSome()) {
+      CSFLogDebug(LOGTAG, "Payload Max Frame Rate: %f",
+                  *entry.mEncodingConstraints.maxFps);
+    }
+  }
+}
+
+// Folds a new latency sample (computed by the caller as PR_Now() minus the
+// timestamp decoded from the frame pixels) into an exponentially-weighted
+// moving average: new sample weighted 1/8 (1 - sAlphaNum/sAlphaDen), previous
+// average weighted 7/8. The stored average is kept scaled by sRoundingPadding
+// so integer math retains fractional precision; MozVideoLatencyAvg() unscales.
+void WebrtcVideoConduit::VideoLatencyUpdate(uint64_t aNewSample) {
+  mRendererMonitor.AssertCurrentThreadIn();
+
+  mVideoLatencyAvg =
+      (sRoundingPadding * aNewSample + sAlphaNum * mVideoLatencyAvg) /
+      sAlphaDen;
+}
+
+// Returns the running latency average with the sRoundingPadding fixed-point
+// scale removed (see VideoLatencyUpdate).
+uint64_t WebrtcVideoConduit::MozVideoLatencyAvg() {
+  mRendererMonitor.AssertCurrentThreadIn();
+
+  return mVideoLatencyAvg / sRoundingPadding;
+}
+
+// Samples the current send/receive bitrate and framerate from the active
+// streams into the RunningStat accumulators used for telemetry. Only samples
+// a direction whose engine is currently running. Call thread only (asserted).
+void WebrtcVideoConduit::CollectTelemetryData() {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  if (mEngineTransmitting) {
+    webrtc::VideoSendStream::Stats stats = mSendStream->GetStats();
+    mSendBitrate.Push(stats.media_bitrate_bps);
+    mSendFramerate.Push(stats.encode_frame_rate);
+  }
+  if (mEngineReceiving) {
+    webrtc::VideoReceiveStreamInterface::Stats stats = mRecvStream->GetStats();
+    mRecvBitrate.Push(stats.total_bitrate_bps);
+    mRecvFramerate.Push(stats.decode_frame_rate);
+  }
+}
+
+// webrtc::RtcpEventObserver callback: forwards RTCP BYE to our event listeners.
+void WebrtcVideoConduit::OnRtcpBye() { mRtcpByeEvent.Notify(); }
+
+// webrtc::RtcpEventObserver callback: forwards RTCP timeout to our listeners.
+void WebrtcVideoConduit::OnRtcpTimeout() { mRtcpTimeoutEvent.Notify(); }
+
+// Returns a snapshot of the RTP sources reported by the receive stream, or an
+// empty vector when no receive stream exists. Main thread only (asserted);
+// mRecvStream is read under mMutex, and the copy is returned outside the lock.
+std::vector<webrtc::RtpSource> WebrtcVideoConduit::GetUpstreamRtpSources()
+    const {
+  MOZ_ASSERT(NS_IsMainThread());
+  std::vector<webrtc::RtpSource> sources;
+  {
+    MutexAutoLock lock(mMutex);
+    if (mRecvStream) {
+      sources = mRecvStream->GetSources();
+    }
+  }
+  return sources;
+}
+
+// True when aPluginID is tracked in either the send-side or receive-side
+// codec plugin ID list. Main thread only (asserted).
+bool WebrtcVideoConduit::HasCodecPluginID(uint64_t aPluginID) const {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  return mSendCodecPluginIDs.Contains(aPluginID) ||
+         mRecvCodecPluginIDs.Contains(aPluginID);
+}
+
+// Queries gfxInfo for the WebRTC H.264 HW acceleration feature status.
+// Returns false when the gfxInfo service is unavailable, the query fails, or
+// the feature status is anything other than FEATURE_STATUS_OK.
+bool WebrtcVideoConduit::HasH264Hardware() {
+  nsCOMPtr<nsIGfxInfo> gfxInfo = do_GetService("@mozilla.org/gfx/info;1");
+  if (!gfxInfo) {
+    return false;
+  }
+  int32_t status;
+  nsCString discardFailureId;
+  return NS_SUCCEEDED(gfxInfo->GetFeatureStatus(
+             nsIGfxInfo::FEATURE_WEBRTC_HW_ACCELERATION_H264, discardFailureId,
+             &status)) &&
+         status == nsIGfxInfo::FEATURE_STATUS_OK;
+}
+
+// Returns the payload type configured on the send stream's RTP config, or
+// Nothing() when there is no send stream or no payload type has been set
+// (-1 sentinel). Call thread only (asserted).
+Maybe<int> WebrtcVideoConduit::ActiveSendPayloadType() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  if (!mSendStream) {
+    return Nothing();
+  }
+
+  if (mSendStreamConfig.rtp.payload_type == -1) {
+    return Nothing();
+  }
+
+  return Some(mSendStreamConfig.rtp.payload_type);
+}
+
+// Returns the payload type currently seen by the receiver, taken from the
+// receiver stats, or Nothing() when stats are unavailable or no payload has
+// been received yet (-1 sentinel). Call thread only (asserted).
+Maybe<int> WebrtcVideoConduit::ActiveRecvPayloadType() const {
+  MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+
+  auto stats = GetReceiverStats();
+  if (!stats) {
+    return Nothing();
+  }
+
+  if (stats->current_payload_type == -1) {
+    return Nothing();
+  }
+
+  return Some(stats->current_payload_type);
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/VideoConduit.h b/dom/media/webrtc/libwebrtcglue/VideoConduit.h
new file mode 100644
index 0000000000..d9342df623
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/VideoConduit.h
@@ -0,0 +1,505 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef VIDEO_SESSION_H_
+#define VIDEO_SESSION_H_
+
+#include "mozilla/Atomics.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/DataMutex.h"
+#include "mozilla/ReentrantMonitor.h"
+#include "mozilla/SharedThreadPool.h"
+#include "mozilla/StateMirroring.h"
+#include "mozilla/UniquePtr.h"
+#include "nsITimer.h"
+
+#include "MediaConduitInterface.h"
+#include "common/MediaEngineWrapper.h"
+#include "RtpRtcpConfig.h"
+#include "RunningStat.h"
+#include "transport/runnable_utils.h"
+
+// conflicts with #include of scoped_ptr.h
+#undef FF
+// Video Engine Includes
+#include "api/video_codecs/video_decoder.h"
+#include "api/video_codecs/video_encoder.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "call/call_basic_stats.h"
+#include "common_video/include/video_frame_buffer_pool.h"
+#include "media/base/video_broadcaster.h"
+#include <functional>
+#include <memory>
+/** This file hosts several structures identifying different aspects
+ * of a RTP Session.
+ */
+
+namespace mozilla {
+
+// Convert (SI) kilobits/sec to (SI) bits/sec
+#define KBPS(kbps) kbps * 1000
+
+const int kViEMinCodecBitrate_bps = KBPS(30);
+const unsigned int kVideoMtu = 1200;
+const int kQpMax = 56;
+
+// Returns the smaller of a and b, treating 0 as "unset": if exactly one
+// argument is 0 the other is returned; if both are 0 the result is 0.
+template <typename T>
+T MinIgnoreZero(const T& a, const T& b) {
+  return std::min(a ? a : b, b ? b : a);
+}
+
+class VideoStreamFactory;
+class WebrtcAudioConduit;
+
+// Interface of external video encoder for WebRTC. Merges the Mozilla
+// VideoEncoder interface with libwebrtc's webrtc::VideoEncoder.
+class WebrtcVideoEncoder : public VideoEncoder, public webrtc::VideoEncoder {};
+
+// Interface of external video decoder for WebRTC. Merges the Mozilla
+// VideoDecoder interface with libwebrtc's webrtc::VideoDecoder.
+class WebrtcVideoDecoder : public VideoDecoder, public webrtc::VideoDecoder {};
+
+/**
+ * Concrete class for Video session. Hooks up
+ * - media-source and target to external transport
+ */
+class WebrtcVideoConduit
+    : public VideoSessionConduit,
+      public webrtc::RtcpEventObserver,
+      public rtc::VideoSinkInterface<webrtc::VideoFrame>,
+      public rtc::VideoSourceInterface<webrtc::VideoFrame> {
+ public:
+  // Returns true when both encoder and decoder are HW accelerated.
+  static bool HasH264Hardware();
+
+  Maybe<int> ActiveSendPayloadType() const override;
+  Maybe<int> ActiveRecvPayloadType() const override;
+
+  /**
+   * Function to attach Renderer end-point for the Media-Video conduit.
+   * @param aRenderer : Reference to the concrete mozilla Video renderer
+   * implementation. Note: Multiple invocations of this API will remove any
+   * existing renderer and attach the new one to the Conduit.
+   */
+  MediaConduitErrorCode AttachRenderer(
+      RefPtr<mozilla::VideoRenderer> aVideoRenderer) override;
+  void DetachRenderer() override;
+
+  Maybe<uint16_t> RtpSendBaseSeqFor(uint32_t aSsrc) const override;
+
+  const dom::RTCStatsTimestampMaker& GetTimestampMaker() const override;
+
+  // Start/stop the transmit/receive subsystems. Their state is reflected in
+  // mEngineTransmitting / mEngineReceiving below.
+  void StopTransmitting();
+  void StartTransmitting();
+  void StopReceiving();
+  void StartReceiving();
+
+  /**
+   * Function to deliver a capture video frame for encoding and transport.
+   * If the frame's timestamp is 0, it will be automatically generated.
+   *
+   * NOTE: ConfigureSendMediaCodec() must be called before this function can
+   * be invoked. This ensures the inserted video-frames can be
+   * transmitted by the conduit.
+   */
+  MediaConduitErrorCode SendVideoFrame(webrtc::VideoFrame aFrame) override;
+
+  bool SendRtp(const uint8_t* aData, size_t aLength,
+               const webrtc::PacketOptions& aOptions) override;
+  bool SendSenderRtcp(const uint8_t* aData, size_t aLength) override;
+  bool SendReceiverRtcp(const uint8_t* aData, size_t aLength) override;
+
+  /*
+   * webrtc:VideoSinkInterface implementation
+   * -------------------------------
+   */
+  void OnFrame(const webrtc::VideoFrame& frame) override;
+
+  /*
+   * webrtc:VideoSourceInterface implementation
+   * -------------------------------
+   */
+  void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+                       const rtc::VideoSinkWants& wants) override;
+  void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+  bool HasCodecPluginID(uint64_t aPluginID) const override;
+
+  RefPtr<GenericPromise> Shutdown() override;
+
+  // Accessors for construction-time encoding options (const members below).
+  bool Denoising() const { return mDenoising; }
+
+  uint8_t SpatialLayers() const { return mSpatialLayers; }
+
+  uint8_t TemporalLayers() const { return mTemporalLayers; }
+
+  webrtc::VideoCodecMode CodecMode() const;
+
+  WebrtcVideoConduit(RefPtr<WebrtcCallWrapper> aCall,
+                     nsCOMPtr<nsISerialEventTarget> aStsThread,
+                     Options aOptions, std::string aPCHandle,
+                     const TrackingId& aRecvTrackingId);
+  virtual ~WebrtcVideoConduit();
+
+  // Call thread.
+  void InitControl(VideoConduitControlInterface* aControl) override;
+
+  // Called when a parameter in mControl has changed. Call thread.
+  void OnControlConfigChange();
+
+  // Necessary Init steps on main thread.
+  MediaConduitErrorCode Init();
+
+  Ssrcs GetLocalSSRCs() const override;
+  Maybe<Ssrc> GetRemoteSSRC() const override;
+
+  // Call thread.
+  void UnsetRemoteSSRC(uint32_t aSsrc) override;
+
+  static unsigned ToLibwebrtcMaxFramerate(const Maybe<double>& aMaxFramerate);
+
+ private:
+  void NotifyUnsetCurrentRemoteSSRC();
+  void SetRemoteSSRCConfig(uint32_t aSsrc, uint32_t aRtxSsrc);
+  void SetRemoteSSRCAndRestartAsNeeded(uint32_t aSsrc, uint32_t aRtxSsrc);
+  rtc::RefCountedObject<mozilla::VideoStreamFactory>*
+  CreateVideoStreamFactory();
+
+ public:
+  // Creating a recv stream or a send stream requires a local ssrc to be
+  // configured. This method will generate one if needed.
+  void EnsureLocalSSRC();
+  // Creating a recv stream requires a remote ssrc to be configured. This method
+  // will generate one if needed.
+  void EnsureRemoteSSRC();
+
+  Maybe<webrtc::VideoReceiveStreamInterface::Stats> GetReceiverStats()
+      const override;
+  Maybe<webrtc::VideoSendStream::Stats> GetSenderStats() const override;
+  Maybe<webrtc::CallBasicStats> GetCallStats() const override;
+
+  bool AddFrameHistory(dom::Sequence<dom::RTCVideoFrameHistoryInternal>*
+                           outHistories) const override;
+
+  uint64_t MozVideoLatencyAvg();
+
+  void DisableSsrcChanges() override {
+    MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+    mAllowSsrcChange = false;
+  }
+
+  void CollectTelemetryData() override;
+
+  void OnRtpReceived(MediaPacket&& aPacket, webrtc::RTPHeader&& aHeader);
+  void OnRtcpReceived(MediaPacket&& aPacket);
+
+  void OnRtcpBye() override;
+  void OnRtcpTimeout() override;
+
+  // Thread safe (mTransportActive is atomic). Deactivating also disconnects
+  // the receive-side packet event listeners.
+  void SetTransportActive(bool aActive) override {
+    mTransportActive = aActive;
+    if (!aActive) {
+      mReceiverRtpEventListener.DisconnectIfExists();
+      mReceiverRtcpEventListener.DisconnectIfExists();
+      mSenderRtcpEventListener.DisconnectIfExists();
+    }
+  }
+  MediaEventSourceExc<MediaPacket>& SenderRtpSendEvent() override {
+    return mSenderRtpSendEvent;
+  }
+  MediaEventSourceExc<MediaPacket>& SenderRtcpSendEvent() override {
+    return mSenderRtcpSendEvent;
+  }
+  MediaEventSourceExc<MediaPacket>& ReceiverRtcpSendEvent() override {
+    return mReceiverRtcpSendEvent;
+  }
+  void ConnectReceiverRtpEvent(
+      MediaEventSourceExc<MediaPacket, webrtc::RTPHeader>& aEvent) override {
+    // Hold a strong-ref to `this` for safety, since we'll be disconnecting
+    // off-target.
+    mReceiverRtpEventListener = aEvent.Connect(
+        mCallThread, [this, self = RefPtr<WebrtcVideoConduit>(this)](
+                         MediaPacket aPacket, webrtc::RTPHeader aHeader) {
+          OnRtpReceived(std::move(aPacket), std::move(aHeader));
+        });
+  }
+  void ConnectReceiverRtcpEvent(
+      MediaEventSourceExc<MediaPacket>& aEvent) override {
+    // Hold a strong-ref to `this` for safety, since we'll be disconnecting
+    // off-target.
+    mReceiverRtcpEventListener = aEvent.Connect(
+        mCallThread,
+        [this, self = RefPtr<WebrtcVideoConduit>(this)](MediaPacket aPacket) {
+          OnRtcpReceived(std::move(aPacket));
+        });
+  }
+  void ConnectSenderRtcpEvent(
+      MediaEventSourceExc<MediaPacket>& aEvent) override {
+    // Hold a strong-ref to `this` for safety, since we'll be disconnecting
+    // off-target.
+    mSenderRtcpEventListener = aEvent.Connect(
+        mCallThread,
+        [this, self = RefPtr<WebrtcVideoConduit>(this)](MediaPacket aPacket) {
+          OnRtcpReceived(std::move(aPacket));
+        });
+  }
+
+  std::vector<webrtc::RtpSource> GetUpstreamRtpSources() const override;
+
+ private:
+  // Don't allow copying/assigning.
+  WebrtcVideoConduit(const WebrtcVideoConduit&) = delete;
+  void operator=(const WebrtcVideoConduit&) = delete;
+
+  // Utility function to dump recv codec database
+  void DumpCodecDB() const;
+
+  // Video Latency Test averaging filter
+  void VideoLatencyUpdate(uint64_t aNewSample);
+
+  void CreateSendStream();
+  void DeleteSendStream();
+  void CreateRecvStream();
+  void DeleteRecvStream();
+
+  void DeliverPacket(rtc::CopyOnWriteBuffer packet, PacketType type) override;
+
+  MediaEventSource<void>& RtcpByeEvent() override { return mRtcpByeEvent; }
+  MediaEventSource<void>& RtcpTimeoutEvent() override {
+    return mRtcpTimeoutEvent;
+  }
+
+  bool RequiresNewSendStream(const VideoCodecConfig& newConfig) const;
+
+  mutable mozilla::ReentrantMonitor mRendererMonitor MOZ_UNANNOTATED;
+
+  // Accessed on any thread under mRendererMonitor.
+  RefPtr<mozilla::VideoRenderer> mRenderer;
+
+  // Accessed on any thread under mRendererMonitor.
+  unsigned short mReceivingWidth = 0;
+
+  // Accessed on any thread under mRendererMonitor.
+  unsigned short mReceivingHeight = 0;
+
+  // Call worker thread. All access to mCall->Call() happens here.
+  const nsCOMPtr<nsISerialEventTarget> mCallThread;
+
+  // Socket transport service thread that runs stats queries against us. Any
+  // thread.
+  const nsCOMPtr<nsISerialEventTarget> mStsThread;
+
+  // Thread on which we are fed video frames. Set lazily on first call to
+  // SendVideoFrame().
+  nsCOMPtr<nsISerialEventTarget> mFrameSendingThread;
+
+  struct Control {
+    // Mirrors that map to VideoConduitControlInterface for control. Call thread
+    // only.
+    Mirror<bool> mReceiving;
+    Mirror<bool> mTransmitting;
+    Mirror<Ssrcs> mLocalSsrcs;
+    Mirror<Ssrcs> mLocalRtxSsrcs;
+    Mirror<std::string> mLocalCname;
+    Mirror<std::string> mMid;
+    Mirror<Ssrc> mRemoteSsrc;
+    Mirror<Ssrc> mRemoteRtxSsrc;
+    Mirror<std::string> mSyncGroup;
+    Mirror<RtpExtList> mLocalRecvRtpExtensions;
+    Mirror<RtpExtList> mLocalSendRtpExtensions;
+    Mirror<Maybe<VideoCodecConfig>> mSendCodec;
+    Mirror<Maybe<RtpRtcpConfig>> mSendRtpRtcpConfig;
+    Mirror<std::vector<VideoCodecConfig>> mRecvCodecs;
+    Mirror<Maybe<RtpRtcpConfig>> mRecvRtpRtcpConfig;
+    Mirror<webrtc::VideoCodecMode> mCodecMode;
+
+    // For caching mRemoteSsrc and mRemoteRtxSsrc, since another caller may
+    // change the remote ssrc in the stream config directly.
+    Ssrc mConfiguredRemoteSsrc = 0;
+    Ssrc mConfiguredRemoteRtxSsrc = 0;
+    // For tracking changes to mSendCodec and mSendRtpRtcpConfig.
+    Maybe<VideoCodecConfig> mConfiguredSendCodec;
+    Maybe<RtpRtcpConfig> mConfiguredSendRtpRtcpConfig;
+    // For tracking changes to mRecvCodecs and mRecvRtpRtcpConfig.
+    std::vector<VideoCodecConfig> mConfiguredRecvCodecs;
+    Maybe<RtpRtcpConfig> mConfiguredRecvRtpRtcpConfig;
+
+    Control() = delete;
+    explicit Control(const RefPtr<AbstractThread>& aCallThread);
+  } mControl;
+
+  // WatchManager allowing Mirrors and other watch targets to trigger functions
+  // that will update the webrtc.org configuration.
+  WatchManager<WebrtcVideoConduit> mWatchManager;
+
+  // Guards the members below that are annotated "Guarded by mMutex".
+  mutable Mutex mMutex MOZ_UNANNOTATED;
+
+  // Decoder factory used by mRecvStream when it needs new decoders. This is
+  // not shared broader like some state in the WebrtcCallWrapper because it
+  // handles CodecPluginID plumbing tied to this VideoConduit.
+  const UniquePtr<WebrtcVideoDecoderFactory> mDecoderFactory;
+
+  // Encoder factory used by mSendStream when it needs new encoders. This is
+  // not shared broader like some state in the WebrtcCallWrapper because it
+  // handles CodecPluginID plumbing tied to this VideoConduit.
+  const UniquePtr<WebrtcVideoEncoderFactory> mEncoderFactory;
+
+  // Our own record of the sinks added to mVideoBroadcaster so we can support
+  // dispatching updates to sinks from off-Call-thread. Call thread only.
+  AutoTArray<rtc::VideoSinkInterface<webrtc::VideoFrame>*, 1> mRegisteredSinks;
+
+  // Broadcaster that distributes our frames to all registered sinks.
+  // Threadsafe.
+  rtc::VideoBroadcaster mVideoBroadcaster;
+
+  // Buffer pool used for scaling frames.
+  // Accessed on the frame-feeding thread only.
+  webrtc::VideoFrameBufferPool mBufferPool;
+
+  // Engine state we are concerned with. Written on the Call thread and read
+  // anywhere.
+  mozilla::Atomic<bool>
+      mEngineTransmitting;  // If true ==> Transmit Subsystem is up and running
+  mozilla::Atomic<bool>
+      mEngineReceiving;  // if true ==> Receive Subsystem up and running
+
+  // Written only on the Call thread. Guarded by mMutex, except for reads on the
+  // Call thread.
+  Maybe<VideoCodecConfig> mCurSendCodecConfig;
+
+  // Bookkeeping of stats for telemetry. Call thread only.
+  RunningStat mSendFramerate;
+  RunningStat mSendBitrate;
+  RunningStat mRecvFramerate;
+  RunningStat mRecvBitrate;
+
+  // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete this.
+  // Written only on the Call thread. Guarded by mMutex, except for reads on the
+  // Call thread.
+  webrtc::VideoReceiveStreamInterface* mRecvStream = nullptr;
+
+  // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete this.
+  webrtc::VideoSendStream* mSendStream = nullptr;
+
+  // Written on the frame feeding thread.
+  // Guarded by mMutex, except for reads on the frame feeding thread.
+  unsigned short mLastWidth = 0;
+
+  // Written on the frame feeding thread.
+  // Guarded by mMutex, except for reads on the frame feeding thread.
+  unsigned short mLastHeight = 0;
+
+  // Written on the frame feeding thread, the timestamp of the last frame on the
+  // send side, in microseconds. This is a local timestamp using the system
+  // clock with a unspecified epoch (Like mozilla::TimeStamp).
+  // Guarded by mMutex.
+  Maybe<uint64_t> mLastTimestampSendUs;
+
+  // Written on the frame receive thread, the rtp timestamp of the last frame
+  // on the receive side, in 90kHz base. This comes from the RTP packet.
+  // Guarded by mMutex.
+  Maybe<uint32_t> mLastRTPTimestampReceive;
+
+  // Accessed from any thread under mRendererMonitor.
+  uint64_t mVideoLatencyAvg = 0;
+
+  const bool mVideoLatencyTestEnable;
+
+  // All in bps.
+  const int mMinBitrate;
+  const int mStartBitrate;
+  const int mPrefMaxBitrate;
+  const int mMinBitrateEstimate;
+
+  // Max bitrate in bps as provided by negotiation. Call thread only.
+  int mNegotiatedMaxBitrate = 0;
+
+  // Set to true to force denoising on.
+  const bool mDenoising;
+
+  // Set to true to ignore sink wants (scaling due to bwe and cpu usage).
+  const bool mLockScaling;
+
+  const uint8_t mSpatialLayers;
+  const uint8_t mTemporalLayers;
+
+  // EWMA coefficients and fixed-point scale for the video latency average
+  // (see VideoLatencyUpdate / MozVideoLatencyAvg).
+  static const unsigned int sAlphaNum = 7;
+  static const unsigned int sAlphaDen = 8;
+  static const unsigned int sRoundingPadding = 1024;
+
+  // WEBRTC.ORG Call API
+  // Const so can be accessed on any thread. All methods are called on the Call
+  // thread.
+  const RefPtr<WebrtcCallWrapper> mCall;
+
+  // Set up in the ctor and then not touched. Called through by the streams on
+  // any thread. Safe since we own and control the lifetime of the streams.
+  WebrtcSendTransport mSendTransport;
+  WebrtcReceiveTransport mRecvTransport;
+
+  // Written only on the Call thread. Guarded by mMutex, except for reads on the
+  // Call thread. Typical non-Call thread access is on the frame delivery
+  // thread.
+  webrtc::VideoSendStream::Config mSendStreamConfig;
+
+  // Call thread only.
+  webrtc::VideoEncoderConfig mEncoderConfig;
+
+  // Written only on the Call thread. Guarded by mMutex, except for reads on the
+  // Call thread. Calls can happen under mMutex on any thread.
+  DataMutex<RefPtr<rtc::RefCountedObject<VideoStreamFactory>>>
+      mVideoStreamFactory;
+
+  // Call thread only.
+  webrtc::VideoReceiveStreamInterface::Config mRecvStreamConfig;
+
+  // Are SSRC changes without signaling allowed or not.
+  // Call thread only.
+  bool mAllowSsrcChange = true;
+
+  // Accessed during configuration/signaling (Call thread), and on the frame
+  // delivery thread for frame history tracking. Set only on the Call thread.
+  Atomic<uint32_t> mRecvSSRC =
+      Atomic<uint32_t>(0);  // this can change during a stream!
+
+  // Accessed from both the STS and frame delivery thread for frame history
+  // tracking. Set when receiving packets.
+  Atomic<uint32_t> mRemoteSendSSRC =
+      Atomic<uint32_t>(0);  // this can change during a stream!
+
+  // Main thread only
+  nsTArray<uint64_t> mSendCodecPluginIDs;
+  // Main thread only
+  nsTArray<uint64_t> mRecvCodecPluginIDs;
+
+  // Main thread only
+  MediaEventListener mSendPluginCreated;
+  MediaEventListener mSendPluginReleased;
+  MediaEventListener mRecvPluginCreated;
+  MediaEventListener mRecvPluginReleased;
+
+  // Call thread only. ssrc -> base_seq
+  std::map<uint32_t, uint16_t> mRtpSendBaseSeqs;
+  // libwebrtc network thread only. ssrc -> base_seq.
+  // To track changes needed to mRtpSendBaseSeqs.
+  std::map<uint32_t, uint16_t> mRtpSendBaseSeqs_n;
+
+  // Tracking the attributes of received frames over time
+  // Protected by mRendererMonitor
+  dom::RTCVideoFrameHistoryInternal mReceivedFrameHistory;
+
+  // Thread safe
+  Atomic<bool> mTransportActive = Atomic<bool>(false);
+  MediaEventProducer<void> mRtcpByeEvent;
+  MediaEventProducer<void> mRtcpTimeoutEvent;
+  MediaEventProducerExc<MediaPacket> mSenderRtpSendEvent;
+  MediaEventProducerExc<MediaPacket> mSenderRtcpSendEvent;
+  MediaEventProducerExc<MediaPacket> mReceiverRtcpSendEvent;
+
+  // Assigned and revoked on mStsThread. Listeners for receiving packets.
+  MediaEventListener mSenderRtcpEventListener;    // Rtp-transmitting pipeline
+  MediaEventListener mReceiverRtcpEventListener;  // Rtp-receiving pipeline
+  MediaEventListener mReceiverRtpEventListener;   // Rtp-receiving pipeline
+};
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/VideoStreamFactory.cpp b/dom/media/webrtc/libwebrtcglue/VideoStreamFactory.cpp
new file mode 100644
index 0000000000..9782f8760b
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/VideoStreamFactory.cpp
@@ -0,0 +1,387 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#include "VideoStreamFactory.h"
+
+#include "common/browser_logging/CSFLog.h"
+#include "nsThreadUtils.h"
+#include "VideoConduit.h"
+
+// Shrinks *aWidth x *aHeight (never enlarges) so it fits within
+// aMaxWidth x aMaxHeight, preserving the aspect ratio as closely as integer
+// arithmetic allows. No-op when the size already fits.
+template <class t>
+void ConstrainPreservingAspectRatio(uint16_t aMaxWidth, uint16_t aMaxHeight,
+                                    t* aWidth, t* aHeight) {
+  if (((*aWidth) <= aMaxWidth) && ((*aHeight) <= aMaxHeight)) {
+    return;
+  }
+
+  // Cross-multiplied aspect comparison decides which dimension is the binding
+  // constraint; the other is scaled proportionally.
+  if ((*aWidth) * aMaxHeight > aMaxWidth * (*aHeight)) {
+    (*aHeight) = aMaxWidth * (*aHeight) / (*aWidth);
+    (*aWidth) = aMaxWidth;
+  } else {
+    (*aWidth) = aMaxHeight * (*aWidth) / (*aHeight);
+    (*aHeight) = aMaxHeight;
+  }
+}
+
+namespace mozilla {
+
+#ifdef LOGTAG
+# undef LOGTAG
+#endif
+#define LOGTAG "WebrtcVideoSessionConduit"
+
+#define DEFAULT_VIDEO_MAX_FRAMERATE 30u
+
+// Number of 16x16 macroblocks needed to cover a w x h frame, with each
+// dimension rounded up to a macroblock boundary.
+#define MB_OF(w, h) \
+  ((unsigned int)((((w + 15) >> 4)) * ((unsigned int)((h + 15) >> 4))))
+// For now, try to set the max rates well above the knee in the curve.
+// Chosen somewhat arbitrarily; it's hard to find good data oriented for
+// realtime interactive/talking-head recording. These rates assume
+// 30fps.
+
+// XXX Populate this based on a pref (which we should consider sorting because
+// people won't assume they need to).
+// Ordered from highest resolution tier to lowest; GetLimitsFor() relies on
+// this ordering and on the final 0-resolution entry as a guaranteed fallback.
+static VideoStreamFactory::ResolutionAndBitrateLimits
+    kResolutionAndBitrateLimits[] = {
+        // clang-format off
+  {MB_OF(1920, 1200), KBPS(1500), KBPS(2000), KBPS(10000)}, // >HD (3K, 4K, etc)
+  {MB_OF(1280, 720), KBPS(1200), KBPS(1500), KBPS(5000)}, // HD ~1080-1200
+  {MB_OF(800, 480), KBPS(200), KBPS(800), KBPS(2500)}, // HD ~720
+  {MB_OF(480, 270), KBPS(150), KBPS(500), KBPS(2000)}, // WVGA
+  {tl::Max<MB_OF(400, 240), MB_OF(352, 288)>::value, KBPS(125), KBPS(300), KBPS(1300)}, // VGA
+  {MB_OF(176, 144), KBPS(100), KBPS(150), KBPS(500)}, // WQVGA, CIF
+  {0 , KBPS(40), KBPS(80), KBPS(250)} // QCIF and below
+        // clang-format on
+};
+
+// Looks up min/start/max bitrate limits for a frame of aWidth x aHeight.
+// Picks the highest resolution tier the frame exceeds whose start bitrate
+// fits under aCapBps (0 = no cap); the table's final 0-resolution entry
+// guarantees a match, so falling out of the loop is a logic error.
+auto VideoStreamFactory::GetLimitsFor(unsigned int aWidth, unsigned int aHeight,
+                                      int aCapBps /* = 0 */)
+    -> ResolutionAndBitrateLimits {
+  // max bandwidth should be proportional (not linearly!) to resolution, and
+  // proportional (perhaps linearly, or close) to current frame rate.
+  int fs = MB_OF(aWidth, aHeight);
+
+  for (const auto& resAndLimits : kResolutionAndBitrateLimits) {
+    if (fs > resAndLimits.resolution_in_mb &&
+        // pick the highest range where at least start rate is within cap
+        // (or if we're at the end of the array).
+        (aCapBps == 0 || resAndLimits.start_bitrate_bps <= aCapBps ||
+         resAndLimits.resolution_in_mb == 0)) {
+      return resAndLimits;
+    }
+  }
+
+  MOZ_CRASH("Loop should have handled fallback");
+}
+
+/**
+ * Function to set the encoding bitrate limits based on incoming frame size and
+ * rate
+ * @param width, height: dimensions of the frame
+ * @param min: minimum bitrate in bps
+ * @param start: bitrate in bps that the encoder should start with
+ * @param cap: user-enforced max bitrate, or 0
+ * @param pref_cap: cap enforced by prefs
+ * @param negotiated_cap: cap negotiated through SDP
+ * @param aVideoStream stream to apply bitrates to
+ */
+// See the doxygen block above for parameter meanings. Writes the selected
+// min/target/max bitrates directly into aVideoStream.
+static void SelectBitrates(unsigned short width, unsigned short height, int min,
+                           int start, int cap, int pref_cap, int negotiated_cap,
+                           webrtc::VideoStream& aVideoStream) {
+  int& out_min = aVideoStream.min_bitrate_bps;
+  int& out_start = aVideoStream.target_bitrate_bps;
+  int& out_max = aVideoStream.max_bitrate_bps;
+
+  // Start from the resolution-tier defaults, then apply the user cap
+  // (MinIgnoreZero treats 0 as "no cap").
+  VideoStreamFactory::ResolutionAndBitrateLimits resAndLimits =
+      VideoStreamFactory::GetLimitsFor(width, height);
+  out_min = MinIgnoreZero(resAndLimits.min_bitrate_bps, cap);
+  out_start = MinIgnoreZero(resAndLimits.start_bitrate_bps, cap);
+  out_max = MinIgnoreZero(resAndLimits.max_bitrate_bps, cap);
+
+  // Note: negotiated_cap is the max transport bitrate - it applies to
+  // a single codec encoding, but should also apply to the sum of all
+  // simulcast layers in this encoding! So sum(layers.maxBitrate) <=
+  // negotiated_cap
+  // Note that out_max already has had pref_cap applied to it
+  out_max = MinIgnoreZero(negotiated_cap, out_max);
+  out_min = std::min(out_min, out_max);
+  out_start = std::min(out_start, out_max);
+
+  // Caller-supplied min/start (0 = unset) may only raise the defaults.
+  if (min && min > out_min) {
+    out_min = min;
+  }
+  // If we try to set a minimum bitrate that is too low, ViE will reject it.
+  out_min = std::max(kViEMinCodecBitrate_bps, out_min);
+  out_max = std::max(kViEMinCodecBitrate_bps, out_max);
+  if (start && start > out_start) {
+    out_start = start;
+  }
+
+  // Ensure that min <= start <= max
+  if (out_min > out_max) {
+    out_min = out_max;
+  }
+  out_start = std::min(out_max, std::max(out_start, out_min));
+
+  MOZ_ASSERT(pref_cap == 0 || out_max <= pref_cap);
+}
+
+// Builds the webrtc::VideoStream configuration list for an input frame of
+// aWidth x aHeight: one stream per configured encoding (exactly one when
+// screensharing), each with its scaled resolution, max framerate and bitrate
+// limits. Encodings whose scaled resolution collapses to zero are skipped.
+std::vector<webrtc::VideoStream> VideoStreamFactory::CreateEncoderStreams(
+    int aWidth, int aHeight, const webrtc::VideoEncoderConfig& aConfig) {
+  // We only allow one layer when screensharing
+  const size_t streamCount =
+      mCodecMode == webrtc::VideoCodecMode::kScreensharing
+          ? 1
+          : aConfig.number_of_streams;
+
+  MOZ_RELEASE_ASSERT(streamCount >= 1, "Should request at least one stream");
+
+  std::vector<webrtc::VideoStream> streams;
+  streams.reserve(streamCount);
+
+  {
+    auto frameRateController = mFramerateController.Lock();
+    frameRateController->Reset();
+  }
+
+  // Iterate encodings from highest index down to 0.
+  for (int idx = streamCount - 1; idx >= 0; --idx) {
+    webrtc::VideoStream video_stream;
+    auto& encoding = mCodecConfig.mEncodings[idx];
+    video_stream.active = encoding.active;
+    MOZ_ASSERT(encoding.constraints.scaleDownBy >= 1.0);
+
+    gfx::IntSize newSize(0, 0);
+
+    if (aWidth && aHeight) {
+      // When scaling is locked, ignore the sink's max-pixel-count want.
+      auto maxPixelCount = mLockScaling ? 0U : mWants.max_pixel_count;
+      newSize = CalculateScaledResolution(
+          aWidth, aHeight, encoding.constraints.scaleDownBy, maxPixelCount);
+    }
+
+    if (newSize.width == 0 || newSize.height == 0) {
+      CSFLogInfo(LOGTAG,
+                 "%s Stream with RID %s ignored because of no resolution.",
+                 __FUNCTION__, encoding.rid.c_str());
+      continue;
+    }
+
+    // Apply the codec-level max width/height (0 = unconstrained) while
+    // preserving aspect ratio.
+    uint16_t max_width = mCodecConfig.mEncodingConstraints.maxWidth;
+    uint16_t max_height = mCodecConfig.mEncodingConstraints.maxHeight;
+    if (max_width || max_height) {
+      max_width = max_width ? max_width : UINT16_MAX;
+      max_height = max_height ? max_height : UINT16_MAX;
+      ConstrainPreservingAspectRatio(max_width, max_height, &newSize.width,
+                                     &newSize.height);
+    }
+
+    MOZ_ASSERT(newSize.width > 0);
+    MOZ_ASSERT(newSize.height > 0);
+    video_stream.width = newSize.width;
+    video_stream.height = newSize.height;
+    SelectMaxFramerateForAllStreams(newSize.width, newSize.height);
+
+    CSFLogInfo(LOGTAG, "%s Input frame %ux%u, RID %s scaling to %zux%zu",
+               __FUNCTION__, aWidth, aHeight, encoding.rid.c_str(),
+               video_stream.width, video_stream.height);
+
+    // mMaxFramerateForAllStreams is based on codec-wide stuff like fmtp, and
+    // hard-coded limits based on the source resolution.
+    // mCodecConfig.mEncodingConstraints.maxFps does not take the hard-coded
+    // limits into account, so we have mMaxFramerateForAllStreams which
+    // incorporates those. Per-encoding max framerate is based on parameters
+    // from JS, and maybe rid
+    unsigned int max_framerate = SelectFrameRate(
+        mMaxFramerateForAllStreams, video_stream.width, video_stream.height);
+    max_framerate = std::min(WebrtcVideoConduit::ToLibwebrtcMaxFramerate(
+                                 encoding.constraints.maxFps),
+                             max_framerate);
+    if (max_framerate >= std::numeric_limits<int>::max()) {
+      // If nothing has specified any kind of limit (uncommon), pick something
+      // reasonable.
+      max_framerate = DEFAULT_VIDEO_MAX_FRAMERATE;
+    }
+    video_stream.max_framerate = static_cast<int>(max_framerate);
+    CSFLogInfo(LOGTAG, "%s Stream with RID %s maxFps=%d (global max fps = %u)",
+               __FUNCTION__, encoding.rid.c_str(), video_stream.max_framerate,
+               (unsigned)mMaxFramerateForAllStreams);
+
+    SelectBitrates(video_stream.width, video_stream.height, mMinBitrate,
+                   mStartBitrate, encoding.constraints.maxBr, mPrefMaxBitrate,
+                   mNegotiatedMaxBitrate, video_stream);
+
+    video_stream.bitrate_priority = aConfig.bitrate_priority;
+    video_stream.max_qp = kQpMax;
+
+    if (streamCount > 1) {
+      if (mCodecMode == webrtc::VideoCodecMode::kScreensharing) {
+        video_stream.num_temporal_layers = 1;
+      } else {
+        video_stream.num_temporal_layers = 2;
+      }
+      // XXX Bug 1390215 investigate using more of
+      // simulcast.cc:GetSimulcastConfig() or our own algorithm to replace it
+    }
+
+    if (mCodecConfig.mName == "H264") {
+      if (mCodecConfig.mEncodingConstraints.maxMbps > 0) {
+        // Not supported yet!
+        CSFLogError(LOGTAG, "%s H.264 max_mbps not supported yet",
+                    __FUNCTION__);
+      }
+    }
+    streams.push_back(video_stream);
+  }
+
+  MOZ_RELEASE_ASSERT(streams.size(), "Should configure at least one stream");
+  return streams;
+}
+
+gfx::IntSize VideoStreamFactory::CalculateScaledResolution(
+ int aWidth, int aHeight, double aScaleDownByResolution,
+ unsigned int aMaxPixelCount) {
+ // If any adjustments like scaleResolutionDownBy or maxFS are being given
+ // we want to choose a height and width here to provide for more variety
+ // in possible resolutions.
+ int width = aWidth;
+ int height = aHeight;
+
+ if (aScaleDownByResolution > 1) {
+ width = static_cast<int>(aWidth / aScaleDownByResolution);
+ height = static_cast<int>(aHeight / aScaleDownByResolution);
+ }
+
+ // Check if we still need to adjust resolution down more due to other
+ // constraints.
+ if (mCodecConfig.mEncodingConstraints.maxFs > 0 || aMaxPixelCount > 0) {
+ auto currentFs = static_cast<unsigned int>(width * height);
+ auto maxFs =
+ (mCodecConfig.mEncodingConstraints.maxFs > 0 && aMaxPixelCount > 0)
+ ? std::min((mCodecConfig.mEncodingConstraints.maxFs * 16 * 16),
+ aMaxPixelCount)
+ : std::max((mCodecConfig.mEncodingConstraints.maxFs * 16 * 16),
+ aMaxPixelCount);
+
+ // If our currentFs is greater than maxFs we calculate a width and height
+ // that will get as close as possible to maxFs and try to maintain aspect
+ // ratio.
+ if (currentFs > maxFs) {
+ if (aWidth > aHeight) { // Landscape
+ auto aspectRatio = static_cast<double>(aWidth) / aHeight;
+
+ height = static_cast<int>(std::sqrt(maxFs / aspectRatio));
+ width = static_cast<int>(height * aspectRatio);
+ } else { // Portrait
+ auto aspectRatio = static_cast<double>(aHeight) / aWidth;
+
+ width = static_cast<int>(std::sqrt(maxFs / aspectRatio));
+ height = static_cast<int>(width * aspectRatio);
+ }
+ }
+ }
+
+ // Simplest possible adaptation to resolution alignment.
+ width -= width % mWants.resolution_alignment;
+ height -= height % mWants.resolution_alignment;
+
+  // Don't scale below our minimum value to prevent problems.
+ const int minSize = 1;
+ if (width < minSize || height < minSize) {
+ width = minSize;
+ height = minSize;
+ }
+
+ return gfx::IntSize(width, height);
+}
+
+void VideoStreamFactory::SelectMaxFramerateForAllStreams(
+ unsigned short aWidth, unsigned short aHeight) {
+ int max_fs = std::numeric_limits<int>::max();
+ if (!mLockScaling) {
+ max_fs = mWants.max_pixel_count;
+ }
+ // Limit resolution to max-fs
+ if (mCodecConfig.mEncodingConstraints.maxFs) {
+ // max-fs is in macroblocks, convert to pixels
+ max_fs = std::min(
+ max_fs,
+ static_cast<int>(mCodecConfig.mEncodingConstraints.maxFs * (16 * 16)));
+ }
+
+ unsigned int framerate_all_streams =
+ SelectFrameRate(mMaxFramerateForAllStreams, aWidth, aHeight);
+ unsigned int maxFrameRate = mMaxFramerateForAllStreams;
+ if (mMaxFramerateForAllStreams != framerate_all_streams) {
+ CSFLogDebug(LOGTAG, "%s: framerate changing to %u (from %u)", __FUNCTION__,
+ framerate_all_streams, maxFrameRate);
+ mMaxFramerateForAllStreams = framerate_all_streams;
+ }
+
+ int framerate_with_wants;
+ if (framerate_all_streams > std::numeric_limits<int>::max()) {
+ framerate_with_wants = std::numeric_limits<int>::max();
+ } else {
+ framerate_with_wants = static_cast<int>(framerate_all_streams);
+ }
+
+ framerate_with_wants =
+ std::min(framerate_with_wants, mWants.max_framerate_fps);
+ CSFLogDebug(LOGTAG,
+ "%s: Calling OnOutputFormatRequest, max_fs=%d, max_fps=%d",
+ __FUNCTION__, max_fs, framerate_with_wants);
+ auto frameRateController = mFramerateController.Lock();
+ frameRateController->SetMaxFramerate(framerate_with_wants);
+}
+
+unsigned int VideoStreamFactory::SelectFrameRate(
+ unsigned int aOldFramerate, unsigned short aSendingWidth,
+ unsigned short aSendingHeight) {
+ unsigned int new_framerate = aOldFramerate;
+
+ // Limit frame rate based on max-mbps
+ if (mCodecConfig.mEncodingConstraints.maxMbps) {
+ unsigned int cur_fs, mb_width, mb_height;
+
+ mb_width = (aSendingWidth + 15) >> 4;
+ mb_height = (aSendingHeight + 15) >> 4;
+
+ cur_fs = mb_width * mb_height;
+ if (cur_fs > 0) { // in case no frames have been sent
+ new_framerate = mCodecConfig.mEncodingConstraints.maxMbps / cur_fs;
+ }
+ }
+
+ new_framerate =
+ std::min(new_framerate, WebrtcVideoConduit::ToLibwebrtcMaxFramerate(
+ mCodecConfig.mEncodingConstraints.maxFps));
+ return new_framerate;
+}
+
+bool VideoStreamFactory::ShouldDropFrame(const webrtc::VideoFrame& aFrame) {
+ bool hasNonZeroLayer = false;
+ {
+ const size_t streamCount =
+ mCodecMode == webrtc::VideoCodecMode::kScreensharing
+ ? 1
+ : mCodecConfig.mEncodings.size();
+ for (int idx = streamCount - 1; idx >= 0; --idx) {
+ const auto& encoding = mCodecConfig.mEncodings[idx];
+ if (aFrame.width() / encoding.constraints.scaleDownBy >= 1.0 &&
+ aFrame.height() / encoding.constraints.scaleDownBy >= 1.0) {
+ hasNonZeroLayer = true;
+ break;
+ }
+ }
+ }
+ if (!hasNonZeroLayer) {
+ return true;
+ }
+
+ auto frameRateController = mFramerateController.Lock();
+ return frameRateController->ShouldDropFrame(aFrame.timestamp_us() *
+ rtc::kNumNanosecsPerMicrosec);
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/VideoStreamFactory.h b/dom/media/webrtc/libwebrtcglue/VideoStreamFactory.h
new file mode 100644
index 0000000000..70563a9a9a
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/VideoStreamFactory.h
@@ -0,0 +1,132 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#ifndef VideoStreamFactory_h
+#define VideoStreamFactory_h
+
+#include "CodecConfig.h"
+#include "mozilla/Atomics.h"
+#include "mozilla/DataMutex.h"
+#include "mozilla/gfx/Point.h"
+#include "mozilla/UniquePtr.h"
+#include "api/video/video_source_interface.h"
+#include "api/video_codecs/video_encoder_config.h"
+#include "common_video/framerate_controller.h"
+#include "rtc_base/time_utils.h"
+
+namespace webrtc {
+class VideoFrame;
+}
+
+namespace mozilla {
+
+// Factory class for VideoStreams... vie_encoder.cc will call this to
+// reconfigure.
+class VideoStreamFactory
+ : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
+ public:
+ struct ResolutionAndBitrateLimits {
+ int resolution_in_mb;
+ int min_bitrate_bps;
+ int start_bitrate_bps;
+ int max_bitrate_bps;
+ };
+
+ static ResolutionAndBitrateLimits GetLimitsFor(unsigned int aWidth,
+ unsigned int aHeight,
+ int aCapBps = 0);
+
+ VideoStreamFactory(VideoCodecConfig aConfig,
+ webrtc::VideoCodecMode aCodecMode, int aMinBitrate,
+ int aStartBitrate, int aPrefMaxBitrate,
+ int aNegotiatedMaxBitrate,
+ const rtc::VideoSinkWants& aWants, bool aLockScaling)
+ : mCodecMode(aCodecMode),
+ mMaxFramerateForAllStreams(std::numeric_limits<unsigned int>::max()),
+ mCodecConfig(std::forward<VideoCodecConfig>(aConfig)),
+ mMinBitrate(aMinBitrate),
+ mStartBitrate(aStartBitrate),
+ mPrefMaxBitrate(aPrefMaxBitrate),
+ mNegotiatedMaxBitrate(aNegotiatedMaxBitrate),
+ mFramerateController("VideoStreamFactory::mFramerateController"),
+ mWants(aWants),
+ mLockScaling(aLockScaling) {}
+
+ // This gets called off-main thread and may hold internal webrtc.org
+ // locks. May *NOT* lock the conduit's mutex, to avoid deadlocks.
+ std::vector<webrtc::VideoStream> CreateEncoderStreams(
+ int aWidth, int aHeight,
+ const webrtc::VideoEncoderConfig& aConfig) override;
+ /**
+ * Function to select and change the encoding resolution based on incoming
+ * frame size and current available bandwidth.
+ * @param width, height: dimensions of the frame
+ */
+ void SelectMaxFramerateForAllStreams(unsigned short aWidth,
+ unsigned short aHeight);
+
+ /**
+ * Function to determine if the frame should be dropped based on the given
+ * frame's resolution (combined with the factory's scaleResolutionDownBy) or
+ * timestamp.
+ * @param aFrame frame to be evaluated.
+   * @return true if frame should be dropped, false otherwise.
+ */
+ bool ShouldDropFrame(const webrtc::VideoFrame& aFrame);
+
+ private:
+ /**
+ * Function to calculate a scaled down width and height based on
+ * scaleDownByResolution, maxFS, and max pixel count settings.
+ * @param aWidth current frame width
+ * @param aHeight current frame height
+ * @param aScaleDownByResolution value to scale width and height down by.
+ * @param aMaxPixelCount maximum number of pixels wanted in a frame.
+ * @return a gfx:IntSize containing width and height to use. These may match
+ * the aWidth and aHeight passed in if no scaling was needed.
+ */
+ gfx::IntSize CalculateScaledResolution(int aWidth, int aHeight,
+ double aScaleDownByResolution,
+ unsigned int aMaxPixelCount);
+
+ /**
+ * Function to select and change the encoding frame rate based on incoming
+ * frame rate, current frame size and max-mbps setting.
+ * @param aOldFramerate current framerate
+ * @param aSendingWidth width of frames being sent
+ * @param aSendingHeight height of frames being sent
+   * @return new framerate meeting max-mbps requirements based on frame size
+ */
+ unsigned int SelectFrameRate(unsigned int aOldFramerate,
+ unsigned short aSendingWidth,
+ unsigned short aSendingHeight);
+
+ // Used to limit number of streams for screensharing.
+ Atomic<webrtc::VideoCodecMode> mCodecMode;
+
+ // The framerate we're currently sending at.
+ Atomic<unsigned int> mMaxFramerateForAllStreams;
+
+ // The current send codec config, containing simulcast layer configs.
+ const VideoCodecConfig mCodecConfig;
+
+ // Bitrate limits in bps.
+ const int mMinBitrate = 0;
+ const int mStartBitrate = 0;
+ const int mPrefMaxBitrate = 0;
+ const int mNegotiatedMaxBitrate = 0;
+
+  // DataMutex used as object is mutated from a libwebrtc thread and
+  // a separate thread used to pass video frames to libwebrtc.
+ DataMutex<webrtc::FramerateController> mFramerateController;
+
+ const rtc::VideoSinkWants mWants;
+ const bool mLockScaling;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/VideoTypes.h b/dom/media/webrtc/libwebrtcglue/VideoTypes.h
new file mode 100644
index 0000000000..e6602de5b6
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/VideoTypes.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (c) 2012, The WebRTC project authors. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * * Neither the name of Google nor the names of its contributors may
+ * be used to endorse or promote products derived from this software
+ * without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef VIDEO_TYPE_
+#define VIDEO_TYPE_
+
+namespace mozilla {
+/*
+ * Enumeration for different video types supported by the
+ * video-engine. If more types will be supported in the future
+ * newer one shall be appended to the bottom of the list
+ */
+enum VideoType {
+ kVideoI420 = 0,
+ kVideoYV12 = 1,
+ kVideoYUY2 = 2,
+ kVideoUYVY = 3,
+ kVideoIYUV = 4,
+ kVideoARGB = 5,
+ kVideoRGB24 = 6,
+ kVideoRGB565 = 7,
+ kVideoARGB4444 = 8,
+ kVideoARGB1555 = 9,
+ kVideoMJPEG = 10,
+ kVideoNV12 = 11,
+ kVideoNV21 = 12,
+ kVideoBGRA = 13,
+ kVideoUnknown = 99
+};
+} // namespace mozilla
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.cpp b/dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.cpp
new file mode 100644
index 0000000000..d3c3d27e40
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.cpp
@@ -0,0 +1,105 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebrtcCallWrapper.h"
+
+#include "jsapi/PeerConnectionCtx.h"
+#include "MediaConduitInterface.h"
+#include "TaskQueueWrapper.h"
+
+// libwebrtc includes
+#include "call/rtp_transport_controller_send_factory.h"
+
+namespace mozilla {
+
+/* static */ RefPtr<WebrtcCallWrapper> WebrtcCallWrapper::Create(
+ const dom::RTCStatsTimestampMaker& aTimestampMaker,
+ UniquePtr<media::ShutdownBlockingTicket> aShutdownTicket,
+ const RefPtr<SharedWebrtcState>& aSharedState) {
+ auto eventLog = MakeUnique<webrtc::RtcEventLogNull>();
+ auto taskQueueFactory = MakeUnique<SharedThreadPoolWebRtcTaskQueueFactory>();
+ auto videoBitrateAllocatorFactory =
+ WrapUnique(webrtc::CreateBuiltinVideoBitrateAllocatorFactory().release());
+ RefPtr<WebrtcCallWrapper> wrapper = new WebrtcCallWrapper(
+ aSharedState, std::move(videoBitrateAllocatorFactory),
+ std::move(eventLog), std::move(taskQueueFactory), aTimestampMaker,
+ std::move(aShutdownTicket));
+
+ wrapper->mCallThread->Dispatch(
+ NS_NewRunnableFunction(__func__, [wrapper, aSharedState] {
+ webrtc::Call::Config config(wrapper->mEventLog.get());
+ config.audio_state =
+ webrtc::AudioState::Create(aSharedState->mAudioStateConfig);
+ config.task_queue_factory = wrapper->mTaskQueueFactory.get();
+ config.trials = aSharedState->mTrials.get();
+ wrapper->SetCall(WrapUnique(webrtc::Call::Create(
+ config, &wrapper->mClock,
+ webrtc::RtpTransportControllerSendFactory().Create(
+ config.ExtractTransportConfig(), &wrapper->mClock))));
+ }));
+
+ return wrapper;
+}
+
+void WebrtcCallWrapper::SetCall(UniquePtr<webrtc::Call> aCall) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ MOZ_ASSERT(!mCall);
+ mCall = std::move(aCall);
+}
+
+webrtc::Call* WebrtcCallWrapper::Call() const {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ return mCall.get();
+}
+
+void WebrtcCallWrapper::UnsetRemoteSSRC(uint32_t aSsrc) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ for (auto conduit : mConduits) {
+ conduit->UnsetRemoteSSRC(aSsrc);
+ }
+}
+
+void WebrtcCallWrapper::RegisterConduit(MediaSessionConduit* conduit) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mConduits.insert(conduit);
+}
+
+void WebrtcCallWrapper::UnregisterConduit(MediaSessionConduit* conduit) {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mConduits.erase(conduit);
+}
+
+void WebrtcCallWrapper::Destroy() {
+ MOZ_ASSERT(mCallThread->IsOnCurrentThread());
+ mCall = nullptr;
+ mShutdownTicket = nullptr;
+}
+
+const dom::RTCStatsTimestampMaker& WebrtcCallWrapper::GetTimestampMaker()
+ const {
+ return mClock.mTimestampMaker;
+}
+
+WebrtcCallWrapper::~WebrtcCallWrapper() { MOZ_ASSERT(!mCall); }
+
+WebrtcCallWrapper::WebrtcCallWrapper(
+ RefPtr<SharedWebrtcState> aSharedState,
+ UniquePtr<webrtc::VideoBitrateAllocatorFactory>
+ aVideoBitrateAllocatorFactory,
+ UniquePtr<webrtc::RtcEventLog> aEventLog,
+ UniquePtr<webrtc::TaskQueueFactory> aTaskQueueFactory,
+ const dom::RTCStatsTimestampMaker& aTimestampMaker,
+ UniquePtr<media::ShutdownBlockingTicket> aShutdownTicket)
+ : mSharedState(std::move(aSharedState)),
+ mClock(aTimestampMaker),
+ mShutdownTicket(std::move(aShutdownTicket)),
+ mCallThread(mSharedState->mCallWorkerThread),
+ mAudioDecoderFactory(mSharedState->mAudioDecoderFactory),
+ mVideoBitrateAllocatorFactory(std::move(aVideoBitrateAllocatorFactory)),
+ mEventLog(std::move(aEventLog)),
+ mTaskQueueFactory(std::move(aTaskQueueFactory)) {}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.h b/dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.h
new file mode 100644
index 0000000000..f6376c94ed
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.h
@@ -0,0 +1,114 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim:set ts=2 sw=2 sts=2 et cindent: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_WEBRTCCALLWRAPPER_H_
+#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_WEBRTCCALLWRAPPER_H_
+
+#include <set>
+
+#include "domstubs.h"
+#include "jsapi/RTCStatsReport.h"
+#include "nsISupportsImpl.h"
+#include "SystemTime.h"
+
+// libwebrtc includes
+#include "api/video/builtin_video_bitrate_allocator_factory.h"
+#include "call/call.h"
+#include "call/call_config.h"
+
+namespace mozilla {
+class AbstractThread;
+class MediaSessionConduit;
+class SharedWebrtcState;
+
+namespace media {
+class ShutdownBlockingTicket;
+}
+
+// Wrap the webrtc.org Call class adding mozilla add/ref support.
+class WebrtcCallWrapper {
+ public:
+ typedef webrtc::CallConfig Config;
+
+ static RefPtr<WebrtcCallWrapper> Create(
+ const dom::RTCStatsTimestampMaker& aTimestampMaker,
+ UniquePtr<media::ShutdownBlockingTicket> aShutdownTicket,
+ const RefPtr<SharedWebrtcState>& aSharedState);
+
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcCallWrapper)
+
+ // Don't allow copying/assigning.
+ WebrtcCallWrapper(const WebrtcCallWrapper&) = delete;
+ void operator=(const WebrtcCallWrapper&) = delete;
+
+ void SetCall(UniquePtr<webrtc::Call> aCall);
+
+ webrtc::Call* Call() const;
+
+ void UnsetRemoteSSRC(uint32_t aSsrc);
+
+ // Idempotent.
+ void RegisterConduit(MediaSessionConduit* conduit);
+
+ // Idempotent.
+ void UnregisterConduit(MediaSessionConduit* conduit);
+
+ // Allow destroying the Call instance on the Call worker thread.
+ //
+ // Note that shutdown is blocked until the Call instance is destroyed.
+ //
+ // This CallWrapper is designed to be sharable, and is held by several objects
+ // that are cycle-collectable. TaskQueueWrapper that the Call instances use
+ // for worker threads are based off SharedThreadPools, and will block
+ // xpcom-shutdown-threads until destroyed. The Call instance however will hold
+ // on to its worker threads until destruction.
+ //
+ // If the last ref to this CallWrapper is held to cycle collector shutdown we
+ // end up in a deadlock where cycle collector shutdown is required to destroy
+ // the SharedThreadPool that is blocking xpcom-shutdown-threads from finishing
+ // and triggering cycle collector shutdown.
+ //
+ // It would be nice to have the invariant where this class is immutable to the
+ // degree that mCall is const, but given the above that is not possible.
+ void Destroy();
+
+ const dom::RTCStatsTimestampMaker& GetTimestampMaker() const;
+
+ protected:
+ virtual ~WebrtcCallWrapper();
+
+ WebrtcCallWrapper(RefPtr<SharedWebrtcState> aSharedState,
+ UniquePtr<webrtc::VideoBitrateAllocatorFactory>
+ aVideoBitrateAllocatorFactory,
+ UniquePtr<webrtc::RtcEventLog> aEventLog,
+ UniquePtr<webrtc::TaskQueueFactory> aTaskQueueFactory,
+ const dom::RTCStatsTimestampMaker& aTimestampMaker,
+ UniquePtr<media::ShutdownBlockingTicket> aShutdownTicket);
+
+ const RefPtr<SharedWebrtcState> mSharedState;
+
+ // Allows conduits to know about one another, to avoid remote SSRC
+ // collisions.
+ std::set<MediaSessionConduit*> mConduits;
+ RTCStatsTimestampMakerRealtimeClock mClock;
+ UniquePtr<media::ShutdownBlockingTicket> mShutdownTicket;
+
+ public:
+ const RefPtr<AbstractThread> mCallThread;
+ const RefPtr<webrtc::AudioDecoderFactory> mAudioDecoderFactory;
+ const UniquePtr<webrtc::VideoBitrateAllocatorFactory>
+ mVideoBitrateAllocatorFactory;
+ const UniquePtr<webrtc::RtcEventLog> mEventLog;
+ const UniquePtr<webrtc::TaskQueueFactory> mTaskQueueFactory;
+
+ protected:
+ // Call worker thread only.
+ UniquePtr<webrtc::Call> mCall;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.cpp b/dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.cpp
new file mode 100644
index 0000000000..800e887d4a
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.cpp
@@ -0,0 +1,1028 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebrtcGmpVideoCodec.h"
+
+#include <utility>
+#include <vector>
+
+#include "GMPLog.h"
+#include "MainThreadUtils.h"
+#include "VideoConduit.h"
+#include "gmp-video-frame-encoded.h"
+#include "gmp-video-frame-i420.h"
+#include "mozilla/CheckedInt.h"
+#include "mozilla/EndianUtils.h"
+#include "mozilla/IntegerPrintfMacros.h"
+#include "mozilla/SyncRunnable.h"
+#include "nsServiceManagerUtils.h"
+#include "transport/runnable_utils.h"
+#include "api/video/video_frame_type.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "media/base/media_constants.h"
+// #include "rtc_base/bind.h"
+
+namespace mozilla {
+
+// QP scaling thresholds.
+static const int kLowH264QpThreshold = 24;
+static const int kHighH264QpThreshold = 37;
+
+// Encoder.
+WebrtcGmpVideoEncoder::WebrtcGmpVideoEncoder(
+ const webrtc::SdpVideoFormat& aFormat, std::string aPCHandle)
+ : mGMP(nullptr),
+ mInitting(false),
+ mHost(nullptr),
+ mMaxPayloadSize(0),
+ mFormatParams(aFormat.parameters),
+ mCallbackMutex("WebrtcGmpVideoEncoder encoded callback mutex"),
+ mCallback(nullptr),
+ mPCHandle(std::move(aPCHandle)),
+ mInputImageMap("WebrtcGmpVideoEncoder::mInputImageMap") {
+ mCodecParams.mGMPApiVersion = 0;
+ mCodecParams.mCodecType = kGMPVideoCodecInvalid;
+ mCodecParams.mPLType = 0;
+ mCodecParams.mWidth = 0;
+ mCodecParams.mHeight = 0;
+ mCodecParams.mStartBitrate = 0;
+ mCodecParams.mMaxBitrate = 0;
+ mCodecParams.mMinBitrate = 0;
+ mCodecParams.mMaxFramerate = 0;
+ mCodecParams.mFrameDroppingOn = false;
+ mCodecParams.mKeyFrameInterval = 0;
+ mCodecParams.mQPMax = 0;
+ mCodecParams.mNumberOfSimulcastStreams = 0;
+ mCodecParams.mMode = kGMPCodecModeInvalid;
+ MOZ_ASSERT(!mPCHandle.empty());
+}
+
+WebrtcGmpVideoEncoder::~WebrtcGmpVideoEncoder() {
+ // We should not have been destroyed if we never closed our GMP
+ MOZ_ASSERT(!mGMP);
+}
+
+static int WebrtcFrameTypeToGmpFrameType(webrtc::VideoFrameType aIn,
+ GMPVideoFrameType* aOut) {
+ MOZ_ASSERT(aOut);
+ switch (aIn) {
+ case webrtc::VideoFrameType::kVideoFrameKey:
+ *aOut = kGMPKeyFrame;
+ break;
+ case webrtc::VideoFrameType::kVideoFrameDelta:
+ *aOut = kGMPDeltaFrame;
+ break;
+ case webrtc::VideoFrameType::kEmptyFrame:
+ *aOut = kGMPSkipFrame;
+ break;
+ default:
+ MOZ_CRASH("Unexpected webrtc::FrameType");
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+static int GmpFrameTypeToWebrtcFrameType(GMPVideoFrameType aIn,
+ webrtc::VideoFrameType* aOut) {
+ MOZ_ASSERT(aOut);
+ switch (aIn) {
+ case kGMPKeyFrame:
+ *aOut = webrtc::VideoFrameType::kVideoFrameKey;
+ break;
+ case kGMPDeltaFrame:
+ *aOut = webrtc::VideoFrameType::kVideoFrameDelta;
+ break;
+ case kGMPSkipFrame:
+ *aOut = webrtc::VideoFrameType::kEmptyFrame;
+ break;
+ default:
+ MOZ_CRASH("Unexpected GMPVideoFrameType");
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+static int SizeNumBytes(GMPBufferType aBufferType) {
+ switch (aBufferType) {
+ case GMP_BufferSingle:
+ return 0;
+ case GMP_BufferLength8:
+ return 1;
+ case GMP_BufferLength16:
+ return 2;
+ case GMP_BufferLength24:
+ return 3;
+ case GMP_BufferLength32:
+ return 4;
+ default:
+ MOZ_CRASH("Unexpected buffer type");
+ }
+}
+
+int32_t WebrtcGmpVideoEncoder::InitEncode(
+ const webrtc::VideoCodec* aCodecSettings,
+ const webrtc::VideoEncoder::Settings& aSettings) {
+ if (!mMPS) {
+ mMPS = do_GetService("@mozilla.org/gecko-media-plugin-service;1");
+ }
+ MOZ_ASSERT(mMPS);
+
+ if (!mGMPThread) {
+ if (NS_WARN_IF(NS_FAILED(mMPS->GetThread(getter_AddRefs(mGMPThread))))) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
+ MOZ_ASSERT(aCodecSettings->numberOfSimulcastStreams == 1,
+ "Simulcast not implemented for GMP-H264");
+
+ // Bug XXXXXX: transfer settings from codecSettings to codec.
+ GMPVideoCodec codecParams;
+ memset(&codecParams, 0, sizeof(codecParams));
+
+ codecParams.mGMPApiVersion = 33;
+ codecParams.mStartBitrate = aCodecSettings->startBitrate;
+ codecParams.mMinBitrate = aCodecSettings->minBitrate;
+ codecParams.mMaxBitrate = aCodecSettings->maxBitrate;
+ codecParams.mMaxFramerate = aCodecSettings->maxFramerate;
+
+ memset(&mCodecSpecificInfo.codecSpecific, 0,
+ sizeof(mCodecSpecificInfo.codecSpecific));
+ mCodecSpecificInfo.codecType = webrtc::kVideoCodecH264;
+ mCodecSpecificInfo.codecSpecific.H264.packetization_mode =
+ mFormatParams.count(cricket::kH264FmtpPacketizationMode) == 1 &&
+ mFormatParams.at(cricket::kH264FmtpPacketizationMode) == "1"
+ ? webrtc::H264PacketizationMode::NonInterleaved
+ : webrtc::H264PacketizationMode::SingleNalUnit;
+
+ uint32_t maxPayloadSize = aSettings.max_payload_size;
+ if (mCodecSpecificInfo.codecSpecific.H264.packetization_mode ==
+ webrtc::H264PacketizationMode::NonInterleaved) {
+ maxPayloadSize = 0; // No limit, use FUAs
+ }
+
+ if (aCodecSettings->mode == webrtc::VideoCodecMode::kScreensharing) {
+ codecParams.mMode = kGMPScreensharing;
+ } else {
+ codecParams.mMode = kGMPRealtimeVideo;
+ }
+
+ codecParams.mWidth = aCodecSettings->width;
+ codecParams.mHeight = aCodecSettings->height;
+
+ RefPtr<GmpInitDoneRunnable> initDone(new GmpInitDoneRunnable(mPCHandle));
+ mGMPThread->Dispatch(
+ WrapRunnableNM(WebrtcGmpVideoEncoder::InitEncode_g,
+ RefPtr<WebrtcGmpVideoEncoder>(this), codecParams,
+ aSettings.number_of_cores, maxPayloadSize, initDone),
+ NS_DISPATCH_NORMAL);
+
+ // Since init of the GMP encoder is a multi-step async dispatch (including
+ // dispatches to main), and since this function is invoked on main, there's
+ // no safe way to block until this init is done. If an error occurs, we'll
+ // handle it later.
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+/* static */
+void WebrtcGmpVideoEncoder::InitEncode_g(
+ const RefPtr<WebrtcGmpVideoEncoder>& aThis,
+ const GMPVideoCodec& aCodecParams, int32_t aNumberOfCores,
+ uint32_t aMaxPayloadSize, const RefPtr<GmpInitDoneRunnable>& aInitDone) {
+ nsTArray<nsCString> tags;
+ tags.AppendElement("h264"_ns);
+ UniquePtr<GetGMPVideoEncoderCallback> callback(
+ new InitDoneCallback(aThis, aInitDone, aCodecParams));
+ aThis->mInitting = true;
+ aThis->mMaxPayloadSize = aMaxPayloadSize;
+ nsresult rv = aThis->mMPS->GetGMPVideoEncoder(nullptr, &tags, ""_ns,
+ std::move(callback));
+ if (NS_WARN_IF(NS_FAILED(rv))) {
+ GMP_LOG_DEBUG("GMP Encode: GetGMPVideoEncoder failed");
+ aThis->Close_g();
+ aInitDone->Dispatch(WEBRTC_VIDEO_CODEC_ERROR,
+ "GMP Encode: GetGMPVideoEncoder failed");
+ }
+}
+
+int32_t WebrtcGmpVideoEncoder::GmpInitDone(GMPVideoEncoderProxy* aGMP,
+ GMPVideoHost* aHost,
+ std::string* aErrorOut) {
+ if (!mInitting || !aGMP || !aHost) {
+ *aErrorOut =
+ "GMP Encode: Either init was aborted, "
+ "or init failed to supply either a GMP Encoder or GMP host.";
+ if (aGMP) {
+ // This could destroy us, since aGMP may be the last thing holding a ref
+ // Return immediately.
+ aGMP->Close();
+ }
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ mInitting = false;
+
+ if (mGMP && mGMP != aGMP) {
+ Close_g();
+ }
+
+ mGMP = aGMP;
+ mHost = aHost;
+ mCachedPluginId = Some(mGMP->GetPluginId());
+ mInitPluginEvent.Notify(*mCachedPluginId);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcGmpVideoEncoder::GmpInitDone(GMPVideoEncoderProxy* aGMP,
+ GMPVideoHost* aHost,
+ const GMPVideoCodec& aCodecParams,
+ std::string* aErrorOut) {
+ int32_t r = GmpInitDone(aGMP, aHost, aErrorOut);
+ if (r != WEBRTC_VIDEO_CODEC_OK) {
+ // We might have been destroyed if GmpInitDone failed.
+ // Return immediately.
+ return r;
+ }
+ mCodecParams = aCodecParams;
+ return InitEncoderForSize(aCodecParams.mWidth, aCodecParams.mHeight,
+ aErrorOut);
+}
+
+void WebrtcGmpVideoEncoder::Close_g() {
+ GMPVideoEncoderProxy* gmp(mGMP);
+ mGMP = nullptr;
+ mHost = nullptr;
+ mInitting = false;
+
+ if (mCachedPluginId) {
+ mReleasePluginEvent.Notify(*mCachedPluginId);
+ }
+ mCachedPluginId = Nothing();
+
+ if (gmp) {
+ // Do this last, since this could cause us to be destroyed
+ gmp->Close();
+ }
+}
+
+int32_t WebrtcGmpVideoEncoder::InitEncoderForSize(unsigned short aWidth,
+ unsigned short aHeight,
+ std::string* aErrorOut) {
+ mCodecParams.mWidth = aWidth;
+ mCodecParams.mHeight = aHeight;
+ // Pass dummy codecSpecific data for now...
+ nsTArray<uint8_t> codecSpecific;
+
+ GMPErr err =
+ mGMP->InitEncode(mCodecParams, codecSpecific, this, 1, mMaxPayloadSize);
+ if (err != GMPNoErr) {
+ *aErrorOut = "GMP Encode: InitEncode failed";
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int32_t WebrtcGmpVideoEncoder::Encode(
+ const webrtc::VideoFrame& aInputImage,
+ const std::vector<webrtc::VideoFrameType>* aFrameTypes) {
+ MOZ_ASSERT(aInputImage.width() >= 0 && aInputImage.height() >= 0);
+ if (!aFrameTypes) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // It is safe to copy aInputImage here because the frame buffer is held by
+ // a refptr.
+ mGMPThread->Dispatch(WrapRunnableNM(&WebrtcGmpVideoEncoder::Encode_g,
+ RefPtr<WebrtcGmpVideoEncoder>(this),
+ aInputImage, *aFrameTypes),
+ NS_DISPATCH_NORMAL);
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void WebrtcGmpVideoEncoder::RegetEncoderForResolutionChange(
+ uint32_t aWidth, uint32_t aHeight,
+ const RefPtr<GmpInitDoneRunnable>& aInitDone) {
+ Close_g();
+
+ UniquePtr<GetGMPVideoEncoderCallback> callback(
+ new InitDoneForResolutionChangeCallback(this, aInitDone, aWidth,
+ aHeight));
+
+ // OpenH264 codec (at least) can't handle dynamic input resolution changes
+ // re-init the plugin when the resolution changes
+ // XXX allow codec to indicate it doesn't need re-init!
+ nsTArray<nsCString> tags;
+ tags.AppendElement("h264"_ns);
+ mInitting = true;
+ if (NS_WARN_IF(NS_FAILED(mMPS->GetGMPVideoEncoder(nullptr, &tags, ""_ns,
+ std::move(callback))))) {
+ aInitDone->Dispatch(WEBRTC_VIDEO_CODEC_ERROR,
+ "GMP Encode: GetGMPVideoEncoder failed");
+ }
+}
+
+void WebrtcGmpVideoEncoder::Encode_g(
+ const RefPtr<WebrtcGmpVideoEncoder>& aEncoder,
+ webrtc::VideoFrame aInputImage,
+ std::vector<webrtc::VideoFrameType> aFrameTypes) {
+ if (!aEncoder->mGMP) {
+ // destroyed via Terminate(), failed to init, or just not initted yet
+ GMP_LOG_DEBUG("GMP Encode: not initted yet");
+ return;
+ }
+ MOZ_ASSERT(aEncoder->mHost);
+
+ if (static_cast<uint32_t>(aInputImage.width()) !=
+ aEncoder->mCodecParams.mWidth ||
+ static_cast<uint32_t>(aInputImage.height()) !=
+ aEncoder->mCodecParams.mHeight) {
+ GMP_LOG_DEBUG("GMP Encode: resolution change from %ux%u to %dx%d",
+ aEncoder->mCodecParams.mWidth, aEncoder->mCodecParams.mHeight,
+ aInputImage.width(), aInputImage.height());
+
+ RefPtr<GmpInitDoneRunnable> initDone(
+ new GmpInitDoneRunnable(aEncoder->mPCHandle));
+ aEncoder->RegetEncoderForResolutionChange(aInputImage.width(),
+ aInputImage.height(), initDone);
+ if (!aEncoder->mGMP) {
+ // We needed to go async to re-get the encoder. Bail.
+ return;
+ }
+ }
+
+ GMPVideoFrame* ftmp = nullptr;
+ GMPErr err = aEncoder->mHost->CreateFrame(kGMPI420VideoFrame, &ftmp);
+ if (err != GMPNoErr) {
+ GMP_LOG_DEBUG("GMP Encode: failed to create frame on host");
+ return;
+ }
+ GMPUniquePtr<GMPVideoi420Frame> frame(static_cast<GMPVideoi420Frame*>(ftmp));
+ const webrtc::I420BufferInterface* input_image =
+ aInputImage.video_frame_buffer()->GetI420();
+ // check for overflow of stride * height
+ CheckedInt32 ysize =
+ CheckedInt32(input_image->StrideY()) * input_image->height();
+ MOZ_RELEASE_ASSERT(ysize.isValid());
+  // I will assume that if that doesn't overflow, the other cases won't - YUV
+  // 4:2:0 has U/V widths <= Y, even with alignment issues.
+ err = frame->CreateFrame(
+ ysize.value(), input_image->DataY(),
+ input_image->StrideU() * ((input_image->height() + 1) / 2),
+ input_image->DataU(),
+ input_image->StrideV() * ((input_image->height() + 1) / 2),
+ input_image->DataV(), input_image->width(), input_image->height(),
+ input_image->StrideY(), input_image->StrideU(), input_image->StrideV());
+ if (err != GMPNoErr) {
+ GMP_LOG_DEBUG("GMP Encode: failed to create frame");
+ return;
+ }
+ frame->SetTimestamp((aInputImage.timestamp() * 1000ll) /
+ 90); // note: rounds down!
+ // frame->SetDuration(1000000ll/30); // XXX base duration on measured current
+ // FPS - or don't bother
+
+ // Bug XXXXXX: Set codecSpecific info
+ GMPCodecSpecificInfo info;
+ memset(&info, 0, sizeof(info));
+ info.mCodecType = kGMPVideoCodecH264;
+ nsTArray<uint8_t> codecSpecificInfo;
+ codecSpecificInfo.AppendElements((uint8_t*)&info,
+ sizeof(GMPCodecSpecificInfo));
+
+ nsTArray<GMPVideoFrameType> gmp_frame_types;
+ for (auto it = aFrameTypes.begin(); it != aFrameTypes.end(); ++it) {
+ GMPVideoFrameType ft;
+
+ int32_t ret = WebrtcFrameTypeToGmpFrameType(*it, &ft);
+ if (ret != WEBRTC_VIDEO_CODEC_OK) {
+ GMP_LOG_DEBUG(
+ "GMP Encode: failed to map webrtc frame type to gmp frame type");
+ return;
+ }
+
+ gmp_frame_types.AppendElement(ft);
+ }
+
+ {
+ auto inputImageMap = aEncoder->mInputImageMap.Lock();
+ DebugOnly<bool> inserted = false;
+ std::tie(std::ignore, inserted) = inputImageMap->insert(
+ {frame->Timestamp(), {aInputImage.timestamp_us()}});
+ MOZ_ASSERT(inserted, "Duplicate timestamp");
+ }
+
+ GMP_LOG_DEBUG("GMP Encode: %" PRIu64, (frame->Timestamp()));
+ err = aEncoder->mGMP->Encode(std::move(frame), codecSpecificInfo,
+ gmp_frame_types);
+ if (err != GMPNoErr) {
+ GMP_LOG_DEBUG("GMP Encode: failed to encode frame");
+ }
+}
+
+// Installs (or clears, when passed nullptr) the sink that receives encoded
+// images from Encoded(). Thread-safe via mCallbackMutex.
+int32_t WebrtcGmpVideoEncoder::RegisterEncodeCompleteCallback(
+    webrtc::EncodedImageCallback* aCallback) {
+  MutexAutoLock lock(mCallbackMutex);
+  mCallback = aCallback;
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+/* static */
+// GMP-thread trampoline for Shutdown(); the RefPtr parameter keeps the
+// encoder alive until Close_g() has run.
+void WebrtcGmpVideoEncoder::ReleaseGmp_g(
+    const RefPtr<WebrtcGmpVideoEncoder>& aEncoder) {
+  aEncoder->Close_g();
+}
+
+// Asynchronous release: clears the callback immediately (so no further
+// Encoded() images are delivered) and closes the plugin on the GMP thread.
+int32_t WebrtcGmpVideoEncoder::Shutdown() {
+  GMP_LOG_DEBUG("GMP Released:");
+  RegisterEncodeCompleteCallback(nullptr);
+  if (mGMPThread) {
+    mGMPThread->Dispatch(WrapRunnableNM(&WebrtcGmpVideoEncoder::ReleaseGmp_g,
+                                        RefPtr<WebrtcGmpVideoEncoder>(this)),
+                         NS_DISPATCH_NORMAL);
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Forwards a rate update to the GMP thread. H264 via GMP has no simulcast
+// support, so exactly one spatial/temporal layer may carry a bitrate
+// (enforced by the asserts). Bitrate is converted from bps to kbps.
+int32_t WebrtcGmpVideoEncoder::SetRates(
+    const webrtc::VideoEncoder::RateControlParameters& aParameters) {
+  MOZ_ASSERT(mGMPThread);
+  MOZ_ASSERT(aParameters.bitrate.IsSpatialLayerUsed(0));
+  MOZ_ASSERT(!aParameters.bitrate.HasBitrate(0, 1),
+             "No simulcast support for H264");
+  MOZ_ASSERT(!aParameters.bitrate.IsSpatialLayerUsed(1),
+             "No simulcast support for H264");
+  // A non-positive framerate means "unknown"; pass Nothing() so SetRates_g
+  // falls back to the configured max framerate.
+  mGMPThread->Dispatch(
+      WrapRunnableNM(&WebrtcGmpVideoEncoder::SetRates_g,
+                     RefPtr<WebrtcGmpVideoEncoder>(this),
+                     aParameters.bitrate.GetBitrate(0, 0) / 1000,
+                     aParameters.framerate_fps > 0.0
+                         ? Some(aParameters.framerate_fps)
+                         : Nothing()),
+      NS_DISPATCH_NORMAL);
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Static capability description for the GMP-backed OpenH264 encoder:
+// software, no native handles, no simulcast; QP thresholds drive
+// quality-based resolution scaling in libwebrtc.
+WebrtcVideoEncoder::EncoderInfo WebrtcGmpVideoEncoder::GetEncoderInfo() const {
+  WebrtcVideoEncoder::EncoderInfo info;
+  info.supports_native_handle = false;
+  info.implementation_name = "GMPOpenH264";
+  info.scaling_settings = WebrtcVideoEncoder::ScalingSettings(
+      kLowH264QpThreshold, kHighH264QpThreshold);
+  info.is_hardware_accelerated = false;
+  info.supports_simulcast = false;
+  return info;
+}
+
+/* static */
+// GMP-thread side of SetRates(). Applies the new bitrate (kbps) and
+// framerate to the plugin; falls back to the codec's configured max
+// framerate when no framerate was supplied.
+int32_t WebrtcGmpVideoEncoder::SetRates_g(RefPtr<WebrtcGmpVideoEncoder> aThis,
+                                          uint32_t aNewBitRateKbps,
+                                          Maybe<double> aFrameRate) {
+  if (!aThis->mGMP) {
+    // destroyed via Terminate()
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  GMPErr err = aThis->mGMP->SetRates(
+      aNewBitRateKbps, aFrameRate
+                           .map([](double aFr) {
+                             // Avoid rounding to 0
+                             return std::max(1U, static_cast<uint32_t>(aFr));
+                           })
+                           .valueOr(aThis->mCodecParams.mMaxFramerate));
+  if (err != GMPNoErr) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// GMPVideoEncoderCallback virtual functions.
+// Called by the GMP machinery when the plugin dies or is torn down; drops
+// all plugin state so later Encode_g() calls bail out early.
+// NOTE(review): mGMP is dereferenced without a null-check here — presumably
+// Terminated() is never delivered after Close_g()/a prior Terminated();
+// TODO confirm that invariant holds.
+void WebrtcGmpVideoEncoder::Terminated() {
+  GMP_LOG_DEBUG("GMP Encoder Terminated: %p", (void*)this);
+
+  mGMP->Close();
+  mGMP = nullptr;
+  mHost = nullptr;
+  mInitting = false;
+  // Could now notify that it's dead
+}
+
+// GMPVideoEncoderCallback: the plugin produced an encoded frame. Restores
+// the capture time recorded in Encode_g(), rewrites the plugin's
+// length-prefixed NALUs into Annex-B start codes in place, and forwards the
+// result to the registered webrtc::EncodedImageCallback.
+void WebrtcGmpVideoEncoder::Encoded(
+    GMPVideoEncodedFrame* aEncodedFrame,
+    const nsTArray<uint8_t>& aCodecSpecificInfo) {
+  webrtc::Timestamp capture_time = webrtc::Timestamp::Micros(0);
+  {
+    // Pop the capture time stored under this frame's microsecond timestamp.
+    auto inputImageMap = mInputImageMap.Lock();
+    auto handle = inputImageMap->extract(aEncodedFrame->TimeStamp());
+    MOZ_ASSERT(handle);
+    if (handle) {
+      capture_time = webrtc::Timestamp::Micros(handle.mapped().timestamp_us);
+    }
+  }
+
+  MutexAutoLock lock(mCallbackMutex);
+  if (!mCallback) {
+    return;
+  }
+
+  webrtc::VideoFrameType ft;
+  GmpFrameTypeToWebrtcFrameType(aEncodedFrame->FrameType(), &ft);
+  // Convert microseconds back to the 90 kHz RTP clock, rounding up to undo
+  // the round-down done in Encode_g().
+  uint32_t timestamp = (aEncodedFrame->TimeStamp() * 90ll + 999) / 1000;
+
+  GMP_LOG_DEBUG("GMP Encoded: %" PRIu64 ", type %d, len %d",
+                aEncodedFrame->TimeStamp(), aEncodedFrame->BufferType(),
+                aEncodedFrame->Size());
+
+  if (!aEncodedFrame->Buffer()) {
+    GMP_LOG_ERROR("GMP plugin returned null buffer");
+    return;
+  }
+
+  // Libwebrtc's RtpPacketizerH264 expects a 3- or 4-byte NALU start sequence
+  // before the start of the NALU payload. {0,0,1} or {0,0,0,1}. We set this
+  // in-place. Any other length of the length field we reject.
+
+  const int sizeNumBytes = SizeNumBytes(aEncodedFrame->BufferType());
+  uint32_t unitOffset = 0;
+  uint32_t unitSize = 0;
+  // Make sure we don't read past the end of the buffer getting the size
+  while (unitOffset + sizeNumBytes < aEncodedFrame->Size()) {
+    uint8_t* unitBuffer = aEncodedFrame->Buffer() + unitOffset;
+    switch (aEncodedFrame->BufferType()) {
+      case GMP_BufferLength24: {
+// Length fields are stored in host byte order by the plugin.
+#if MOZ_LITTLE_ENDIAN()
+        unitSize = (static_cast<uint32_t>(*unitBuffer)) |
+                   (static_cast<uint32_t>(*(unitBuffer + 1)) << 8) |
+                   (static_cast<uint32_t>(*(unitBuffer + 2)) << 16);
+#else
+        unitSize = (static_cast<uint32_t>(*unitBuffer) << 16) |
+                   (static_cast<uint32_t>(*(unitBuffer + 1)) << 8) |
+                   (static_cast<uint32_t>(*(unitBuffer + 2)));
+#endif
+        const uint8_t startSequence[] = {0, 0, 1};
+        if (memcmp(unitBuffer, startSequence, 3) == 0) {
+          // This is a bug in OpenH264 where it misses to convert the NALU start
+          // sequence to the NALU size per the GMP protocol. We mitigate this by
+          // letting it through as this is what libwebrtc already expects and
+          // scans for.
+          unitSize = aEncodedFrame->Size() - 3;
+          break;
+        }
+        memcpy(unitBuffer, startSequence, 3);
+        break;
+      }
+      case GMP_BufferLength32: {
+#if MOZ_LITTLE_ENDIAN()
+        unitSize = LittleEndian::readUint32(unitBuffer);
+#else
+        unitSize = BigEndian::readUint32(unitBuffer);
+#endif
+        const uint8_t startSequence[] = {0, 0, 0, 1};
+        if (memcmp(unitBuffer, startSequence, 4) == 0) {
+          // This is a bug in OpenH264 where it misses to convert the NALU start
+          // sequence to the NALU size per the GMP protocol. We mitigate this by
+          // letting it through as this is what libwebrtc already expects and
+          // scans for.
+          unitSize = aEncodedFrame->Size() - 4;
+          break;
+        }
+        memcpy(unitBuffer, startSequence, 4);
+        break;
+      }
+      default:
+        GMP_LOG_ERROR("GMP plugin returned type we cannot handle (%d)",
+                      aEncodedFrame->BufferType());
+        return;
+    }
+
+    // Reject malformed output: a zero-size unit or one that claims to run
+    // past the end of the buffer.
+    MOZ_ASSERT(unitSize != 0);
+    MOZ_ASSERT(unitOffset + sizeNumBytes + unitSize <= aEncodedFrame->Size());
+    if (unitSize == 0 ||
+        unitOffset + sizeNumBytes + unitSize > aEncodedFrame->Size()) {
+      // XXX Should we kill the plugin for returning extra bytes? Probably
+      GMP_LOG_ERROR(
+          "GMP plugin returned badly formatted encoded data: "
+          "unitOffset=%u, sizeNumBytes=%d, unitSize=%u, size=%u",
+          unitOffset, sizeNumBytes, unitSize, aEncodedFrame->Size());
+      return;
+    }
+
+    unitOffset += sizeNumBytes + unitSize;
+  }
+
+  if (unitOffset != aEncodedFrame->Size()) {
+    // At most 3 bytes can be left over, depending on buffertype
+    GMP_LOG_DEBUG("GMP plugin returned %u extra bytes",
+                  aEncodedFrame->Size() - unitOffset);
+  }
+
+  webrtc::EncodedImage unit;
+  unit.SetEncodedData(webrtc::EncodedImageBuffer::Create(
+      aEncodedFrame->Buffer(), aEncodedFrame->Size()));
+  unit._frameType = ft;
+  unit.SetTimestamp(timestamp);
+  unit.capture_time_ms_ = capture_time.ms();
+  unit._encodedWidth = aEncodedFrame->EncodedWidth();
+  unit._encodedHeight = aEncodedFrame->EncodedHeight();
+
+  // Parse QP.
+  mH264BitstreamParser.ParseBitstream(unit);
+  unit.qp_ = mH264BitstreamParser.GetLastSliceQp().value_or(-1);
+
+  // TODO: Currently the OpenH264 codec does not preserve any codec
+  // specific info passed into it and just returns default values.
+  // If this changes in the future, it would be nice to get rid of
+  // mCodecSpecificInfo.
+  mCallback->OnEncodedImage(unit, &mCodecSpecificInfo);
+}
+
+// Decoder.
+// Decoder.
+// aPCHandle identifies the owning PeerConnection (for error reporting);
+// aTrackingId feeds the decode-performance recorder.
+WebrtcGmpVideoDecoder::WebrtcGmpVideoDecoder(std::string aPCHandle,
+                                             TrackingId aTrackingId)
+    : mGMP(nullptr),
+      mInitting(false),
+      mHost(nullptr),
+      mCallbackMutex("WebrtcGmpVideoDecoder decoded callback mutex"),
+      mCallback(nullptr),
+      mDecoderStatus(GMPNoErr),
+      mPCHandle(std::move(aPCHandle)),
+      mTrackingId(std::move(aTrackingId)) {
+  MOZ_ASSERT(!mPCHandle.empty());
+}
+
+WebrtcGmpVideoDecoder::~WebrtcGmpVideoDecoder() {
+  // We should not have been destroyed if we never closed our GMP
+  MOZ_ASSERT(!mGMP);
+}
+
+// webrtc::VideoDecoder entry point. Lazily acquires the GMP service and its
+// thread, then kicks off asynchronous plugin acquisition on the GMP thread.
+// Returns true if the async init was dispatched; actual init success is
+// reported later through GmpInitDoneRunnable.
+bool WebrtcGmpVideoDecoder::Configure(
+    const webrtc::VideoDecoder::Settings& settings) {
+  if (!mMPS) {
+    mMPS = do_GetService("@mozilla.org/gecko-media-plugin-service;1");
+  }
+  MOZ_ASSERT(mMPS);
+
+  if (!mGMPThread) {
+    if (NS_WARN_IF(NS_FAILED(mMPS->GetThread(getter_AddRefs(mGMPThread))))) {
+      return false;
+    }
+  }
+
+  RefPtr<GmpInitDoneRunnable> initDone(new GmpInitDoneRunnable(mPCHandle));
+  mGMPThread->Dispatch(
+      WrapRunnableNM(&WebrtcGmpVideoDecoder::Configure_g,
+                     RefPtr<WebrtcGmpVideoDecoder>(this), settings, initDone),
+      NS_DISPATCH_NORMAL);
+
+  return true;
+}
+
+/* static */
+// GMP-thread side of Configure(): requests an H264-capable GMP video
+// decoder; InitDoneCallback completes initialization when the plugin
+// arrives. On dispatch failure the error is reported via aInitDone.
+void WebrtcGmpVideoDecoder::Configure_g(
+    const RefPtr<WebrtcGmpVideoDecoder>& aThis,
+    const webrtc::VideoDecoder::Settings& settings,  // unused
+    const RefPtr<GmpInitDoneRunnable>& aInitDone) {
+  nsTArray<nsCString> tags;
+  tags.AppendElement("h264"_ns);
+  UniquePtr<GetGMPVideoDecoderCallback> callback(
+      new InitDoneCallback(aThis, aInitDone));
+  aThis->mInitting = true;
+  nsresult rv = aThis->mMPS->GetGMPVideoDecoder(nullptr, &tags, ""_ns,
+                                                std::move(callback));
+  if (NS_WARN_IF(NS_FAILED(rv))) {
+    GMP_LOG_DEBUG("GMP Decode: GetGMPVideoDecoder failed");
+    aThis->Close_g();
+    aInitDone->Dispatch(WEBRTC_VIDEO_CODEC_ERROR,
+                        "GMP Decode: GetGMPVideoDecoder failed.");
+  }
+}
+
+// Completes decoder initialization once the plugin and host are available
+// (GMP thread). Initializes the plugin decoder, then drains any frames that
+// were queued while init was in flight. On failure, fills *aErrorOut and
+// returns WEBRTC_VIDEO_CODEC_ERROR.
+int32_t WebrtcGmpVideoDecoder::GmpInitDone(GMPVideoDecoderProxy* aGMP,
+                                           GMPVideoHost* aHost,
+                                           std::string* aErrorOut) {
+  if (!mInitting || !aGMP || !aHost) {
+    *aErrorOut =
+        "GMP Decode: Either init was aborted, "
+        "or init failed to supply either a GMP decoder or GMP host.";
+    if (aGMP) {
+      // This could destroy us, since aGMP may be the last thing holding a ref
+      // Return immediately.
+      aGMP->Close();
+    }
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  mInitting = false;
+
+  // A stale plugin from a previous init must be closed before adopting the
+  // new one.
+  if (mGMP && mGMP != aGMP) {
+    Close_g();
+  }
+
+  mGMP = aGMP;
+  mHost = aHost;
+  mCachedPluginId = Some(mGMP->GetPluginId());
+  mInitPluginEvent.Notify(*mCachedPluginId);
+  // Bug XXXXXX: transfer settings from codecSettings to codec.
+  GMPVideoCodec codec;
+  memset(&codec, 0, sizeof(codec));
+  codec.mGMPApiVersion = 33;
+
+  // XXX this is currently a hack
+  // GMPVideoCodecUnion codecSpecific;
+  // memset(&codecSpecific, 0, sizeof(codecSpecific));
+  nsTArray<uint8_t> codecSpecific;
+  nsresult rv = mGMP->InitDecode(codec, codecSpecific, this, 1);
+  if (NS_FAILED(rv)) {
+    *aErrorOut = "GMP Decode: InitDecode failed";
+    mQueuedFrames.Clear();
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  // now release any frames that got queued waiting for InitDone
+  if (!mQueuedFrames.IsEmpty()) {
+    // So we're safe to call Decode_g(), which asserts it's empty
+    nsTArray<UniquePtr<GMPDecodeData>> temp = std::move(mQueuedFrames);
+    for (auto& queued : temp) {
+      Decode_g(RefPtr<WebrtcGmpVideoDecoder>(this), std::move(queued));
+    }
+  }
+
+  // This is an ugly solution to asynchronous decoding errors
+  // from Decode_g() not being returned to the synchronous Decode() method.
+  // If we don't return an error code at this point, our caller ultimately won't
+  // know to request a PLI and the video stream will remain frozen unless an IDR
+  // happens to arrive for other reasons. Bug 1492852 tracks implementing a
+  // proper solution.
+  if (mDecoderStatus != GMPNoErr) {
+    GMP_LOG_ERROR("%s: Decoder status is bad (%u)!", __PRETTY_FUNCTION__,
+                  static_cast<unsigned>(mDecoderStatus));
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Releases the plugin on the GMP thread: clears all plugin state and fires
+// the release event before closing, because gmp->Close() may drop the last
+// reference to |this|.
+void WebrtcGmpVideoDecoder::Close_g() {
+  GMPVideoDecoderProxy* gmp(mGMP);
+  mGMP = nullptr;
+  mHost = nullptr;
+  mInitting = false;
+
+  if (mCachedPluginId) {
+    mReleasePluginEvent.Notify(*mCachedPluginId);
+  }
+  mCachedPluginId = Nothing();
+
+  if (gmp) {
+    // Do this last, since this could cause us to be destroyed
+    gmp->Close();
+  }
+}
+
+// webrtc::VideoDecoder entry point (never on the main thread). Copies the
+// encoded image into a GMPDecodeData and dispatches the actual decode to
+// the GMP thread; errors from previous async decodes are surfaced here so
+// the caller can request a PLI.
+int32_t WebrtcGmpVideoDecoder::Decode(const webrtc::EncodedImage& aInputImage,
+                                      bool aMissingFrames,
+                                      int64_t aRenderTimeMs) {
+  MOZ_ASSERT(mGMPThread);
+  MOZ_ASSERT(!NS_IsMainThread());
+  if (!aInputImage.size()) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  MediaInfoFlag flag = MediaInfoFlag::None;
+  flag |= (aInputImage._frameType == webrtc::VideoFrameType::kVideoFrameKey
+               ? MediaInfoFlag::KeyFrame
+               : MediaInfoFlag::NonKeyFrame);
+  flag |= MediaInfoFlag::SoftwareDecoding;
+  flag |= MediaInfoFlag::VIDEO_H264;
+  // Key the recorder by the frame's microsecond timestamp (90 kHz -> us).
+  mPerformanceRecorder.Start((aInputImage.Timestamp() * 1000ll) / 90,
+                             "WebrtcGmpVideoDecoder"_ns, mTrackingId, flag);
+
+  // This is an ugly solution to asynchronous decoding errors
+  // from Decode_g() not being returned to the synchronous Decode() method.
+  // If we don't return an error code at this point, our caller ultimately won't
+  // know to request a PLI and the video stream will remain frozen unless an IDR
+  // happens to arrive for other reasons. Bug 1492852 tracks implementing a
+  // proper solution.
+  auto decodeData =
+      MakeUnique<GMPDecodeData>(aInputImage, aMissingFrames, aRenderTimeMs);
+
+  mGMPThread->Dispatch(WrapRunnableNM(&WebrtcGmpVideoDecoder::Decode_g,
+                                      RefPtr<WebrtcGmpVideoDecoder>(this),
+                                      std::move(decodeData)),
+                       NS_DISPATCH_NORMAL);
+
+  if (mDecoderStatus != GMPNoErr) {
+    GMP_LOG_ERROR("%s: Decoder status is bad (%u)!", __PRETTY_FUNCTION__,
+                  static_cast<unsigned>(mDecoderStatus));
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+/* static */
+// GMP-thread decode: wraps the encoded bytes in a GMP encoded frame
+// (mode-1 bitstream: a 4-byte length word followed by the payload) and
+// passes it to the plugin. Frames arriving before init completes are
+// queued; errors are parked in mDecoderStatus for Decode() to report.
+void WebrtcGmpVideoDecoder::Decode_g(const RefPtr<WebrtcGmpVideoDecoder>& aThis,
+                                     UniquePtr<GMPDecodeData>&& aDecodeData) {
+  if (!aThis->mGMP) {
+    if (aThis->mInitting) {
+      // InitDone hasn't been called yet (race)
+      aThis->mQueuedFrames.AppendElement(std::move(aDecodeData));
+      return;
+    }
+    // destroyed via Terminate(), failed to init, or just not initted yet
+    GMP_LOG_DEBUG("GMP Decode: not initted yet");
+
+    aThis->mDecoderStatus = GMPDecodeErr;
+    return;
+  }
+
+  MOZ_ASSERT(aThis->mQueuedFrames.IsEmpty());
+  MOZ_ASSERT(aThis->mHost);
+
+  GMPVideoFrame* ftmp = nullptr;
+  GMPErr err = aThis->mHost->CreateFrame(kGMPEncodedVideoFrame, &ftmp);
+  if (err != GMPNoErr) {
+    GMP_LOG_ERROR("%s: CreateFrame failed (%u)!", __PRETTY_FUNCTION__,
+                  static_cast<unsigned>(err));
+    aThis->mDecoderStatus = err;
+    return;
+  }
+
+  GMPUniquePtr<GMPVideoEncodedFrame> frame(
+      static_cast<GMPVideoEncodedFrame*>(ftmp));
+  err = frame->CreateEmptyFrame(aDecodeData->mImage.size());
+  if (err != GMPNoErr) {
+    GMP_LOG_ERROR("%s: CreateEmptyFrame failed (%u)!", __PRETTY_FUNCTION__,
+                  static_cast<unsigned>(err));
+    aThis->mDecoderStatus = err;
+    return;
+  }
+
+  // XXX At this point, we only will get mode1 data (a single length and a
+  // buffer) Session_info.cc/etc code needs to change to support mode 0.
+  // NOTE(review): the first 4 bytes of the image are overwritten with the
+  // total size; this assumes mImage.size() >= 4 — Decode() only rejects
+  // empty images. TODO confirm smaller inputs cannot reach here.
+  *(reinterpret_cast<uint32_t*>(frame->Buffer())) = frame->Size();
+
+  // XXX It'd be wonderful not to have to memcpy the encoded data!
+  memcpy(frame->Buffer() + 4, aDecodeData->mImage.data() + 4,
+         frame->Size() - 4);
+
+  frame->SetEncodedWidth(aDecodeData->mImage._encodedWidth);
+  frame->SetEncodedHeight(aDecodeData->mImage._encodedHeight);
+  // 90 kHz RTP timestamp -> microseconds for the GMP API.
+  frame->SetTimeStamp((aDecodeData->mImage.Timestamp() * 1000ll) /
+                      90);  // rounds down
+  frame->SetCompleteFrame(
+      true);  // upstream no longer deals with incomplete frames
+  frame->SetBufferType(GMP_BufferLength32);
+
+  GMPVideoFrameType ft;
+  int32_t ret =
+      WebrtcFrameTypeToGmpFrameType(aDecodeData->mImage._frameType, &ft);
+  if (ret != WEBRTC_VIDEO_CODEC_OK) {
+    GMP_LOG_ERROR("%s: WebrtcFrameTypeToGmpFrameType failed (%u)!",
+                  __PRETTY_FUNCTION__, static_cast<unsigned>(ret));
+    aThis->mDecoderStatus = GMPDecodeErr;
+    return;
+  }
+
+  // Bug XXXXXX: Set codecSpecific info
+  GMPCodecSpecificInfo info;
+  memset(&info, 0, sizeof(info));
+  info.mCodecType = kGMPVideoCodecH264;
+  info.mCodecSpecific.mH264.mSimulcastIdx = 0;
+  nsTArray<uint8_t> codecSpecificInfo;
+  codecSpecificInfo.AppendElements((uint8_t*)&info,
+                                   sizeof(GMPCodecSpecificInfo));
+
+  GMP_LOG_DEBUG("GMP Decode: %" PRIu64 ", len %zu%s", frame->TimeStamp(),
+                aDecodeData->mImage.size(),
+                ft == kGMPKeyFrame ? ", KeyFrame" : "");
+
+  nsresult rv =
+      aThis->mGMP->Decode(std::move(frame), aDecodeData->mMissingFrames,
+                          codecSpecificInfo, aDecodeData->mRenderTimeMs);
+  if (NS_FAILED(rv)) {
+    GMP_LOG_ERROR("%s: Decode failed (rv=%u)!", __PRETTY_FUNCTION__,
+                  static_cast<unsigned>(rv));
+    aThis->mDecoderStatus = GMPDecodeErr;
+    return;
+  }
+
+  aThis->mDecoderStatus = GMPNoErr;
+}
+
+// Installs (or clears, when passed nullptr) the sink that receives decoded
+// frames from Decoded(). Thread-safe via mCallbackMutex.
+int32_t WebrtcGmpVideoDecoder::RegisterDecodeCompleteCallback(
+    webrtc::DecodedImageCallback* aCallback) {
+  MutexAutoLock lock(mCallbackMutex);
+  mCallback = aCallback;
+
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+/* static */
+// GMP-thread trampoline for ReleaseGmp(); the RefPtr parameter keeps the
+// decoder alive until Close_g() has run.
+void WebrtcGmpVideoDecoder::ReleaseGmp_g(
+    const RefPtr<WebrtcGmpVideoDecoder>& aDecoder) {
+  aDecoder->Close_g();
+}
+
+// Asynchronous release: clears the callback immediately (so no further
+// Decoded() frames are delivered) and closes the plugin on the GMP thread.
+int32_t WebrtcGmpVideoDecoder::ReleaseGmp() {
+  GMP_LOG_DEBUG("GMP Released:");
+  RegisterDecodeCompleteCallback(nullptr);
+
+  if (mGMPThread) {
+    mGMPThread->Dispatch(WrapRunnableNM(&WebrtcGmpVideoDecoder::ReleaseGmp_g,
+                                        RefPtr<WebrtcGmpVideoDecoder>(this)),
+                         NS_DISPATCH_NORMAL);
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// GMPVideoDecoderCallbackProxy: the plugin died or was torn down; drop all
+// plugin state so later Decode_g() calls bail out early.
+// NOTE(review): mGMP is dereferenced without a null-check here — presumably
+// Terminated() is never delivered after Close_g()/a prior Terminated();
+// TODO confirm that invariant holds.
+void WebrtcGmpVideoDecoder::Terminated() {
+  GMP_LOG_DEBUG("GMP Decoder Terminated: %p", (void*)this);
+
+  mGMP->Close();
+  mGMP = nullptr;
+  mHost = nullptr;
+  mInitting = false;
+  // Could now notify that it's dead
+}
+
+// GMPVideoDecoderCallbackProxy: the plugin produced a decoded I420 frame.
+// Copies the plane data out of GMP shmem (the GMP frame can only be
+// destroyed on the GMP thread), wraps the copy as a webrtc I420 buffer, and
+// forwards it to the registered DecodedImageCallback.
+void WebrtcGmpVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame) {
+  // we have two choices here: wrap the frame with a callback that frees
+  // the data later (risking running out of shmems), or copy the data out
+  // always. Also, we can only Destroy() the frame on the gmp thread, so
+  // copying is simplest if expensive.
+  // I420 size including rounding...
+  CheckedInt32 length =
+      (CheckedInt32(aDecodedFrame->Stride(kGMPYPlane)) *
+       aDecodedFrame->Height()) +
+      (aDecodedFrame->Stride(kGMPVPlane) + aDecodedFrame->Stride(kGMPUPlane)) *
+          ((aDecodedFrame->Height() + 1) / 2);
+  int32_t size = length.value();
+  MOZ_RELEASE_ASSERT(length.isValid() && size > 0);
+
+  // Don't use MakeUniqueFallible here, because UniquePtr isn't copyable, and
+  // the closure below in WrapI420Buffer uses std::function which _is_ copyable.
+  // We'll alloc the buffer here, so we preserve the "fallible" nature, and
+  // then hand a shared_ptr, which is copyable, to WrapI420Buffer.
+  // NOTE(review): the buffer is allocated with array new[], but this
+  // shared_ptr's default deleter uses scalar delete — formally mismatched;
+  // a delete[] deleter (or shared_ptr<uint8_t[]>) would be correct. TODO
+  // confirm intended.
+  auto falliblebuffer = new (std::nothrow) uint8_t[size];
+  if (falliblebuffer) {
+    auto buffer = std::shared_ptr<uint8_t>(falliblebuffer);
+
+    // This is 3 separate buffers currently anyways, no use in trying to
+    // see if we can use a single memcpy.
+    uint8_t* buffer_y = buffer.get();
+    memcpy(buffer_y, aDecodedFrame->Buffer(kGMPYPlane),
+           aDecodedFrame->Stride(kGMPYPlane) * aDecodedFrame->Height());
+    // Should this be aligned, making it non-contiguous?  Assume no, this is
+    // already factored into the strides.
+    uint8_t* buffer_u =
+        buffer_y + aDecodedFrame->Stride(kGMPYPlane) * aDecodedFrame->Height();
+    memcpy(buffer_u, aDecodedFrame->Buffer(kGMPUPlane),
+           aDecodedFrame->Stride(kGMPUPlane) *
+               ((aDecodedFrame->Height() + 1) / 2));
+    uint8_t* buffer_v = buffer_u + aDecodedFrame->Stride(kGMPUPlane) *
+                                       ((aDecodedFrame->Height() + 1) / 2);
+    memcpy(buffer_v, aDecodedFrame->Buffer(kGMPVPlane),
+           aDecodedFrame->Stride(kGMPVPlane) *
+               ((aDecodedFrame->Height() + 1) / 2));
+
+    MutexAutoLock lock(mCallbackMutex);
+    if (mCallback) {
+      // Note: the last parameter to WrapI420Buffer is named no_longer_used,
+      // but is currently called in the destructor of WrappedYuvBuffer when
+      // the buffer is "no_longer_used".  The capture of |buffer| keeps the
+      // copied planes alive until the wrapped buffer is released.
+      rtc::scoped_refptr<webrtc::I420BufferInterface> video_frame_buffer =
+          webrtc::WrapI420Buffer(
+              aDecodedFrame->Width(), aDecodedFrame->Height(), buffer_y,
+              aDecodedFrame->Stride(kGMPYPlane), buffer_u,
+              aDecodedFrame->Stride(kGMPUPlane), buffer_v,
+              aDecodedFrame->Stride(kGMPVPlane), [buffer] {});
+
+      GMP_LOG_DEBUG("GMP Decoded: %" PRIu64, aDecodedFrame->Timestamp());
+      auto videoFrame =
+          webrtc::VideoFrame::Builder()
+              .set_video_frame_buffer(video_frame_buffer)
+              .set_timestamp_rtp(
+                  // round up
+                  (aDecodedFrame->Timestamp() * 90ll + 999) / 1000)
+              .build();
+      mPerformanceRecorder.Record(
+          static_cast<int64_t>(aDecodedFrame->Timestamp()),
+          [&](DecodeStage& aStage) {
+            aStage.SetImageFormat(DecodeStage::YUV420P);
+            aStage.SetResolution(aDecodedFrame->Width(),
+                                 aDecodedFrame->Height());
+            aStage.SetColorDepth(gfx::ColorDepth::COLOR_8);
+          });
+      mCallback->Decoded(videoFrame);
+    }
+  }
+  aDecodedFrame->Destroy();
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.h b/dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.h
new file mode 100644
index 0000000000..6ddc70dcbf
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.h
@@ -0,0 +1,505 @@
+/*
+ * Copyright (c) 2012, The WebRTC project authors. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ *
+ * * Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in
+ * the documentation and/or other materials provided with the
+ * distribution.
+ *
+ * * Neither the name of Google nor the names of its contributors may
+ * be used to endorse or promote products derived from this software
+ * without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTCGMPVIDEOCODEC_H_
+#define WEBRTCGMPVIDEOCODEC_H_
+
+#include <queue>
+#include <string>
+
+#include "nsThreadUtils.h"
+#include "mozilla/Monitor.h"
+#include "mozilla/Mutex.h"
+#include "mozilla/Telemetry.h"
+
+#include "mozIGeckoMediaPluginService.h"
+#include "MediaConduitInterface.h"
+#include "AudioConduit.h"
+#include "PerformanceRecorder.h"
+#include "VideoConduit.h"
+#include "api/video/video_frame_type.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+#include "common_video/h264/h264_bitstream_parser.h"
+
+#include "gmp-video-host.h"
+#include "GMPVideoDecoderProxy.h"
+#include "GMPVideoEncoderProxy.h"
+
+#include "jsapi/PeerConnectionImpl.h"
+
+namespace mozilla {
+
+// Main-thread runnable that reports the outcome of async GMP encoder or
+// decoder initialization. Records a telemetry probe and, on failure,
+// notifies the owning PeerConnection (looked up via mPCHandle) of the
+// media error.
+class GmpInitDoneRunnable : public Runnable {
+ public:
+  explicit GmpInitDoneRunnable(std::string aPCHandle)
+      : Runnable("GmpInitDoneRunnable"),
+        mResult(WEBRTC_VIDEO_CODEC_OK),
+        mPCHandle(std::move(aPCHandle)) {}
+
+  NS_IMETHOD Run() override {
+    Telemetry::Accumulate(Telemetry::WEBRTC_GMP_INIT_SUCCESS,
+                          mResult == WEBRTC_VIDEO_CODEC_OK);
+    if (mResult == WEBRTC_VIDEO_CODEC_OK) {
+      // Might be useful to notify the PeerConnection about successful init
+      // someday.
+      return NS_OK;
+    }
+
+    PeerConnectionWrapper wrapper(mPCHandle);
+    if (wrapper.impl()) {
+      wrapper.impl()->OnMediaError(mError);
+    }
+    return NS_OK;
+  }
+
+  // Stores the init result/error and dispatches this runnable to the main
+  // thread; callable from any thread.
+  void Dispatch(int32_t aResult, const std::string& aError = "") {
+    mResult = aResult;
+    mError = aError;
+    nsCOMPtr<nsIThread> mainThread(do_GetMainThread());
+    if (mainThread) {
+      // For some reason, the compiler on CI is treating |this| as a const
+      // pointer, despite the fact that we're in a non-const function. And,
+      // interestingly enough, correcting this doesn't require a const_cast.
+      mainThread->Dispatch(do_AddRef(static_cast<nsIRunnable*>(this)),
+                           NS_DISPATCH_NORMAL);
+    }
+  }
+
+  int32_t Result() { return mResult; }
+
+ private:
+  int32_t mResult;
+  const std::string mPCHandle;
+  std::string mError;
+};
+
+// Hold a frame for later decode
+// Immutable snapshot of one Decode() call (encoded image + flags), queued
+// so the frame survives the hop to the GMP thread — the caller recycles
+// its buffer as soon as Decode() returns.
+class GMPDecodeData {
+ public:
+  GMPDecodeData(const webrtc::EncodedImage& aInputImage, bool aMissingFrames,
+                int64_t aRenderTimeMs)
+      : mImage(aInputImage),
+        mMissingFrames(aMissingFrames),
+        mRenderTimeMs(aRenderTimeMs) {
+    // We want to use this for queuing, and the calling code recycles the
+    // buffer on return from Decode()
+    MOZ_RELEASE_ASSERT(aInputImage.size() <
+                       (std::numeric_limits<size_t>::max() >> 1));
+  }
+
+  ~GMPDecodeData() = default;
+
+  const webrtc::EncodedImage mImage;
+  const bool mMissingFrames;
+  const int64_t mRenderTimeMs;
+};
+
+// Refcounted analogue of webrtc::VideoEncoder. The webrtc interface's
+// Release() collides with nsRefPtr's Release(), so teardown is renamed
+// Shutdown(); WebrtcVideoEncoderProxy adapts back to the webrtc interface.
+class RefCountedWebrtcVideoEncoder {
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(RefCountedWebrtcVideoEncoder);
+
+  // Implement sort of WebrtcVideoEncoder interface and support refcounting.
+  // (We cannot use |Release|, since that's needed for nsRefPtr)
+  virtual int32_t InitEncode(
+      const webrtc::VideoCodec* aCodecSettings,
+      const webrtc::VideoEncoder::Settings& aSettings) = 0;
+
+  virtual int32_t Encode(
+      const webrtc::VideoFrame& aInputImage,
+      const std::vector<webrtc::VideoFrameType>* aFrameTypes) = 0;
+
+  virtual int32_t RegisterEncodeCompleteCallback(
+      webrtc::EncodedImageCallback* aCallback) = 0;
+
+  virtual int32_t Shutdown() = 0;
+
+  virtual int32_t SetRates(
+      const webrtc::VideoEncoder::RateControlParameters& aParameters) = 0;
+
+  virtual MediaEventSource<uint64_t>* InitPluginEvent() = 0;
+
+  virtual MediaEventSource<uint64_t>* ReleasePluginEvent() = 0;
+
+  virtual WebrtcVideoEncoder::EncoderInfo GetEncoderInfo() const = 0;
+
+ protected:
+  virtual ~RefCountedWebrtcVideoEncoder() = default;
+};
+
+// H264 encoder backed by a GMP (Gecko Media Plugin, i.e. OpenH264).
+// Public entry points run on webrtc threads and bounce work to the GMP
+// thread via the static *_g methods; encoded output arrives through the
+// GMPVideoEncoderCallbackProxy overrides.
+class WebrtcGmpVideoEncoder : public GMPVideoEncoderCallbackProxy,
+                              public RefCountedWebrtcVideoEncoder {
+ public:
+  WebrtcGmpVideoEncoder(const webrtc::SdpVideoFormat& aFormat,
+                        std::string aPCHandle);
+
+  // Implement VideoEncoder interface, sort of.
+  // (We cannot use |Release|, since that's needed for nsRefPtr)
+  int32_t InitEncode(const webrtc::VideoCodec* aCodecSettings,
+                     const webrtc::VideoEncoder::Settings& aSettings) override;
+
+  int32_t Encode(
+      const webrtc::VideoFrame& aInputImage,
+      const std::vector<webrtc::VideoFrameType>* aFrameTypes) override;
+
+  int32_t RegisterEncodeCompleteCallback(
+      webrtc::EncodedImageCallback* aCallback) override;
+
+  int32_t Shutdown() override;
+
+  int32_t SetRates(
+      const webrtc::VideoEncoder::RateControlParameters& aParameters) override;
+
+  WebrtcVideoEncoder::EncoderInfo GetEncoderInfo() const override;
+
+  MediaEventSource<uint64_t>* InitPluginEvent() override {
+    return &mInitPluginEvent;
+  }
+
+  MediaEventSource<uint64_t>* ReleasePluginEvent() override {
+    return &mReleasePluginEvent;
+  }
+
+  // GMPVideoEncoderCallback virtual functions.
+  virtual void Terminated() override;
+
+  virtual void Encoded(GMPVideoEncodedFrame* aEncodedFrame,
+                       const nsTArray<uint8_t>& aCodecSpecificInfo) override;
+
+  virtual void Error(GMPErr aError) override {}
+
+ private:
+  virtual ~WebrtcGmpVideoEncoder();
+
+  static void InitEncode_g(const RefPtr<WebrtcGmpVideoEncoder>& aThis,
+                           const GMPVideoCodec& aCodecParams,
+                           int32_t aNumberOfCores, uint32_t aMaxPayloadSize,
+                           const RefPtr<GmpInitDoneRunnable>& aInitDone);
+  int32_t GmpInitDone(GMPVideoEncoderProxy* aGMP, GMPVideoHost* aHost,
+                      const GMPVideoCodec& aCodecParams,
+                      std::string* aErrorOut);
+  int32_t GmpInitDone(GMPVideoEncoderProxy* aGMP, GMPVideoHost* aHost,
+                      std::string* aErrorOut);
+  int32_t InitEncoderForSize(unsigned short aWidth, unsigned short aHeight,
+                             std::string* aErrorOut);
+  static void ReleaseGmp_g(const RefPtr<WebrtcGmpVideoEncoder>& aEncoder);
+  void Close_g();
+
+  // Completes initial plugin acquisition, carrying the codec params the
+  // plugin should be initialized with.
+  class InitDoneCallback : public GetGMPVideoEncoderCallback {
+   public:
+    InitDoneCallback(const RefPtr<WebrtcGmpVideoEncoder>& aEncoder,
+                     const RefPtr<GmpInitDoneRunnable>& aInitDone,
+                     const GMPVideoCodec& aCodecParams)
+        : mEncoder(aEncoder),
+          mInitDone(aInitDone),
+          mCodecParams(aCodecParams) {}
+
+    virtual void Done(GMPVideoEncoderProxy* aGMP,
+                      GMPVideoHost* aHost) override {
+      std::string errorOut;
+      int32_t result =
+          mEncoder->GmpInitDone(aGMP, aHost, mCodecParams, &errorOut);
+
+      mInitDone->Dispatch(result, errorOut);
+    }
+
+   private:
+    const RefPtr<WebrtcGmpVideoEncoder> mEncoder;
+    const RefPtr<GmpInitDoneRunnable> mInitDone;
+    const GMPVideoCodec mCodecParams;
+  };
+
+  static void Encode_g(const RefPtr<WebrtcGmpVideoEncoder>& aEncoder,
+                       webrtc::VideoFrame aInputImage,
+                       std::vector<webrtc::VideoFrameType> aFrameTypes);
+  void RegetEncoderForResolutionChange(
+      uint32_t aWidth, uint32_t aHeight,
+      const RefPtr<GmpInitDoneRunnable>& aInitDone);
+
+  // Completes plugin re-acquisition after a mid-stream resolution change,
+  // then re-initializes the encoder for the new dimensions.
+  class InitDoneForResolutionChangeCallback
+      : public GetGMPVideoEncoderCallback {
+   public:
+    InitDoneForResolutionChangeCallback(
+        const RefPtr<WebrtcGmpVideoEncoder>& aEncoder,
+        const RefPtr<GmpInitDoneRunnable>& aInitDone, uint32_t aWidth,
+        uint32_t aHeight)
+        : mEncoder(aEncoder),
+          mInitDone(aInitDone),
+          mWidth(aWidth),
+          mHeight(aHeight) {}
+
+    virtual void Done(GMPVideoEncoderProxy* aGMP,
+                      GMPVideoHost* aHost) override {
+      std::string errorOut;
+      int32_t result = mEncoder->GmpInitDone(aGMP, aHost, &errorOut);
+      if (result != WEBRTC_VIDEO_CODEC_OK) {
+        mInitDone->Dispatch(result, errorOut);
+        return;
+      }
+
+      result = mEncoder->InitEncoderForSize(mWidth, mHeight, &errorOut);
+      mInitDone->Dispatch(result, errorOut);
+    }
+
+   private:
+    const RefPtr<WebrtcGmpVideoEncoder> mEncoder;
+    const RefPtr<GmpInitDoneRunnable> mInitDone;
+    const uint32_t mWidth;
+    const uint32_t mHeight;
+  };
+
+  static int32_t SetRates_g(RefPtr<WebrtcGmpVideoEncoder> aThis,
+                            uint32_t aNewBitRateKbps, Maybe<double> aFrameRate);
+
+  nsCOMPtr<mozIGeckoMediaPluginService> mMPS;
+  nsCOMPtr<nsIThread> mGMPThread;
+  GMPVideoEncoderProxy* mGMP;
+  // Used to handle a race where Release() is called while init is in progress
+  bool mInitting;
+  GMPVideoHost* mHost;
+  GMPVideoCodec mCodecParams;
+  uint32_t mMaxPayloadSize;
+  const webrtc::SdpVideoFormat::Parameters mFormatParams;
+  webrtc::CodecSpecificInfo mCodecSpecificInfo;
+  webrtc::H264BitstreamParser mH264BitstreamParser;
+  // Protects mCallback
+  Mutex mCallbackMutex MOZ_UNANNOTATED;
+  webrtc::EncodedImageCallback* mCallback;
+  Maybe<uint64_t> mCachedPluginId;
+  const std::string mPCHandle;
+
+  struct InputImageData {
+    int64_t timestamp_us;
+  };
+  // Map rtp time -> input image data
+  DataMutex<std::map<uint32_t, InputImageData>> mInputImageMap;
+
+  MediaEventProducer<uint64_t> mInitPluginEvent;
+  MediaEventProducer<uint64_t> mReleasePluginEvent;
+};
+
+// Basically a strong ref to a RefCountedWebrtcVideoEncoder, that also
+// translates from Release() to RefCountedWebrtcVideoEncoder::Shutdown(),
+// since we need RefCountedWebrtcVideoEncoder::Release() for managing the
+// refcount. The webrtc.org code gets one of these, so it doesn't unilaterally
+// delete the "real" encoder.
+class WebrtcVideoEncoderProxy : public WebrtcVideoEncoder {
+ public:
+ explicit WebrtcVideoEncoderProxy(
+ RefPtr<RefCountedWebrtcVideoEncoder> aEncoder)
+ : mEncoderImpl(std::move(aEncoder)) {}
+
+ // Drop the encode callback before the impl loses its proxy owner, so the
+ // impl never calls into a dead callback.
+ virtual ~WebrtcVideoEncoderProxy() {
+ RegisterEncodeCompleteCallback(nullptr);
+ }
+
+ MediaEventSource<uint64_t>* InitPluginEvent() override {
+ return mEncoderImpl->InitPluginEvent();
+ }
+
+ MediaEventSource<uint64_t>* ReleasePluginEvent() override {
+ return mEncoderImpl->ReleasePluginEvent();
+ }
+
+ int32_t InitEncode(const webrtc::VideoCodec* aCodecSettings,
+ const WebrtcVideoEncoder::Settings& aSettings) override {
+ return mEncoderImpl->InitEncode(aCodecSettings, aSettings);
+ }
+
+ int32_t Encode(
+ const webrtc::VideoFrame& aInputImage,
+ const std::vector<webrtc::VideoFrameType>* aFrameTypes) override {
+ return mEncoderImpl->Encode(aInputImage, aFrameTypes);
+ }
+
+ int32_t RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* aCallback) override {
+ return mEncoderImpl->RegisterEncodeCompleteCallback(aCallback);
+ }
+
+ // webrtc.org's Release() maps onto Shutdown() so the impl's refcounted
+ // Release() stays reserved for lifetime management.
+ int32_t Release() override { return mEncoderImpl->Shutdown(); }
+
+ void SetRates(const RateControlParameters& aParameters) override {
+ mEncoderImpl->SetRates(aParameters);
+ }
+
+ EncoderInfo GetEncoderInfo() const override {
+ return mEncoderImpl->GetEncoderInfo();
+ }
+
+ private:
+ // Strong ref; never null after construction.
+ const RefPtr<RefCountedWebrtcVideoEncoder> mEncoderImpl;
+};
+
+// Video decoder backed by a Gecko Media Plugin (GMP). Mirrors the
+// webrtc::VideoDecoder contract but is refcounted, so the webrtc-facing
+// Release() is exposed as ReleaseGmp() instead (see WebrtcVideoDecoderProxy).
+class WebrtcGmpVideoDecoder : public GMPVideoDecoderCallbackProxy {
+ public:
+ WebrtcGmpVideoDecoder(std::string aPCHandle, TrackingId aTrackingId);
+ NS_INLINE_DECL_THREADSAFE_REFCOUNTING(WebrtcGmpVideoDecoder);
+
+ // Implement the webrtc::VideoDecoder interface, sort of.
+ // (We cannot use |Release|, since that's needed for nsRefPtr)
+ virtual bool Configure(const webrtc::VideoDecoder::Settings& settings);
+ virtual int32_t Decode(const webrtc::EncodedImage& aInputImage,
+ bool aMissingFrames, int64_t aRenderTimeMs);
+ virtual int32_t RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* aCallback);
+
+ // Stands in for webrtc::VideoDecoder::Release(); tears down the GMP.
+ virtual int32_t ReleaseGmp();
+
+ // Notifications of plugin id on init/release, for diagnostics.
+ MediaEventSource<uint64_t>* InitPluginEvent() { return &mInitPluginEvent; }
+
+ MediaEventSource<uint64_t>* ReleasePluginEvent() {
+ return &mReleasePluginEvent;
+ }
+
+ // GMPVideoDecoderCallbackProxy
+ virtual void Terminated() override;
+
+ virtual void Decoded(GMPVideoi420Frame* aDecodedFrame) override;
+
+ // The reference-frame callbacks are never expected from our plugins.
+ virtual void ReceivedDecodedReferenceFrame(
+ const uint64_t aPictureId) override {
+ MOZ_CRASH();
+ }
+
+ virtual void ReceivedDecodedFrame(const uint64_t aPictureId) override {
+ MOZ_CRASH();
+ }
+
+ virtual void InputDataExhausted() override {}
+
+ virtual void DrainComplete() override {}
+
+ virtual void ResetComplete() override {}
+
+ // Records the plugin error; surfaced later by decode paths.
+ virtual void Error(GMPErr aError) override { mDecoderStatus = aError; }
+
+ private:
+ virtual ~WebrtcGmpVideoDecoder();
+
+ // _g-suffixed helpers run on the GMP thread.
+ static void Configure_g(const RefPtr<WebrtcGmpVideoDecoder>& aThis,
+ const webrtc::VideoDecoder::Settings& settings,
+ const RefPtr<GmpInitDoneRunnable>& aInitDone);
+ int32_t GmpInitDone(GMPVideoDecoderProxy* aGMP, GMPVideoHost* aHost,
+ std::string* aErrorOut);
+ static void ReleaseGmp_g(const RefPtr<WebrtcGmpVideoDecoder>& aDecoder);
+ void Close_g();
+
+ // Completes GmpInitDone() and forwards the result to the init runnable.
+ class InitDoneCallback : public GetGMPVideoDecoderCallback {
+ public:
+ explicit InitDoneCallback(const RefPtr<WebrtcGmpVideoDecoder>& aDecoder,
+ const RefPtr<GmpInitDoneRunnable>& aInitDone)
+ : mDecoder(aDecoder), mInitDone(aInitDone) {}
+
+ virtual void Done(GMPVideoDecoderProxy* aGMP,
+ GMPVideoHost* aHost) override {
+ std::string errorOut;
+ int32_t result = mDecoder->GmpInitDone(aGMP, aHost, &errorOut);
+
+ mInitDone->Dispatch(result, errorOut);
+ }
+
+ private:
+ const RefPtr<WebrtcGmpVideoDecoder> mDecoder;
+ const RefPtr<GmpInitDoneRunnable> mInitDone;
+ };
+
+ static void Decode_g(const RefPtr<WebrtcGmpVideoDecoder>& aThis,
+ UniquePtr<GMPDecodeData>&& aDecodeData);
+
+ nsCOMPtr<mozIGeckoMediaPluginService> mMPS;
+ nsCOMPtr<nsIThread> mGMPThread;
+ GMPVideoDecoderProxy* mGMP; // Addref is held for us
+ // Used to handle a race where Release() is called while init is in progress
+ bool mInitting;
+ // Frames queued for decode while mInitting is true
+ nsTArray<UniquePtr<GMPDecodeData>> mQueuedFrames;
+ GMPVideoHost* mHost;
+ // Protects mCallback
+ Mutex mCallbackMutex MOZ_UNANNOTATED;
+ webrtc::DecodedImageCallback* mCallback;
+ Maybe<uint64_t> mCachedPluginId;
+ // Last error reported by the plugin via Error(); thread-safe.
+ Atomic<GMPErr, ReleaseAcquire> mDecoderStatus;
+ const std::string mPCHandle;
+ const TrackingId mTrackingId;
+ PerformanceRecorderMulti<DecodeStage> mPerformanceRecorder;
+
+ MediaEventProducer<uint64_t> mInitPluginEvent;
+ MediaEventProducer<uint64_t> mReleasePluginEvent;
+};
+
+// Basically a strong ref to a WebrtcGmpVideoDecoder, that also translates
+// from Release() to WebrtcGmpVideoDecoder::ReleaseGmp(), since we need
+// WebrtcGmpVideoDecoder::Release() for managing the refcount.
+// The webrtc.org code gets one of these, so it doesn't unilaterally delete
+// the "real" decoder.
+class WebrtcVideoDecoderProxy : public WebrtcVideoDecoder {
+ public:
+ explicit WebrtcVideoDecoderProxy(std::string aPCHandle,
+ TrackingId aTrackingId)
+ : mDecoderImpl(new WebrtcGmpVideoDecoder(std::move(aPCHandle),
+ std::move(aTrackingId))) {}
+
+ // Clear the decode callback before the impl outlives this proxy.
+ virtual ~WebrtcVideoDecoderProxy() {
+ RegisterDecodeCompleteCallback(nullptr);
+ }
+
+ MediaEventSource<uint64_t>* InitPluginEvent() override {
+ return mDecoderImpl->InitPluginEvent();
+ }
+
+ MediaEventSource<uint64_t>* ReleasePluginEvent() override {
+ return mDecoderImpl->ReleasePluginEvent();
+ }
+
+ bool Configure(const Settings& settings) override {
+ return mDecoderImpl->Configure(settings);
+ }
+
+ int32_t Decode(const webrtc::EncodedImage& aInputImage, bool aMissingFrames,
+ int64_t aRenderTimeMs) override {
+ return mDecoderImpl->Decode(aInputImage, aMissingFrames, aRenderTimeMs);
+ }
+
+ int32_t RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* aCallback) override {
+ return mDecoderImpl->RegisterDecodeCompleteCallback(aCallback);
+ }
+
+ // webrtc.org's Release() maps onto ReleaseGmp(); the impl's refcounted
+ // Release() stays reserved for lifetime management.
+ int32_t Release() override { return mDecoderImpl->ReleaseGmp(); }
+
+ private:
+ // Strong ref; never null after construction.
+ const RefPtr<WebrtcGmpVideoDecoder> mDecoderImpl;
+};
+
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcImageBuffer.h b/dom/media/webrtc/libwebrtcglue/WebrtcImageBuffer.h
new file mode 100644
index 0000000000..305f4df577
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcImageBuffer.h
@@ -0,0 +1,53 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WebrtcImageBuffer_h__
+#define WebrtcImageBuffer_h__
+
+#include "common_video/include/video_frame_buffer.h"
+
+namespace mozilla {
+namespace layers {
+class Image;
+}
+
+// webrtc::VideoFrameBuffer wrapper around a layers::Image, letting Gecko
+// images flow through libwebrtc as "native" frame buffers. ToI420() only
+// succeeds for planar YCbCr images.
+class ImageBuffer : public webrtc::VideoFrameBuffer {
+ public:
+ explicit ImageBuffer(RefPtr<layers::Image>&& aImage)
+ : mImage(std::move(aImage)) {}
+
+ rtc::scoped_refptr<webrtc::I420BufferInterface> ToI420() override {
+ RefPtr<layers::PlanarYCbCrImage> image = mImage->AsPlanarYCbCrImage();
+ MOZ_ASSERT(image);
+ if (!image) {
+ // TODO. YUV420 ReadBack, Image only provides a RGB readback.
+ return nullptr;
+ }
+ const layers::PlanarYCbCrData* data = image->GetData();
+ // Wrap the existing planes without copying; the capturing lambda keeps
+ // the image (and thus the plane memory) alive as long as the buffer is.
+ rtc::scoped_refptr<webrtc::I420BufferInterface> buf =
+ webrtc::WrapI420Buffer(
+ data->mPictureRect.width, data->mPictureRect.height,
+ data->mYChannel, data->mYStride, data->mCbChannel,
+ data->mCbCrStride, data->mCrChannel, data->mCbCrStride,
+ [image] { /* keep reference alive*/ });
+ return buf;
+ }
+
+ Type type() const override { return Type::kNative; }
+
+ int width() const override { return mImage->GetSize().width; }
+
+ int height() const override { return mImage->GetSize().height; }
+
+ RefPtr<layers::Image> GetNativeImage() const { return mImage; }
+
+ private:
+ const RefPtr<layers::Image> mImage;
+};
+
+} // namespace mozilla
+
+#endif // WebrtcImageBuffer_h__
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.cpp b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.cpp
new file mode 100644
index 0000000000..a14a4d4f75
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.cpp
@@ -0,0 +1,201 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebrtcMediaDataDecoderCodec.h"
+
+#include "ImageContainer.h"
+#include "MediaDataDecoderProxy.h"
+#include "PDMFactory.h"
+#include "VideoUtils.h"
+#include "mozilla/layers/ImageBridgeChild.h"
+#include "mozilla/media/MediaUtils.h"
+
+namespace mozilla {
+
+// Construct with the MIME type the platform decoder will be created for.
+// Sets up the supervisor-pool task queue, an asynchronous image container
+// for decoded frames, and the PDMFactory used by CreateDecoder().
+WebrtcMediaDataDecoder::WebrtcMediaDataDecoder(nsACString& aCodecMimeType,
+ TrackingId aTrackingId)
+ : mThreadPool(GetMediaThreadPool(MediaThreadType::SUPERVISOR)),
+ mTaskQueue(TaskQueue::Create(do_AddRef(mThreadPool),
+ "WebrtcMediaDataDecoder::mTaskQueue")),
+ mImageContainer(MakeAndAddRef<layers::ImageContainer>(
+ layers::ImageContainer::ASYNCHRONOUS)),
+ mFactory(new PDMFactory()),
+ mTrackType(TrackInfo::kUndefinedTrack),
+ mCodecType(aCodecMimeType),
+ mTrackingId(std::move(aTrackingId)) {}
+
+WebrtcMediaDataDecoder::~WebrtcMediaDataDecoder() {}
+
+// Configures the decoder for a video track sized to the maximum render
+// resolution and synchronously creates the underlying platform decoder.
+// Returns true on success. The MIME type was fixed at construction time.
+bool WebrtcMediaDataDecoder::Configure(
+    const webrtc::VideoDecoder::Settings& settings) {
+  mTrackType = TrackInfo::kVideoTrack;
+  mInfo = VideoInfo(settings.max_render_resolution().Width(),
+                    settings.max_render_resolution().Height());
+  mInfo.mMimeType = mCodecType;
+
+  return WEBRTC_VIDEO_CODEC_OK == CreateDecoder();
+}
+
+// Synchronously decodes one encoded frame and delivers any resulting video
+// frames to the registered DecodedImageCallback. On a non-cancellation
+// decode error the platform decoder is recreated with hardware acceleration
+// disabled for subsequent attempts.
+int32_t WebrtcMediaDataDecoder::Decode(const webrtc::EncodedImage& aInputImage,
+ bool aMissingFrames,
+ int64_t aRenderTimeMs) {
+ if (!mCallback || !mDecoder) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+
+ if (!aInputImage.data() || !aInputImage.size()) {
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+
+ // Always start with a complete key frame.
+ if (mNeedKeyframe) {
+ if (aInputImage._frameType != webrtc::VideoFrameType::kVideoFrameKey)
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ // We have a key frame - is it complete?
+ mNeedKeyframe = false;
+ }
+
+ // Pessimistically assume this decode will fail; released at the bottom on
+ // success so a failed decode leaves hardware acceleration disabled.
+ auto disabledHardwareAcceleration =
+ MakeScopeExit([&] { mDisabledHardwareAcceleration = true; });
+
+ // Copies the encoded payload into an owned buffer.
+ RefPtr<MediaRawData> compressedFrame =
+ new MediaRawData(aInputImage.data(), aInputImage.size());
+ if (!compressedFrame->Data()) {
+ return WEBRTC_VIDEO_CODEC_MEMORY;
+ }
+
+ compressedFrame->mTime =
+ media::TimeUnit::FromMicroseconds(aInputImage.Timestamp());
+ compressedFrame->mTimecode =
+ media::TimeUnit::FromMicroseconds(aRenderTimeMs * 1000);
+ compressedFrame->mKeyframe =
+ aInputImage._frameType == webrtc::VideoFrameType::kVideoFrameKey;
+ {
+ // Block this thread until the async decode settles; results land in
+ // mResults / mError.
+ media::Await(
+ do_AddRef(mThreadPool), mDecoder->Decode(compressedFrame),
+ [&](const MediaDataDecoder::DecodedData& aResults) {
+ mResults = aResults.Clone();
+ mError = NS_OK;
+ },
+ [&](const MediaResult& aError) { mError = aError; });
+
+ for (auto& frame : mResults) {
+ MOZ_ASSERT(frame->mType == MediaData::Type::VIDEO_DATA);
+ RefPtr<VideoData> video = frame->As<VideoData>();
+ MOZ_ASSERT(video);
+ if (!video->mImage) {
+ // Nothing to display.
+ continue;
+ }
+ // Hand the decoded image to webrtc wrapped as a native frame buffer,
+ // tagged with the input's RTP timestamp and rotation.
+ rtc::scoped_refptr<ImageBuffer> image(
+ new rtc::RefCountedObject<ImageBuffer>(std::move(video->mImage)));
+
+ auto videoFrame = webrtc::VideoFrame::Builder()
+ .set_video_frame_buffer(image)
+ .set_timestamp_rtp(aInputImage.Timestamp())
+ .set_rotation(aInputImage.rotation_)
+ .build();
+ mCallback->Decoded(videoFrame);
+ }
+ mResults.Clear();
+ }
+
+ // Hard decode error: rebuild the decoder (now without HW acceleration,
+ // because the scope-exit above has not been released yet).
+ if (NS_FAILED(mError) && mError != NS_ERROR_DOM_MEDIA_CANCELED) {
+ CreateDecoder();
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Cancellation: report the error but keep the existing decoder.
+ if (NS_FAILED(mError)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // Success: keep hardware acceleration as-is.
+ disabledHardwareAcceleration.release();
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Stores the (possibly null) callback that Decode() delivers frames to.
+int32_t WebrtcMediaDataDecoder::RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* aCallback) {
+ mCallback = aCallback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Releases the platform decoder (flush then shutdown, asynchronously on
+// the task queue) and resets decode state so the next use requires a
+// keyframe again.
+int32_t WebrtcMediaDataDecoder::Release() {
+ if (mDecoder) {
+ // Move ownership into the lambda chain so the decoder stays alive
+ // until Shutdown() completes.
+ RefPtr<MediaDataDecoder> decoder = std::move(mDecoder);
+ decoder->Flush()->Then(mTaskQueue, __func__,
+ [decoder]() { decoder->Shutdown(); });
+ }
+
+ mNeedKeyframe = true;
+ mError = NS_OK;
+
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// True when called from this decoder's task queue.
+bool WebrtcMediaDataDecoder::OnTaskQueue() const {
+ return mTaskQueue->IsOnCurrentThread();
+}
+
+// Synchronously (re)creates the platform decoder described by mInfo,
+// wraps it in a MediaDataDecoderProxy bound to a decoder task queue, and
+// initializes it. Hardware decoding is excluded once
+// mDisabledHardwareAcceleration has been set by a previous failure.
+int32_t WebrtcMediaDataDecoder::CreateDecoder() {
+ RefPtr<layers::KnowsCompositor> knowsCompositor =
+ layers::ImageBridgeChild::GetSingleton();
+
+ // Tear down any previous decoder first.
+ if (mDecoder) {
+ Release();
+ }
+
+ RefPtr<TaskQueue> tq =
+ TaskQueue::Create(GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER),
+ "webrtc decode TaskQueue");
+ RefPtr<MediaDataDecoder> decoder;
+
+ // Block until the factory resolves or rejects; `decoder` is only set on
+ // the resolve path.
+ media::Await(do_AddRef(mThreadPool), InvokeAsync(tq, __func__, [&] {
+ RefPtr<GenericPromise> p =
+ mFactory
+ ->CreateDecoder(
+ {mInfo,
+ CreateDecoderParams::OptionSet(
+ CreateDecoderParams::Option::LowLatency,
+ CreateDecoderParams::Option::FullH264Parsing,
+ CreateDecoderParams::Option::
+ ErrorIfNoInitializationData,
+ mDisabledHardwareAcceleration
+ ? CreateDecoderParams::Option::
+ HardwareDecoderNotAllowed
+ : CreateDecoderParams::Option::Default),
+ mTrackType, mImageContainer, knowsCompositor,
+ Some(mTrackingId)})
+ ->Then(
+ tq, __func__,
+ [&](RefPtr<MediaDataDecoder>&& aDecoder) {
+ decoder = std::move(aDecoder);
+ return GenericPromise::CreateAndResolve(
+ true, __func__);
+ },
+ [](const MediaResult& aResult) {
+ return GenericPromise::CreateAndReject(
+ NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
+ });
+ return p;
+ }));
+
+ if (!decoder) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+
+ // We need to wrap our decoder in a MediaDataDecoderProxy so that it always
+ // run on an nsISerialEventTarget (which the webrtc code doesn't do)
+ mDecoder = new MediaDataDecoderProxy(decoder.forget(), tq.forget());
+
+ // Initialize synchronously; the result is reported through mError.
+ media::Await(
+ do_AddRef(mThreadPool), mDecoder->Init(),
+ [&](TrackInfo::TrackType) { mError = NS_OK; },
+ [&](const MediaResult& aError) { mError = aError; });
+
+ return NS_SUCCEEDED(mError) ? WEBRTC_VIDEO_CODEC_OK
+ : WEBRTC_VIDEO_CODEC_ERROR;
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.h b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.h
new file mode 100644
index 0000000000..ccb54c692b
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.h
@@ -0,0 +1,70 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WebrtcMediaDataDecoderCodec_h__
+#define WebrtcMediaDataDecoderCodec_h__
+
+#include "MediaConduitInterface.h"
+#include "MediaInfo.h"
+#include "MediaResult.h"
+#include "PlatformDecoderModule.h"
+#include "VideoConduit.h"
+#include "WebrtcImageBuffer.h"
+#include "common_video/include/video_frame_buffer.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace webrtc {
+class DecodedImageCallback;
+}
+namespace mozilla {
+namespace layers {
+class Image;
+class ImageContainer;
+} // namespace layers
+
+class PDMFactory;
+class SharedThreadPool;
+class TaskQueue;
+
+// WebrtcVideoDecoder implementation backed by Gecko's platform decoder
+// modules (PDMFactory/MediaDataDecoder) instead of libwebrtc's built-in
+// software decoders.
+class WebrtcMediaDataDecoder : public WebrtcVideoDecoder {
+ public:
+ WebrtcMediaDataDecoder(nsACString& aCodecMimeType, TrackingId aTrackingId);
+
+ bool Configure(const webrtc::VideoDecoder::Settings& settings) override;
+
+ int32_t Decode(const webrtc::EncodedImage& inputImage, bool missingFrames,
+ int64_t renderTimeMs = -1) override;
+
+ int32_t RegisterDecodeCompleteCallback(
+ webrtc::DecodedImageCallback* callback) override;
+
+ int32_t Release() override;
+
+ private:
+ ~WebrtcMediaDataDecoder();
+ void QueueFrame(MediaRawData* aFrame);
+ bool OnTaskQueue() const;
+ // (Re)creates and initializes the platform decoder from mInfo.
+ int32_t CreateDecoder();
+
+ const RefPtr<SharedThreadPool> mThreadPool;
+ const RefPtr<TaskQueue> mTaskQueue;
+ const RefPtr<layers::ImageContainer> mImageContainer;
+ const RefPtr<PDMFactory> mFactory;
+ RefPtr<MediaDataDecoder> mDecoder;
+ // Owned by webrtc; set via RegisterDecodeCompleteCallback().
+ webrtc::DecodedImageCallback* mCallback = nullptr;
+ VideoInfo mInfo;
+ TrackInfo::TrackType mTrackType;
+ // True until the first keyframe has been accepted by Decode().
+ bool mNeedKeyframe = true;
+ MozPromiseRequestHolder<MediaDataDecoder::DecodePromise> mDecodeRequest;
+
+ // Result of the most recent decode/init; NS_OK when healthy.
+ MediaResult mError = NS_OK;
+ MediaDataDecoder::DecodedData mResults;
+ const nsCString mCodecType;
+ // Set after a hard decode failure; excludes HW decoders on recreate.
+ bool mDisabledHardwareAcceleration = false;
+ const TrackingId mTrackingId;
+};
+
+} // namespace mozilla
+
+#endif // WebrtcMediaDataDecoderCodec_h__
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.cpp b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.cpp
new file mode 100644
index 0000000000..42bb2bbd15
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.cpp
@@ -0,0 +1,499 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebrtcMediaDataEncoderCodec.h"
+
+#include "AnnexB.h"
+#include "ImageContainer.h"
+#include "MediaData.h"
+#include "PEMFactory.h"
+#include "VideoUtils.h"
+#include "mozilla/Maybe.h"
+#include "mozilla/Span.h"
+#include "mozilla/gfx/Point.h"
+#include "mozilla/media/MediaUtils.h"
+#include "api/video_codecs/h264_profile_level_id.h"
+#include "media/base/media_constants.h"
+#include "system_wrappers/include/clock.h"
+#include "modules/video_coding/utility/vp8_header_parser.h"
+#include "modules/video_coding/utility/vp9_uncompressed_header_parser.h"
+
+namespace mozilla {
+
+extern LazyLogModule sPEMLog;
+
+#undef LOG
+#define LOG(msg, ...) \
+ MOZ_LOG(sPEMLog, LogLevel::Debug, \
+ ("WebrtcMediaDataEncoder=%p, " msg, this, ##__VA_ARGS__))
+
+#undef LOG_V
+#define LOG_V(msg, ...) \
+ MOZ_LOG(sPEMLog, LogLevel::Verbose, \
+ ("WebrtcMediaDataEncoder=%p, " msg, this, ##__VA_ARGS__))
+
+using namespace media;
+using namespace layers;
+using MimeTypeResult = Maybe<nsLiteralCString>;
+
+// Maps a libwebrtc codec type onto the MIME type string used by the
+// platform encoder factory. Any other codec type is a programming error.
+static MimeTypeResult ConvertWebrtcCodecTypeToMimeType(
+    const webrtc::VideoCodecType& aType) {
+  if (aType == webrtc::VideoCodecType::kVideoCodecVP8) {
+    return Some("video/vp8"_ns);
+  }
+  if (aType == webrtc::VideoCodecType::kVideoCodecVP9) {
+    return Some("video/vp9"_ns);
+  }
+  if (aType == webrtc::VideoCodecType::kVideoCodecH264) {
+    return Some("video/avc"_ns);
+  }
+  MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unsupported codec type");
+  // Not reached; keeps compilers that require a return value happy.
+  return Nothing();
+}
+
+// True when the platform encoder factory supports the MIME type that
+// corresponds to aCodecType.
+bool WebrtcMediaDataEncoder::CanCreate(
+ const webrtc::VideoCodecType aCodecType) {
+ auto factory = MakeRefPtr<PEMFactory>();
+ MimeTypeResult mimeType = ConvertWebrtcCodecTypeToMimeType(aCodecType);
+ return mimeType ? factory->SupportsMimeType(mimeType.ref()) : false;
+}
+
+// Human-readable H.264 packetization mode for logging; "N/A" for codecs
+// that have no packetization mode.
+static const char* PacketModeStr(const webrtc::CodecSpecificInfo& aInfo) {
+  MOZ_ASSERT(aInfo.codecType != webrtc::VideoCodecType::kVideoCodecGeneric);
+
+  if (aInfo.codecType != webrtc::VideoCodecType::kVideoCodecH264) {
+    return "N/A";
+  }
+  const auto mode = aInfo.codecSpecific.H264.packetization_mode;
+  if (mode == webrtc::H264PacketizationMode::SingleNalUnit) {
+    return "SingleNalUnit";
+  }
+  if (mode == webrtc::H264PacketizationMode::NonInterleaved) {
+    return "NonInterleaved";
+  }
+  return "Unknown";
+}
+
+// Derives the platform encoder's H.264 profile from the SDP fmtp
+// parameters: (constrained) baseline maps to BaselineAutoLevel, everything
+// else (including an unparseable profile-level-id) to MainAutoLevel.
+static MediaDataEncoder::H264Specific::ProfileLevel ConvertProfileLevel(
+ const webrtc::SdpVideoFormat::Parameters& aParameters) {
+ const absl::optional<webrtc::H264ProfileLevelId> profileLevel =
+ webrtc::ParseSdpForH264ProfileLevelId(aParameters);
+ if (profileLevel &&
+ (profileLevel->profile == webrtc::H264Profile::kProfileBaseline ||
+ profileLevel->profile ==
+ webrtc::H264Profile::kProfileConstrainedBaseline)) {
+ return MediaDataEncoder::H264Specific::ProfileLevel::BaselineAutoLevel;
+ }
+ return MediaDataEncoder::H264Specific::ProfileLevel::MainAutoLevel;
+}
+
+// Translates a webrtc VPx complexity setting into the platform encoder's
+// equivalent enum. Any other value is a programming error.
+static MediaDataEncoder::VPXSpecific::Complexity MapComplexity(
+    webrtc::VideoCodecComplexity aComplexity) {
+  using Complexity = MediaDataEncoder::VPXSpecific::Complexity;
+  using WebrtcComplexity = webrtc::VideoCodecComplexity;
+  if (aComplexity == WebrtcComplexity::kComplexityNormal) {
+    return Complexity::Normal;
+  }
+  if (aComplexity == WebrtcComplexity::kComplexityHigh) {
+    return Complexity::High;
+  }
+  if (aComplexity == WebrtcComplexity::kComplexityHigher) {
+    return Complexity::Higher;
+  }
+  if (aComplexity == WebrtcComplexity::kComplexityMax) {
+    return Complexity::Max;
+  }
+  MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Bad complexity value");
+}
+
+// Construct from the negotiated SDP format; the actual platform encoder is
+// created later in InitEncode(). Stashes the SDP fmtp parameters for H.264
+// profile / packetization-mode decisions.
+WebrtcMediaDataEncoder::WebrtcMediaDataEncoder(
+ const webrtc::SdpVideoFormat& aFormat)
+ : mTaskQueue(
+ TaskQueue::Create(GetMediaThreadPool(MediaThreadType::SUPERVISOR),
+ "WebrtcMediaDataEncoder::mTaskQueue")),
+ mFactory(new PEMFactory()),
+ mCallbackMutex("WebrtcMediaDataEncoderCodec encoded callback mutex"),
+ mFormatParams(aFormat.parameters),
+ // Use the same lower and upper bound as h264_video_toolbox_encoder which
+ // is an encoder from webrtc's upstream codebase.
+ // 0.5 is set as a minimum to prevent overcompensating for large temporary
+ // overshoots. We don't want to degrade video quality too badly.
+ // 0.95 is set to prevent oscillations. When a lower bitrate is set on the
+ // encoder than previously set, its output seems to have a brief period of
+ // drastically reduced bitrate, so we want to avoid that. In steady state
+ // conditions, 0.95 seems to give us better overall bitrate over long
+ // periods of time.
+ mBitrateAdjuster(0.5, 0.95) {
+ PodZero(&mCodecSpecific.codecSpecific);
+}
+
+// Fills the codec-type-dependent parts of aInfo from the codec settings and
+// the SDP fmtp parameters. H.264 chooses packetization mode from fmtp
+// ("1" => NonInterleaved); VP9 copies flexible-mode. Other codecs need no
+// per-codec setup here. (Name keeps the historical "Specfic" spelling.)
+static void InitCodecSpecficInfo(
+ webrtc::CodecSpecificInfo& aInfo, const webrtc::VideoCodec* aCodecSettings,
+ const webrtc::SdpVideoFormat::Parameters& aParameters) {
+ MOZ_ASSERT(aCodecSettings);
+
+ aInfo.codecType = aCodecSettings->codecType;
+ switch (aCodecSettings->codecType) {
+ case webrtc::VideoCodecType::kVideoCodecH264: {
+ aInfo.codecSpecific.H264.packetization_mode =
+ aParameters.count(cricket::kH264FmtpPacketizationMode) == 1 &&
+ aParameters.at(cricket::kH264FmtpPacketizationMode) == "1"
+ ? webrtc::H264PacketizationMode::NonInterleaved
+ : webrtc::H264PacketizationMode::SingleNalUnit;
+ break;
+ }
+ case webrtc::VideoCodecType::kVideoCodecVP9: {
+ MOZ_ASSERT(aCodecSettings->VP9().numberOfSpatialLayers == 1);
+ aInfo.codecSpecific.VP9.flexible_mode =
+ aCodecSettings->VP9().flexibleMode;
+ aInfo.codecSpecific.VP9.first_frame_in_picture = true;
+ break;
+ }
+ default:
+ break;
+ }
+}
+
+// Creates and initializes the platform encoder for the given settings.
+// Simulcast is not supported; returns the dedicated error so webrtc falls
+// back to its simulcast adaptor, and FALLBACK_SOFTWARE when the platform
+// encoder cannot be created or initialized.
+int32_t WebrtcMediaDataEncoder::InitEncode(
+ const webrtc::VideoCodec* aCodecSettings,
+ const webrtc::VideoEncoder::Settings& aSettings) {
+ MOZ_ASSERT(aCodecSettings);
+
+ if (aCodecSettings->numberOfSimulcastStreams > 1) {
+ LOG("Only one stream is supported. Falling back to simulcast adaptor");
+ return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
+ }
+
+ if (mEncoder) {
+ // Clean existing encoder.
+ Shutdown();
+ }
+
+ RefPtr<MediaDataEncoder> encoder = CreateEncoder(aCodecSettings);
+ if (!encoder) {
+ LOG("Fail to create encoder. Falling back to SW");
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+
+ InitCodecSpecficInfo(mCodecSpecific, aCodecSettings, mFormatParams);
+ LOG("Init encode, mimeType %s, mode %s", mInfo.mMimeType.get(),
+ PacketModeStr(mCodecSpecific));
+ // Block until the platform encoder finishes initializing.
+ if (!media::Await(do_AddRef(mTaskQueue), encoder->Init()).IsResolve()) {
+ LOG("Fail to init encoder. Falling back to SW");
+ return WEBRTC_VIDEO_CODEC_FALLBACK_SOFTWARE;
+ }
+ // Only adopt the encoder once init succeeded.
+ mEncoder = std::move(encoder);
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Derives mInfo (size + MIME type), frame-rate and bitrate bounds from the
+// webrtc codec settings. Returns false for an unsupported codec type.
+bool WebrtcMediaDataEncoder::SetupConfig(
+ const webrtc::VideoCodec* aCodecSettings) {
+ MimeTypeResult mimeType =
+ ConvertWebrtcCodecTypeToMimeType(aCodecSettings->codecType);
+ if (!mimeType) {
+ LOG("Get incorrect mime type");
+ return false;
+ }
+ mInfo = VideoInfo(aCodecSettings->width, aCodecSettings->height);
+ mInfo.mMimeType = mimeType.extract();
+ mMaxFrameRate = aCodecSettings->maxFramerate;
+ // Those bitrates in codec setting are all kbps, so we have to convert them
+ // to bps.
+ mMaxBitrateBps = aCodecSettings->maxBitrate * 1000;
+ mMinBitrateBps = aCodecSettings->minBitrate * 1000;
+ mBitrateAdjuster.SetTargetBitrateBps(aCodecSettings->startBitrate * 1000);
+ return true;
+}
+
+// Builds CreateEncoderParams (realtime usage, YUV420P, per-codec keyframe
+// interval and codec-specific settings) and asks the PEM factory for a
+// platform encoder. Returns null when config setup fails or the codec type
+// is unsupported.
+already_AddRefed<MediaDataEncoder> WebrtcMediaDataEncoder::CreateEncoder(
+ const webrtc::VideoCodec* aCodecSettings) {
+ if (!SetupConfig(aCodecSettings)) {
+ return nullptr;
+ }
+ LOG("Request platform encoder for %s, bitRate=%u bps, frameRate=%u",
+ mInfo.mMimeType.get(), mBitrateAdjuster.GetTargetBitrateBps(),
+ aCodecSettings->maxFramerate);
+
+ // Keyframe interval comes from the codec-specific settings block.
+ size_t keyframeInterval = 1;
+ switch (aCodecSettings->codecType) {
+ case webrtc::VideoCodecType::kVideoCodecH264: {
+ keyframeInterval = aCodecSettings->H264().keyFrameInterval;
+ break;
+ }
+ case webrtc::VideoCodecType::kVideoCodecVP8: {
+ keyframeInterval = aCodecSettings->VP8().keyFrameInterval;
+ break;
+ }
+ case webrtc::VideoCodecType::kVideoCodecVP9: {
+ keyframeInterval = aCodecSettings->VP9().keyFrameInterval;
+ break;
+ }
+ default:
+ MOZ_ASSERT_UNREACHABLE("Unsupported codec type");
+ return nullptr;
+ }
+ CreateEncoderParams params(
+ mInfo, MediaDataEncoder::Usage::Realtime,
+ TaskQueue::Create(GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER),
+ "WebrtcMediaDataEncoder::mEncoder"),
+ MediaDataEncoder::PixelFormat::YUV420P, aCodecSettings->maxFramerate,
+ keyframeInterval, mBitrateAdjuster.GetTargetBitrateBps());
+ // Attach codec-specific encoder settings (profile for H.264, VPx tuning
+ // for VP8/VP9).
+ switch (aCodecSettings->codecType) {
+ case webrtc::VideoCodecType::kVideoCodecH264: {
+ params.SetCodecSpecific(
+ MediaDataEncoder::H264Specific(ConvertProfileLevel(mFormatParams)));
+ break;
+ }
+ case webrtc::VideoCodecType::kVideoCodecVP8: {
+ const webrtc::VideoCodecVP8& vp8 = aCodecSettings->VP8();
+ const webrtc::VideoCodecComplexity complexity =
+ aCodecSettings->GetVideoEncoderComplexity();
+ const bool frameDropEnabled = aCodecSettings->GetFrameDropEnabled();
+ params.SetCodecSpecific(MediaDataEncoder::VPXSpecific::VP8(
+ MapComplexity(complexity), false, vp8.numberOfTemporalLayers,
+ vp8.denoisingOn, vp8.automaticResizeOn, frameDropEnabled));
+ break;
+ }
+ case webrtc::VideoCodecType::kVideoCodecVP9: {
+ const webrtc::VideoCodecVP9& vp9 = aCodecSettings->VP9();
+ const webrtc::VideoCodecComplexity complexity =
+ aCodecSettings->GetVideoEncoderComplexity();
+ const bool frameDropEnabled = aCodecSettings->GetFrameDropEnabled();
+ params.SetCodecSpecific(MediaDataEncoder::VPXSpecific::VP9(
+ MapComplexity(complexity), false, vp9.numberOfTemporalLayers,
+ vp9.denoisingOn, vp9.automaticResizeOn, frameDropEnabled,
+ vp9.adaptiveQpMode, vp9.numberOfSpatialLayers, vp9.flexibleMode));
+ break;
+ }
+ default:
+ MOZ_MAKE_COMPILER_ASSUME_IS_UNREACHABLE("Unsupported codec type");
+ }
+ return mFactory->CreateEncoder(params);
+}
+
+// Static capability description reported to webrtc: no native-handle input,
+// no simulcast, and (on Android) 16-pixel resolution alignment.
+WebrtcVideoEncoder::EncoderInfo WebrtcMediaDataEncoder::GetEncoderInfo() const {
+ WebrtcVideoEncoder::EncoderInfo info;
+ info.supports_native_handle = false;
+ info.implementation_name = "MediaDataEncoder";
+ info.is_hardware_accelerated = false;
+ info.supports_simulcast = false;
+
+#ifdef MOZ_WIDGET_ANDROID
+ // Assume MediaDataEncoder is used mainly for hardware encoding.
+ // 16-alignment seems required on Android. This could be improved by
+ // querying the underlying encoder.
+ info.requested_resolution_alignment = 16;
+ info.apply_alignment_to_all_simulcast_layers = true;
+#endif
+ return info;
+}
+
+// Stores the (possibly null) callback used to deliver encoded images;
+// guarded by mCallbackMutex because the encode completion lambda reads it.
+int32_t WebrtcMediaDataEncoder::RegisterEncodeCompleteCallback(
+ webrtc::EncodedImageCallback* aCallback) {
+ MutexAutoLock lock(mCallbackMutex);
+ mCallback = aCallback;
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Clears the callback and error state, then synchronously shuts down the
+// platform encoder (if any). Safe to call repeatedly.
+int32_t WebrtcMediaDataEncoder::Shutdown() {
+ LOG("Release encoder");
+ {
+ MutexAutoLock lock(mCallbackMutex);
+ mCallback = nullptr;
+ mError = NS_OK;
+ }
+ if (mEncoder) {
+ media::Await(do_AddRef(mTaskQueue), mEncoder->Shutdown());
+ mEncoder = nullptr;
+ }
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Copies an I420 webrtc::VideoFrame into a Gecko VideoData suitable for the
+// platform encoder, carrying over the RTP timestamp (converted to a
+// TimeUnit at the 90 kHz video clock rate).
+static already_AddRefed<VideoData> CreateVideoDataFromWebrtcVideoFrame(
+ const webrtc::VideoFrame& aFrame, const bool aIsKeyFrame,
+ const TimeUnit aDuration) {
+ MOZ_ASSERT(aFrame.video_frame_buffer()->type() ==
+ webrtc::VideoFrameBuffer::Type::kI420,
+ "Only support YUV420!");
+ const webrtc::I420BufferInterface* i420 =
+ aFrame.video_frame_buffer()->GetI420();
+
+ // Describe the source planes; the actual pixels are copied below.
+ PlanarYCbCrData yCbCrData;
+ yCbCrData.mYChannel = const_cast<uint8_t*>(i420->DataY());
+ yCbCrData.mYStride = i420->StrideY();
+ yCbCrData.mCbChannel = const_cast<uint8_t*>(i420->DataU());
+ yCbCrData.mCrChannel = const_cast<uint8_t*>(i420->DataV());
+ MOZ_ASSERT(i420->StrideU() == i420->StrideV());
+ yCbCrData.mCbCrStride = i420->StrideU();
+ yCbCrData.mPictureRect = gfx::IntRect(0, 0, i420->width(), i420->height());
+ yCbCrData.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
+
+ RefPtr<PlanarYCbCrImage> image =
+ new RecyclingPlanarYCbCrImage(new BufferRecycleBin());
+ image->CopyData(yCbCrData);
+
+ // Although webrtc::VideoFrame::timestamp_rtp_ will likely be deprecated,
+ // webrtc::EncodedImage and the VPx encoders still use it in the imported
+ // version of libwebrtc. Not using the same timestamp values generates
+ // discontinuous time and confuses the video receiver when switching from
+ // platform to libwebrtc encoder.
+ TimeUnit timestamp =
+ FramesToTimeUnit(aFrame.timestamp(), cricket::kVideoCodecClockrate);
+ return VideoData::CreateFromImage(image->GetSize(), 0, timestamp, aDuration,
+ image, aIsKeyFrame, timestamp);
+}
+
+// Refreshes the per-frame, codec-specific metadata sent along with each
+// encoded image. Assumes a single temporal/spatial layer for both VP8 and
+// VP9. No-op for other codec types.
+static void UpdateCodecSpecificInfo(webrtc::CodecSpecificInfo& aInfo,
+ const gfx::IntSize& aSize,
+ const bool aIsKeyframe) {
+ switch (aInfo.codecType) {
+ case webrtc::VideoCodecType::kVideoCodecVP8: {
+ // See webrtc::VP8EncoderImpl::PopulateCodecSpecific().
+ webrtc::CodecSpecificInfoVP8& vp8 = aInfo.codecSpecific.VP8;
+ vp8.keyIdx = webrtc::kNoKeyIdx;
+ // Cannot be 100% sure unless parsing significant portion of the
+ // bitstream. Treat all frames as referenced just to be safe.
+ vp8.nonReference = false;
+ // One temporal layer only.
+ vp8.temporalIdx = webrtc::kNoTemporalIdx;
+ vp8.layerSync = false;
+ break;
+ }
+ case webrtc::VideoCodecType::kVideoCodecVP9: {
+ // See webrtc::VP9EncoderImpl::PopulateCodecSpecific().
+ webrtc::CodecSpecificInfoVP9& vp9 = aInfo.codecSpecific.VP9;
+ vp9.inter_pic_predicted = !aIsKeyframe;
+ vp9.ss_data_available = aIsKeyframe && !vp9.flexible_mode;
+ // One temporal & spatial layer only.
+ vp9.temporal_idx = webrtc::kNoTemporalIdx;
+ vp9.temporal_up_switch = false;
+ vp9.num_spatial_layers = 1;
+ vp9.end_of_picture = true;
+ vp9.gof_idx = webrtc::kNoGofIdx;
+ vp9.width[0] = aSize.width;
+ vp9.height[0] = aSize.height;
+ break;
+ }
+ default:
+ break;
+ }
+}
+
+// For VP8/VP9 bitstreams, parses the quantizer out of the encoded payload
+// and stores it in aImage.qp_. Other codec types are left untouched.
+static void GetVPXQp(const webrtc::VideoCodecType aType,
+                     webrtc::EncodedImage& aImage) {
+  if (aType == webrtc::VideoCodecType::kVideoCodecVP8) {
+    webrtc::vp8::GetQp(aImage.data(), aImage.size(), &(aImage.qp_));
+  } else if (aType == webrtc::VideoCodecType::kVideoCodecVP9) {
+    webrtc::vp9::GetQp(aImage.data(), aImage.size(), &(aImage.qp_));
+  }
+}
+
+// Submits one frame to the platform encoder. Encoding completes
+// asynchronously on mTaskQueue, where each resulting payload is converted
+// to a webrtc::EncodedImage and delivered through mCallback. Errors from
+// the async path are stashed in mError and surfaced on the next call.
+int32_t WebrtcMediaDataEncoder::Encode(
+ const webrtc::VideoFrame& aInputFrame,
+ const std::vector<webrtc::VideoFrameType>* aFrameTypes) {
+ if (!aInputFrame.size() || !aInputFrame.video_frame_buffer() ||
+ aFrameTypes->empty()) {
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+
+ if (!mEncoder) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ {
+ // Check callback registration and any error left by a previous async
+ // encode under the lock.
+ MutexAutoLock lock(mCallbackMutex);
+ if (!mCallback) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ if (NS_FAILED(mError)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+
+ LOG_V("Encode frame, type %d size %u", static_cast<int>((*aFrameTypes)[0]),
+ aInputFrame.size());
+ MOZ_ASSERT(aInputFrame.video_frame_buffer()->type() ==
+ webrtc::VideoFrameBuffer::Type::kI420);
+ RefPtr<VideoData> data = CreateVideoDataFromWebrtcVideoFrame(
+ aInputFrame, (*aFrameTypes)[0] == webrtc::VideoFrameType::kVideoFrameKey,
+ TimeUnit::FromSeconds(1.0 / mMaxFrameRate));
+ const gfx::IntSize displaySize = data->mDisplay;
+
+ // `self` keeps this encoder alive for the duration of the async encode.
+ mEncoder->Encode(data)->Then(
+ mTaskQueue, __func__,
+ [self = RefPtr<WebrtcMediaDataEncoder>(this), this,
+ displaySize](MediaDataEncoder::EncodedData aFrames) {
+ LOG_V("Received encoded frame, nums %zu width %d height %d",
+ aFrames.Length(), displaySize.width, displaySize.height);
+ for (auto& frame : aFrames) {
+ MutexAutoLock lock(mCallbackMutex);
+ if (!mCallback) {
+ break;
+ }
+ webrtc::EncodedImage image;
+ image.SetEncodedData(
+ webrtc::EncodedImageBuffer::Create(frame->Data(), frame->Size()));
+ image._encodedWidth = displaySize.width;
+ image._encodedHeight = displaySize.height;
+ // Convert the media time back to the 90 kHz RTP clock.
+ CheckedInt64 time =
+ TimeUnitToFrames(frame->mTime, cricket::kVideoCodecClockrate);
+ if (!time.isValid()) {
+ self->mError = MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
+ "invalid timestamp from encoder");
+ break;
+ }
+ image.SetTimestamp(time.value());
+ image._frameType = frame->mKeyframe
+ ? webrtc::VideoFrameType::kVideoFrameKey
+ : webrtc::VideoFrameType::kVideoFrameDelta;
+ GetVPXQp(mCodecSpecific.codecType, image);
+ UpdateCodecSpecificInfo(mCodecSpecific, displaySize,
+ frame->mKeyframe);
+
+ LOG_V("Send encoded image");
+ self->mCallback->OnEncodedImage(image, &mCodecSpecific);
+ // Feed actual output size back into the bitrate adjuster.
+ self->mBitrateAdjuster.Update(image.size());
+ }
+ },
+ [self = RefPtr<WebrtcMediaDataEncoder>(this)](const MediaResult aError) {
+ self->mError = aError;
+ });
+ return WEBRTC_VIDEO_CODEC_OK;
+}
+
+// Applies a new target bitrate from webrtc's rate controller. Rejects
+// values outside the [min, max] range negotiated in SetupConfig, and skips
+// the (blocking) encoder update when the adjusted bitrate is unchanged.
+int32_t WebrtcMediaDataEncoder::SetRates(
+ const webrtc::VideoEncoder::RateControlParameters& aParameters) {
+ MOZ_ASSERT(aParameters.bitrate.IsSpatialLayerUsed(0));
+ MOZ_ASSERT(!aParameters.bitrate.IsSpatialLayerUsed(1),
+ "No simulcast support for platform encoder");
+
+ const uint32_t newBitrateBps = aParameters.bitrate.GetBitrate(0, 0);
+ if (newBitrateBps < mMinBitrateBps || newBitrateBps > mMaxBitrateBps) {
+ return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+ }
+
+ // We have already been in this bitrate.
+ if (mBitrateAdjuster.GetAdjustedBitrateBps() == newBitrateBps) {
+ return WEBRTC_VIDEO_CODEC_OK;
+ }
+
+ if (!mEncoder) {
+ return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+ }
+ {
+ // Surface any error left by a previous async encode.
+ MutexAutoLock lock(mCallbackMutex);
+ if (NS_FAILED(mError)) {
+ return WEBRTC_VIDEO_CODEC_ERROR;
+ }
+ }
+ mBitrateAdjuster.SetTargetBitrateBps(newBitrateBps);
+ LOG("Set bitrate %u bps, minBitrate %u bps, maxBitrate %u bps", newBitrateBps,
+ mMinBitrateBps, mMaxBitrateBps);
+ // Push the new bitrate to the platform encoder synchronously.
+ auto rv =
+ media::Await(do_AddRef(mTaskQueue), mEncoder->SetBitrate(newBitrateBps));
+ return rv.IsResolve() ? WEBRTC_VIDEO_CODEC_OK : WEBRTC_VIDEO_CODEC_ERROR;
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.h b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.h
new file mode 100644
index 0000000000..a5888c10e7
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.h
@@ -0,0 +1,76 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef WebrtcMediaDataEncoderCodec_h__
+#define WebrtcMediaDataEncoderCodec_h__
+
+#include "MediaConduitInterface.h"
+#include "MediaInfo.h"
+#include "MediaResult.h"
+#include "PlatformEncoderModule.h"
+#include "WebrtcGmpVideoCodec.h"
+#include "common_video/include/bitrate_adjuster.h"
+#include "modules/video_coding/include/video_codec_interface.h"
+
+namespace mozilla {
+
+class MediaData;
+class PEMFactory;
+class SharedThreadPool;
+class TaskQueue;
+
+// Adapts a Gecko platform MediaDataEncoder (via PEMFactory) to the
+// webrtc::VideoEncoder-style interface used by the video conduit.
+// Ref-counted; destroyed via Release() (hence the private destructor).
+class WebrtcMediaDataEncoder : public RefCountedWebrtcVideoEncoder {
+ public:
+  // True if a platform encoder can be created for aCodecType.
+  static bool CanCreate(const webrtc::VideoCodecType aCodecType);
+
+  explicit WebrtcMediaDataEncoder(const webrtc::SdpVideoFormat& aFormat);
+
+  int32_t InitEncode(const webrtc::VideoCodec* aCodecSettings,
+                     const webrtc::VideoEncoder::Settings& aSettings) override;
+
+  int32_t RegisterEncodeCompleteCallback(
+      webrtc::EncodedImageCallback* aCallback) override;
+
+  int32_t Shutdown() override;
+
+  int32_t Encode(
+      const webrtc::VideoFrame& aFrame,
+      const std::vector<webrtc::VideoFrameType>* aFrameTypes) override;
+
+  int32_t SetRates(
+      const webrtc::VideoEncoder::RateControlParameters& aParameters) override;
+
+  WebrtcVideoEncoder::EncoderInfo GetEncoderInfo() const override;
+  // Not a GMP plugin encoder, so there are no plugin lifecycle events.
+  MediaEventSource<uint64_t>* InitPluginEvent() override { return nullptr; }
+
+  MediaEventSource<uint64_t>* ReleasePluginEvent() override { return nullptr; }
+
+ private:
+  virtual ~WebrtcMediaDataEncoder() = default;
+
+  // Translates aCodecSettings into mInfo/mCodecSpecific and bitrate bounds.
+  bool SetupConfig(const webrtc::VideoCodec* aCodecSettings);
+  already_AddRefed<MediaDataEncoder> CreateEncoder(
+      const webrtc::VideoCodec* aCodecSettings);
+  bool InitEncoder();
+
+  const RefPtr<TaskQueue> mTaskQueue;
+  const RefPtr<PEMFactory> mFactory;
+  RefPtr<MediaDataEncoder> mEncoder;
+
+  Mutex mCallbackMutex MOZ_UNANNOTATED;  // Protects mCallback and mError.
+  webrtc::EncodedImageCallback* mCallback = nullptr;
+  MediaResult mError = NS_OK;
+
+  // Encoder configuration derived from the SDP format / codec settings.
+  VideoInfo mInfo;
+  webrtc::SdpVideoFormat::Parameters mFormatParams;
+  webrtc::CodecSpecificInfo mCodecSpecific;
+  // Smooths requested vs. actual encoder output bitrate.
+  webrtc::BitrateAdjuster mBitrateAdjuster;
+  uint32_t mMaxFrameRate;
+  uint32_t mMinBitrateBps;
+  uint32_t mMaxBitrateBps;
+};
+
+} // namespace mozilla
+
+#endif // WebrtcMediaDataEncoderCodec_h__
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.cpp b/dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.cpp
new file mode 100644
index 0000000000..6acec07ea3
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.cpp
@@ -0,0 +1,139 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "WebrtcVideoCodecFactory.h"
+
+#include "GmpVideoCodec.h"
+#include "MediaDataCodec.h"
+#include "VideoConduit.h"
+#include "mozilla/StaticPrefs_media.h"
+
+// libwebrtc includes
+#include "api/rtp_headers.h"
+#include "api/video_codecs/video_codec.h"
+#include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
+#include "media/engine/encoder_simulcast_proxy.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp9/include/vp9.h"
+
+namespace mozilla {
+
+// Creates a decoder for aFormat. Preference order: a platform
+// MediaDataDecoder, then the GMP plugin decoder (H264 only), then the
+// bundled libvpx software decoders (VP8/VP9). May return nullptr for
+// unknown codec types.
+std::unique_ptr<webrtc::VideoDecoder>
+WebrtcVideoDecoderFactory::CreateVideoDecoder(
+    const webrtc::SdpVideoFormat& aFormat) {
+  std::unique_ptr<webrtc::VideoDecoder> decoder;
+  auto type = webrtc::PayloadStringToCodecType(aFormat.name);
+
+  // Attempt to create a decoder using MediaDataDecoder.
+  decoder.reset(MediaDataCodec::CreateDecoder(type, mTrackingId));
+  if (decoder) {
+    return decoder;
+  }
+
+  switch (type) {
+    case webrtc::VideoCodecType::kVideoCodecH264: {
+      // Get an external (GMP plugin) decoder and forward its plugin
+      // lifecycle events to our listeners.
+      auto gmpDecoder =
+          WrapUnique(GmpVideoCodec::CreateDecoder(mPCHandle, mTrackingId));
+      mCreatedGmpPluginEvent.Forward(*gmpDecoder->InitPluginEvent());
+      mReleasedGmpPluginEvent.Forward(*gmpDecoder->ReleasePluginEvent());
+      decoder.reset(gmpDecoder.release());
+      break;
+    }
+
+    // Use libvpx decoders as fallbacks. |decoder| is necessarily null at
+    // this point (we returned above otherwise), so create them
+    // unconditionally.
+    case webrtc::VideoCodecType::kVideoCodecVP8:
+      decoder = webrtc::VP8Decoder::Create();
+      break;
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+      decoder = webrtc::VP9Decoder::Create();
+      break;
+
+    default:
+      break;
+  }
+
+  return decoder;
+}
+
+// Creates an encoder for aFormat, or nullptr when the internal factory does
+// not support the format. VP8 is routed through the simulcast proxy so a
+// single-stream encoder can serve simulcast configurations.
+std::unique_ptr<webrtc::VideoEncoder>
+WebrtcVideoEncoderFactory::CreateVideoEncoder(
+    const webrtc::SdpVideoFormat& aFormat) {
+  if (!mInternalFactory->Supports(aFormat)) {
+    return nullptr;
+  }
+  const auto codecType = webrtc::PayloadStringToCodecType(aFormat.name);
+  if (codecType == webrtc::VideoCodecType::kVideoCodecVP8) {
+    // XXX We might be able to use the simulcast proxy for more codecs, but
+    // that requires testing.
+    return std::make_unique<webrtc::EncoderSimulcastProxy>(
+        mInternalFactory.get(), aFormat);
+  }
+  return mInternalFactory->CreateVideoEncoder(aFormat);
+}
+
+// This factory only recognizes the VP8, VP9 and H264 payload names.
+bool WebrtcVideoEncoderFactory::InternalFactory::Supports(
+    const webrtc::SdpVideoFormat& aFormat) {
+  const auto codecType = webrtc::PayloadStringToCodecType(aFormat.name);
+  return codecType == webrtc::VideoCodecType::kVideoCodecVP8 ||
+         codecType == webrtc::VideoCodecType::kVideoCodecVP9 ||
+         codecType == webrtc::VideoCodecType::kVideoCodecH264;
+}
+
+// Creates the concrete encoder for aFormat, which must be supported (see
+// Supports()). A platform MediaDataEncoder is preferred; when the
+// media.webrtc.software_encoder_fallback pref is set, the platform encoder
+// is wrapped so libwebrtc can fall back to a software encoder (GMP plugin
+// for H264, libvpx for VP8/VP9) on failure.
+std::unique_ptr<webrtc::VideoEncoder>
+WebrtcVideoEncoderFactory::InternalFactory::CreateVideoEncoder(
+    const webrtc::SdpVideoFormat& aFormat) {
+  MOZ_ASSERT(Supports(aFormat));
+
+  std::unique_ptr<webrtc::VideoEncoder> platformEncoder;
+  platformEncoder.reset(MediaDataCodec::CreateEncoder(aFormat));
+  const bool fallback = StaticPrefs::media_webrtc_software_encoder_fallback();
+  if (!fallback && platformEncoder) {
+    return platformEncoder;
+  }
+
+  std::unique_ptr<webrtc::VideoEncoder> encoder;
+  switch (webrtc::PayloadStringToCodecType(aFormat.name)) {
+    case webrtc::VideoCodecType::kVideoCodecH264: {
+      // Get an external (GMP plugin) encoder and forward its plugin
+      // lifecycle events to our listeners.
+      auto gmpEncoder =
+          WrapUnique(GmpVideoCodec::CreateEncoder(aFormat, mPCHandle));
+      mCreatedGmpPluginEvent.Forward(*gmpEncoder->InitPluginEvent());
+      mReleasedGmpPluginEvent.Forward(*gmpEncoder->ReleasePluginEvent());
+      encoder.reset(gmpEncoder.release());
+      break;
+    }
+    // libvpx fallbacks. |encoder| is necessarily null before this switch, so
+    // create them unconditionally.
+    case webrtc::VideoCodecType::kVideoCodecVP8:
+      encoder = webrtc::VP8Encoder::Create();
+      break;
+    case webrtc::VideoCodecType::kVideoCodecVP9:
+      encoder = webrtc::VP9Encoder::Create();
+      break;
+
+    default:
+      break;
+  }
+  if (fallback && encoder && platformEncoder) {
+    // Prefer the platform encoder, transparently switching to the software
+    // one if the platform encoder fails at runtime.
+    return webrtc::CreateVideoEncoderSoftwareFallbackWrapper(
+        std::move(encoder), std::move(platformEncoder), false);
+  }
+  if (platformEncoder) {
+    return platformEncoder;
+  }
+  return encoder;
+}
+
+} // namespace mozilla
diff --git a/dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.h b/dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.h
new file mode 100644
index 0000000000..ef5765043f
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.h
@@ -0,0 +1,124 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_WEBRTCVIDEOCODECFACTORY_H_
+#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_WEBRTCVIDEOCODECFACTORY_H_
+
+#include "api/video_codecs/video_decoder_factory.h"
+#include "api/video_codecs/video_encoder_factory.h"
+#include "MediaEventSource.h"
+#include "PerformanceRecorder.h"
+
+namespace mozilla {
+// Pure interface for objects exposing GMP (Gecko Media Plugin) creation and
+// release events. The members were implicitly private (class default) and the
+// interface lacked a destructor; make the API public and give the polymorphic
+// base a virtual destructor so deletion through this interface is safe.
+class GmpPluginNotifierInterface {
+ public:
+  virtual ~GmpPluginNotifierInterface() = default;
+  // Disconnects all listeners from both events.
+  virtual void DisconnectAll() = 0;
+  virtual MediaEventSource<uint64_t>& CreatedGmpPluginEvent() = 0;
+  virtual MediaEventSource<uint64_t>& ReleasedGmpPluginEvent() = 0;
+};
+
+// Default implementation of GmpPluginNotifierInterface that owns two
+// MediaEventForwarders bound to a single serial event target.
+// NOTE(review): the destructor is public and non-virtual — confirm derived
+// factories are never deleted through a GmpPluginNotifier*.
+class GmpPluginNotifier : public GmpPluginNotifierInterface {
+ public:
+  explicit GmpPluginNotifier(nsCOMPtr<nsISerialEventTarget> aOwningThread)
+      : mOwningThread(std::move(aOwningThread)),
+        mCreatedGmpPluginEvent(mOwningThread),
+        mReleasedGmpPluginEvent(mOwningThread) {}
+
+  ~GmpPluginNotifier() = default;
+
+  // Must be called on mOwningThread.
+  void DisconnectAll() override {
+    MOZ_ASSERT(mOwningThread->IsOnCurrentThread());
+    mCreatedGmpPluginEvent.DisconnectAll();
+    mReleasedGmpPluginEvent.DisconnectAll();
+  }
+
+  MediaEventSource<uint64_t>& CreatedGmpPluginEvent() override {
+    return mCreatedGmpPluginEvent;
+  }
+
+  MediaEventSource<uint64_t>& ReleasedGmpPluginEvent() override {
+    return mReleasedGmpPluginEvent;
+  }
+
+ protected:
+  // Serial event target the forwarders dispatch on.
+  const nsCOMPtr<nsISerialEventTarget> mOwningThread;
+  MediaEventForwarder<uint64_t> mCreatedGmpPluginEvent;
+  MediaEventForwarder<uint64_t> mReleasedGmpPluginEvent;
+};
+
+// webrtc::VideoDecoderFactory backed by platform, GMP and libvpx decoders.
+// Forwards GMP plugin lifecycle events via GmpPluginNotifier.
+class WebrtcVideoDecoderFactory : public GmpPluginNotifier,
+                                  public webrtc::VideoDecoderFactory {
+ public:
+  WebrtcVideoDecoderFactory(nsCOMPtr<nsISerialEventTarget> aOwningThread,
+                            std::string aPCHandle, TrackingId aTrackingId)
+      : GmpPluginNotifier(std::move(aOwningThread)),
+        mPCHandle(std::move(aPCHandle)),
+        mTrackingId(std::move(aTrackingId)) {}
+
+  // Intentionally unimplemented: Gecko never queries supported formats
+  // through this factory, so reaching this is a logic error.
+  std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+    MOZ_CRASH("Unexpected call");
+    return std::vector<webrtc::SdpVideoFormat>();
+  }
+
+  std::unique_ptr<webrtc::VideoDecoder> CreateVideoDecoder(
+      const webrtc::SdpVideoFormat& aFormat) override;
+
+ private:
+  // Handle identifying the owning PeerConnection, passed to GMP decoders.
+  const std::string mPCHandle;
+  const TrackingId mTrackingId;
+};
+
+// webrtc::VideoEncoderFactory that delegates to an InternalFactory and, for
+// VP8, wraps the result in an EncoderSimulcastProxy (see the .cpp). The GMP
+// notifier interface is satisfied by forwarding to the internal factory.
+class WebrtcVideoEncoderFactory : public GmpPluginNotifierInterface,
+                                  public webrtc::VideoEncoderFactory {
+  // The factory that actually creates encoders (platform, GMP or libvpx);
+  // kept separate so the simulcast proxy can own/call it per stream.
+  class InternalFactory : public GmpPluginNotifier,
+                          public webrtc::VideoEncoderFactory {
+   public:
+    InternalFactory(nsCOMPtr<nsISerialEventTarget> aOwningThread,
+                    std::string aPCHandle)
+        : GmpPluginNotifier(std::move(aOwningThread)),
+          mPCHandle(std::move(aPCHandle)) {}
+
+    // Intentionally unimplemented: never queried in Gecko.
+    std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+      MOZ_CRASH("Unexpected call");
+      return std::vector<webrtc::SdpVideoFormat>();
+    }
+
+    std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+        const webrtc::SdpVideoFormat& aFormat) override;
+
+    // True for the codec payload names this factory can encode.
+    bool Supports(const webrtc::SdpVideoFormat& aFormat);
+
+   private:
+    // Handle identifying the owning PeerConnection, passed to GMP encoders.
+    const std::string mPCHandle;
+  };
+
+ public:
+  explicit WebrtcVideoEncoderFactory(
+      nsCOMPtr<nsISerialEventTarget> aOwningThread, std::string aPCHandle)
+      : mInternalFactory(MakeUnique<InternalFactory>(std::move(aOwningThread),
+                                                     std::move(aPCHandle))) {}
+
+  // Intentionally unimplemented: never queried in Gecko.
+  std::vector<webrtc::SdpVideoFormat> GetSupportedFormats() const override {
+    MOZ_CRASH("Unexpected call");
+    return std::vector<webrtc::SdpVideoFormat>();
+  }
+
+  std::unique_ptr<webrtc::VideoEncoder> CreateVideoEncoder(
+      const webrtc::SdpVideoFormat& aFormat) override;
+
+  // GmpPluginNotifierInterface: delegate to the internal factory, which owns
+  // the actual event forwarders.
+  void DisconnectAll() override { mInternalFactory->DisconnectAll(); }
+
+  MediaEventSource<uint64_t>& CreatedGmpPluginEvent() override {
+    return mInternalFactory->CreatedGmpPluginEvent();
+  }
+  MediaEventSource<uint64_t>& ReleasedGmpPluginEvent() override {
+    return mInternalFactory->ReleasedGmpPluginEvent();
+  }
+
+ private:
+  const UniquePtr<InternalFactory> mInternalFactory;
+};
+} // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/moz.build b/dom/media/webrtc/libwebrtcglue/moz.build
new file mode 100644
index 0000000000..62bac2ffe6
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/moz.build
@@ -0,0 +1,35 @@
+# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
+# vim: set filetype=python:
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# Shared build flags/defines for code that links against libwebrtc.
+include("/dom/media/webrtc/third_party_build/webrtc.mozbuild")
+
+LOCAL_INCLUDES += [
+    "!/ipc/ipdl/_ipdlheaders",
+    "/dom/media/gmp",  # for GMPLog.h,
+    "/dom/media/webrtc",
+    "/ipc/chromium/src",
+    "/media/libyuv/libyuv/include",
+    "/media/webrtc",
+    "/third_party/libsrtp/src/include",
+    "/third_party/libwebrtc",
+    "/third_party/libwebrtc/third_party/abseil-cpp",
+]
+
+# Compiled as a single unified translation unit where possible; keep headers
+# include-what-you-use clean so unification does not mask missing includes.
+UNIFIED_SOURCES += [
+    "AudioConduit.cpp",
+    "GmpVideoCodec.cpp",
+    "MediaConduitInterface.cpp",
+    "MediaDataCodec.cpp",
+    "SystemTime.cpp",
+    "VideoConduit.cpp",
+    "VideoStreamFactory.cpp",
+    "WebrtcCallWrapper.cpp",
+    "WebrtcGmpVideoCodec.cpp",
+    "WebrtcMediaDataDecoderCodec.cpp",
+    "WebrtcMediaDataEncoderCodec.cpp",
+    "WebrtcVideoCodecFactory.cpp",
+]
+
+# Linked into the main Gecko library.
+FINAL_LIBRARY = "xul"