From 26a029d407be480d791972afb5975cf62c9360a6 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Fri, 19 Apr 2024 02:47:55 +0200
Subject: Adding upstream version 124.0.1.

Signed-off-by: Daniel Baumann
---
 dom/media/webrtc/libwebrtcglue/AudioConduit.cpp | 1050 ++++++++++
 dom/media/webrtc/libwebrtcglue/AudioConduit.h | 299 +++
 dom/media/webrtc/libwebrtcglue/CallWorkerThread.h | 116 ++
 dom/media/webrtc/libwebrtcglue/CodecConfig.h | 237 +++
 .../webrtc/libwebrtcglue/FrameTransformer.cpp | 87 +
 dom/media/webrtc/libwebrtcglue/FrameTransformer.h | 79 +
 .../webrtc/libwebrtcglue/FrameTransformerProxy.cpp | 258 +++
 .../webrtc/libwebrtcglue/FrameTransformerProxy.h | 124 ++
 dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp | 22 +
 dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h | 27 +
 .../webrtc/libwebrtcglue/MediaConduitControl.h | 79 +
 .../webrtc/libwebrtcglue/MediaConduitErrors.h | 46 +
 .../webrtc/libwebrtcglue/MediaConduitInterface.cpp | 152 ++
 .../webrtc/libwebrtcglue/MediaConduitInterface.h | 499 +++++
 dom/media/webrtc/libwebrtcglue/MediaDataCodec.cpp | 70 +
 dom/media/webrtc/libwebrtcglue/MediaDataCodec.h | 32 +
 dom/media/webrtc/libwebrtcglue/RtpRtcpConfig.h | 24 +
 dom/media/webrtc/libwebrtcglue/RunningStat.h | 48 +
 dom/media/webrtc/libwebrtcglue/SystemTime.cpp | 60 +
 dom/media/webrtc/libwebrtcglue/SystemTime.h | 44 +
 dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h | 181 ++
 dom/media/webrtc/libwebrtcglue/VideoConduit.cpp | 2083 ++++++++++++++++++++
 dom/media/webrtc/libwebrtcglue/VideoConduit.h | 496 +++++
 .../webrtc/libwebrtcglue/VideoStreamFactory.cpp | 399 ++++
 .../webrtc/libwebrtcglue/VideoStreamFactory.h | 132 ++
 .../webrtc/libwebrtcglue/WebrtcCallWrapper.cpp | 105 +
 dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.h | 114 ++
 .../webrtc/libwebrtcglue/WebrtcGmpVideoCodec.cpp | 1043 ++++++++++
 .../webrtc/libwebrtcglue/WebrtcGmpVideoCodec.h | 507 +++++
 dom/media/webrtc/libwebrtcglue/WebrtcImageBuffer.h | 53 +
 .../libwebrtcglue/WebrtcMediaDataDecoderCodec.cpp | 209 ++
 .../libwebrtcglue/WebrtcMediaDataDecoderCodec.h | 70 +
 .../libwebrtcglue/WebrtcMediaDataEncoderCodec.cpp | 535 +++++
 .../libwebrtcglue/WebrtcMediaDataEncoderCodec.h | 78 +
 .../libwebrtcglue/WebrtcVideoCodecFactory.cpp | 139 ++
 .../webrtc/libwebrtcglue/WebrtcVideoCodecFactory.h | 124 ++
 dom/media/webrtc/libwebrtcglue/moz.build | 37 +
 37 files changed, 9658 insertions(+)
 create mode 100644 dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/AudioConduit.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/CallWorkerThread.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/CodecConfig.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/FrameTransformer.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/FrameTransformer.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/MediaConduitControl.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/MediaDataCodec.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/MediaDataCodec.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/RtpRtcpConfig.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/RunningStat.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/SystemTime.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/SystemTime.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/TaskQueueWrapper.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/VideoConduit.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/VideoConduit.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/VideoStreamFactory.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/VideoStreamFactory.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcCallWrapper.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcGmpVideoCodec.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcImageBuffer.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcMediaDataDecoderCodec.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcMediaDataEncoderCodec.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.cpp
 create mode 100644 dom/media/webrtc/libwebrtcglue/WebrtcVideoCodecFactory.h
 create mode 100644 dom/media/webrtc/libwebrtcglue/moz.build

(limited to 'dom/media/webrtc/libwebrtcglue')

diff --git a/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp b/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
new file mode 100644
index 0000000000..49f049cd21
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/AudioConduit.cpp
@@ -0,0 +1,1050 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/.
*/ + +#include "AudioConduit.h" + +#include "common/browser_logging/CSFLog.h" +#include "MediaConduitControl.h" +#include "transport/SrtpFlow.h" // For SRTP_MAX_EXPANSION +#include "WebrtcCallWrapper.h" +#include "libwebrtcglue/FrameTransformer.h" +#include +#include "CodecConfig.h" +#include "mozilla/StateMirroring.h" +#include +#include "mozilla/MozPromise.h" +#include "mozilla/RefPtr.h" +#include "mozilla/RWLock.h" + +// libwebrtc includes +#include "api/audio_codecs/builtin_audio_encoder_factory.h" +#include "audio/audio_receive_stream.h" +#include "media/base/media_constants.h" +#include "rtc_base/ref_counted_object.h" + +#include "api/audio/audio_frame.h" +#include "api/audio/audio_mixer.h" +#include "api/audio_codecs/audio_format.h" +#include "api/call/transport.h" +#include "api/media_types.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/transport/rtp/rtp_source.h" +#include +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/call_basic_stats.h" +#include "domstubs.h" +#include "jsapi/RTCStatsReport.h" +#include +#include "MainThreadUtils.h" +#include +#include "MediaConduitErrors.h" +#include "MediaConduitInterface.h" +#include +#include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "mozilla/Assertions.h" +#include "mozilla/Atomics.h" +#include "mozilla/Maybe.h" +#include "mozilla/StateWatching.h" +#include "nsCOMPtr.h" +#include "nsError.h" +#include "nsISerialEventTarget.h" +#include "nsThreadUtils.h" +#include "rtc_base/copy_on_write_buffer.h" +#include "rtc_base/network/sent_packet.h" +#include +#include +#include "transport/mediapacket.h" + +// for ntohs +#ifdef HAVE_NETINET_IN_H +# include +#elif defined XP_WIN +# include +#endif + +#ifdef MOZ_WIDGET_ANDROID +# include "AndroidBridge.h" +#endif + +namespace mozilla { + +namespace { + +static const char* acLogTag = "WebrtcAudioSessionConduit"; +#ifdef LOGTAG +# undef LOGTAG +#endif +#define LOGTAG acLogTag + +using namespace cricket; +using LocalDirection = MediaSessionConduitLocalDirection; + +const char kCodecParamCbr[] = "cbr"; + +} // namespace + +/** + * Factory Method for AudioConduit + */ +RefPtr AudioSessionConduit::Create( + RefPtr aCall, + nsCOMPtr aStsThread) { + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); + MOZ_ASSERT(NS_IsMainThread()); + + return MakeRefPtr(std::move(aCall), + std::move(aStsThread)); +} + +#define INIT_MIRROR(name, val) \ + name(aCallThread, val, "WebrtcAudioConduit::Control::" #name " (Mirror)") +WebrtcAudioConduit::Control::Control(const RefPtr& aCallThread) + : INIT_MIRROR(mReceiving, false), + INIT_MIRROR(mTransmitting, false), + INIT_MIRROR(mLocalSsrcs, Ssrcs()), + INIT_MIRROR(mLocalCname, std::string()), + INIT_MIRROR(mMid, std::string()), + INIT_MIRROR(mRemoteSsrc, 0), + INIT_MIRROR(mSyncGroup, std::string()), + INIT_MIRROR(mLocalRecvRtpExtensions, RtpExtList()), + INIT_MIRROR(mLocalSendRtpExtensions, RtpExtList()), + INIT_MIRROR(mSendCodec, Nothing()), + INIT_MIRROR(mRecvCodecs, std::vector()), + INIT_MIRROR(mFrameTransformerProxySend, nullptr), + INIT_MIRROR(mFrameTransformerProxyRecv, nullptr) {} +#undef INIT_MIRROR + +RefPtr WebrtcAudioConduit::Shutdown() { + MOZ_ASSERT(NS_IsMainThread()); + + mControl.mOnDtmfEventListener.DisconnectIfExists(); + + return InvokeAsync( + mCallThread, "WebrtcAudioConduit::Shutdown (main thread)", + [this, self = RefPtr(this)] { + mControl.mReceiving.DisconnectIfConnected(); + mControl.mTransmitting.DisconnectIfConnected(); + mControl.mLocalSsrcs.DisconnectIfConnected(); 
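[Editorial sketch, not part of the patch] The Control struct above mirrors control-plane state (receiving/transmitting flags, SSRCs, codecs, frame-transformer proxies) from canonical values owned elsewhere via Mozilla's StateMirroring machinery, and Shutdown() disconnects each mirror before tearing the streams down. The sketch below is a simplified, single-threaded stand-in for that Canonical/Mirror idea, not Mozilla's real API (whose updates are dispatched to the mirror's owning thread), showing how a canonical value fans out to connected mirrors and triggers a watch callback:

```cpp
// Simplified, standalone analogue of the Canonical/Mirror pattern; all types
// here are illustrative stand-ins, not mozilla/StateMirroring.h.
#include <functional>
#include <iostream>
#include <vector>

template <typename T>
class Mirror;

template <typename T>
class Canonical {
 public:
  explicit Canonical(T aInitial) : mValue(std::move(aInitial)) {}
  void ConnectMirror(Mirror<T>* aMirror);
  void Set(T aValue);

 private:
  T mValue;
  std::vector<Mirror<T>*> mMirrors;
};

template <typename T>
class Mirror {
 public:
  explicit Mirror(T aInitial) : mValue(std::move(aInitial)) {}
  // The real code dispatches this update to the mirror's owning thread;
  // here it is applied synchronously for brevity.
  void Update(const T& aValue) {
    mValue = aValue;
    if (mOnChange) mOnChange();
  }
  void WatchWith(std::function<void()> aCallback) { mOnChange = std::move(aCallback); }
  const T& Ref() const { return mValue; }

 private:
  T mValue;
  std::function<void()> mOnChange;
};

template <typename T>
void Canonical<T>::ConnectMirror(Mirror<T>* aMirror) {
  mMirrors.push_back(aMirror);
  aMirror->Update(mValue);  // a mirror receives the current value on connect
}

template <typename T>
void Canonical<T>::Set(T aValue) {
  mValue = std::move(aValue);
  for (auto* mirror : mMirrors) mirror->Update(mValue);
}

int main() {
  Canonical<bool> canonicalTransmitting(false);
  Mirror<bool> mirroredTransmitting(false);
  mirroredTransmitting.WatchWith(
      [&] { std::cout << "transmitting=" << mirroredTransmitting.Ref() << "\n"; });
  canonicalTransmitting.ConnectMirror(&mirroredTransmitting);  // prints transmitting=0
  canonicalTransmitting.Set(true);                             // prints transmitting=1
}
```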
+ mControl.mLocalCname.DisconnectIfConnected(); + mControl.mMid.DisconnectIfConnected(); + mControl.mRemoteSsrc.DisconnectIfConnected(); + mControl.mSyncGroup.DisconnectIfConnected(); + mControl.mLocalRecvRtpExtensions.DisconnectIfConnected(); + mControl.mLocalSendRtpExtensions.DisconnectIfConnected(); + mControl.mSendCodec.DisconnectIfConnected(); + mControl.mRecvCodecs.DisconnectIfConnected(); + mControl.mFrameTransformerProxySend.DisconnectIfConnected(); + mControl.mFrameTransformerProxyRecv.DisconnectIfConnected(); + mWatchManager.Shutdown(); + + { + AutoWriteLock lock(mLock); + DeleteSendStream(); + DeleteRecvStream(); + } + + return GenericPromise::CreateAndResolve( + true, "WebrtcAudioConduit::Shutdown (call thread)"); + }); +} + +WebrtcAudioConduit::WebrtcAudioConduit( + RefPtr aCall, nsCOMPtr aStsThread) + : mCall(std::move(aCall)), + mSendTransport(this), + mRecvTransport(this), + mRecvStream(nullptr), + mSendStreamConfig(&mSendTransport), + mSendStream(nullptr), + mSendStreamRunning(false), + mRecvStreamRunning(false), + mDtmfEnabled(false), + mLock("WebrtcAudioConduit::mLock"), + mCallThread(mCall->mCallThread), + mStsThread(std::move(aStsThread)), + mControl(mCall->mCallThread), + mWatchManager(this, mCall->mCallThread) { + mRecvStreamConfig.rtcp_send_transport = &mRecvTransport; + mRecvStreamConfig.rtp.rtcp_event_observer = this; +} + +/** + * Destruction defines for our super-classes + */ +WebrtcAudioConduit::~WebrtcAudioConduit() { + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); + MOZ_ASSERT(!mSendStream && !mRecvStream, + "Call DeleteStreams prior to ~WebrtcAudioConduit."); +} + +#define CONNECT(aCanonical, aMirror) \ + do { \ + /* Ensure the watchmanager is wired up before the mirror receives its \ + * initial mirrored value. */ \ + mCall->mCallThread->DispatchStateChange( \ + NS_NewRunnableFunction(__func__, [this, self = RefPtr(this)] { \ + mWatchManager.Watch(aMirror, \ + &WebrtcAudioConduit::OnControlConfigChange); \ + })); \ + (aCanonical).ConnectMirror(&(aMirror)); \ + } while (0) + +void WebrtcAudioConduit::InitControl(AudioConduitControlInterface* aControl) { + MOZ_ASSERT(NS_IsMainThread()); + + CONNECT(aControl->CanonicalReceiving(), mControl.mReceiving); + CONNECT(aControl->CanonicalTransmitting(), mControl.mTransmitting); + CONNECT(aControl->CanonicalLocalSsrcs(), mControl.mLocalSsrcs); + CONNECT(aControl->CanonicalLocalCname(), mControl.mLocalCname); + CONNECT(aControl->CanonicalMid(), mControl.mMid); + CONNECT(aControl->CanonicalRemoteSsrc(), mControl.mRemoteSsrc); + CONNECT(aControl->CanonicalSyncGroup(), mControl.mSyncGroup); + CONNECT(aControl->CanonicalLocalRecvRtpExtensions(), + mControl.mLocalRecvRtpExtensions); + CONNECT(aControl->CanonicalLocalSendRtpExtensions(), + mControl.mLocalSendRtpExtensions); + CONNECT(aControl->CanonicalAudioSendCodec(), mControl.mSendCodec); + CONNECT(aControl->CanonicalAudioRecvCodecs(), mControl.mRecvCodecs); + CONNECT(aControl->CanonicalFrameTransformerProxySend(), + mControl.mFrameTransformerProxySend); + CONNECT(aControl->CanonicalFrameTransformerProxyRecv(), + mControl.mFrameTransformerProxyRecv); + mControl.mOnDtmfEventListener = aControl->OnDtmfEvent().Connect( + mCall->mCallThread, this, &WebrtcAudioConduit::OnDtmfEvent); +} + +#undef CONNECT + +void WebrtcAudioConduit::OnDtmfEvent(const DtmfEvent& aEvent) { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mSendStream); + MOZ_ASSERT(mDtmfEnabled); + mSendStream->SendTelephoneEvent(aEvent.mPayloadType, aEvent.mPayloadFrequency, + aEvent.mEventCode, 
aEvent.mLengthMs); +} + +void WebrtcAudioConduit::OnControlConfigChange() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + + bool recvStreamReconfigureNeeded = false; + bool sendStreamReconfigureNeeded = false; + bool recvStreamRecreationNeeded = false; + bool sendStreamRecreationNeeded = false; + + if (!mControl.mLocalSsrcs.Ref().empty()) { + if (mControl.mLocalSsrcs.Ref()[0] != mSendStreamConfig.rtp.ssrc) { + sendStreamRecreationNeeded = true; + + // For now... + recvStreamRecreationNeeded = true; + } + mRecvStreamConfig.rtp.local_ssrc = mControl.mLocalSsrcs.Ref()[0]; + mSendStreamConfig.rtp.ssrc = mControl.mLocalSsrcs.Ref()[0]; + + // In the future we can do this instead of recreating the recv stream: + // if (mRecvStream) { + // mCall->Call()->OnLocalSsrcUpdated(mRecvStream, + // mControl.mLocalSsrcs.Ref()[0]); + // } + } + + if (mControl.mLocalCname.Ref() != mSendStreamConfig.rtp.c_name) { + mSendStreamConfig.rtp.c_name = mControl.mLocalCname.Ref(); + sendStreamReconfigureNeeded = true; + } + + if (mControl.mMid.Ref() != mSendStreamConfig.rtp.mid) { + mSendStreamConfig.rtp.mid = mControl.mMid.Ref(); + sendStreamReconfigureNeeded = true; + } + + if (mControl.mRemoteSsrc.Ref() != mControl.mConfiguredRemoteSsrc) { + mRecvStreamConfig.rtp.remote_ssrc = mControl.mConfiguredRemoteSsrc = + mControl.mRemoteSsrc.Ref(); + recvStreamRecreationNeeded = true; + } + + if (mControl.mSyncGroup.Ref() != mRecvStreamConfig.sync_group) { + mRecvStreamConfig.sync_group = mControl.mSyncGroup.Ref(); + // For now... + recvStreamRecreationNeeded = true; + // In the future we can do this instead of recreating the recv stream: + // if (mRecvStream) { + // mCall->Call()->OnUpdateSyncGroup(mRecvStream, + // mRecvStreamConfig.sync_group); + // } + } + + if (auto filteredExtensions = FilterExtensions( + LocalDirection::kSend, mControl.mLocalSendRtpExtensions); + filteredExtensions != mSendStreamConfig.rtp.extensions) { + // At the very least, we need a reconfigure. Recreation needed if the + // extmap for any extension has changed, but not for adding/removing + // extensions. 
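[Editorial sketch, not part of the patch] As the comment above notes, any change to the negotiated RTP header extensions triggers a send-stream reconfigure, but only a changed extmap id for an already-present URI forces a full stream recreation; adding or removing extensions does not. A standalone sketch of just that rule, where RtpExt is an illustrative stand-in for webrtc::RtpExtension:

```cpp
#include <string>
#include <vector>

struct RtpExt {
  std::string uri;
  int id;
};

// Returns true only if an extension URI present in both lists changed its id.
bool NeedsRecreation(const std::vector<RtpExt>& aOld,
                     const std::vector<RtpExt>& aNew) {
  for (const auto& newExt : aNew) {
    for (const auto& oldExt : aOld) {
      if (newExt.uri == oldExt.uri) {
        if (newExt.id != oldExt.id) {
          return true;  // same extension negotiated under a new extmap id
        }
        break;  // same uri, same id: nothing more to check for this entry
      }
    }
  }
  return false;  // only additions/removals: a reconfigure is enough
}
```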
+ sendStreamReconfigureNeeded = true; + + for (const auto& newExt : filteredExtensions) { + if (sendStreamRecreationNeeded) { + break; + } + for (const auto& oldExt : mSendStreamConfig.rtp.extensions) { + if (newExt.uri == oldExt.uri) { + if (newExt.id != oldExt.id) { + sendStreamRecreationNeeded = true; + } + // We're done handling newExt, one way or another + break; + } + } + } + + mSendStreamConfig.rtp.extensions = std::move(filteredExtensions); + } + + mControl.mSendCodec.Ref().apply([&](const auto& aConfig) { + if (mControl.mConfiguredSendCodec != mControl.mSendCodec.Ref()) { + mControl.mConfiguredSendCodec = mControl.mSendCodec; + if (ValidateCodecConfig(aConfig, true) == kMediaConduitNoError) { + mSendStreamConfig.encoder_factory = + webrtc::CreateBuiltinAudioEncoderFactory(); + + webrtc::AudioSendStream::Config::SendCodecSpec spec( + aConfig.mType, CodecConfigToLibwebrtcFormat(aConfig)); + mSendStreamConfig.send_codec_spec = spec; + + mDtmfEnabled = aConfig.mDtmfEnabled; + sendStreamReconfigureNeeded = true; + } + } + }); + + if (mControl.mConfiguredRecvCodecs != mControl.mRecvCodecs.Ref()) { + mControl.mConfiguredRecvCodecs = mControl.mRecvCodecs; + mRecvStreamConfig.decoder_factory = mCall->mAudioDecoderFactory; + mRecvStreamConfig.decoder_map.clear(); + + for (const auto& codec : mControl.mRecvCodecs.Ref()) { + if (ValidateCodecConfig(codec, false) != kMediaConduitNoError) { + continue; + } + mRecvStreamConfig.decoder_map.emplace( + codec.mType, CodecConfigToLibwebrtcFormat(codec)); + } + + recvStreamReconfigureNeeded = true; + } + + if (mControl.mConfiguredFrameTransformerProxySend.get() != + mControl.mFrameTransformerProxySend.Ref().get()) { + mControl.mConfiguredFrameTransformerProxySend = + mControl.mFrameTransformerProxySend.Ref(); + if (!mSendStreamConfig.frame_transformer) { + mSendStreamConfig.frame_transformer = + new rtc::RefCountedObject(false); + sendStreamRecreationNeeded = true; + } + static_cast(mSendStreamConfig.frame_transformer.get()) + ->SetProxy(mControl.mConfiguredFrameTransformerProxySend); + } + + if (mControl.mConfiguredFrameTransformerProxyRecv.get() != + mControl.mFrameTransformerProxyRecv.Ref().get()) { + mControl.mConfiguredFrameTransformerProxyRecv = + mControl.mFrameTransformerProxyRecv.Ref(); + if (!mRecvStreamConfig.frame_transformer) { + mRecvStreamConfig.frame_transformer = + new rtc::RefCountedObject(false); + recvStreamRecreationNeeded = true; + } + static_cast(mRecvStreamConfig.frame_transformer.get()) + ->SetProxy(mControl.mConfiguredFrameTransformerProxyRecv); + } + + if (!recvStreamReconfigureNeeded && !sendStreamReconfigureNeeded && + !recvStreamRecreationNeeded && !sendStreamRecreationNeeded && + mControl.mReceiving == mRecvStreamRunning && + mControl.mTransmitting == mSendStreamRunning) { + // No changes applied -- no need to lock. + return; + } + + if (recvStreamRecreationNeeded) { + recvStreamReconfigureNeeded = false; + } + if (sendStreamRecreationNeeded) { + sendStreamReconfigureNeeded = false; + } + + { + AutoWriteLock lock(mLock); + // Recreate/Stop/Start streams as needed. + if (recvStreamRecreationNeeded) { + DeleteRecvStream(); + } + if (mControl.mReceiving) { + CreateRecvStream(); + } + if (sendStreamRecreationNeeded) { + DeleteSendStream(); + } + if (mControl.mTransmitting) { + CreateSendStream(); + } + } + + // We make sure to not hold the lock while stopping/starting/reconfiguring + // streams, so as to not cause deadlocks. 
These methods can cause our platform + // codecs to dispatch sync runnables to main, and main may grab the lock. + + if (mRecvStream && recvStreamReconfigureNeeded) { + MOZ_ASSERT(!recvStreamRecreationNeeded); + mRecvStream->SetDecoderMap(mRecvStreamConfig.decoder_map); + } + + if (mSendStream && sendStreamReconfigureNeeded) { + MOZ_ASSERT(!sendStreamRecreationNeeded); + // TODO: Pass a callback here, so we can react to RTCErrors thrown by + // libwebrtc. + mSendStream->Reconfigure(mSendStreamConfig, nullptr); + } + + if (!mControl.mReceiving) { + StopReceiving(); + } + if (!mControl.mTransmitting) { + StopTransmitting(); + } + + if (mControl.mReceiving) { + StartReceiving(); + } + if (mControl.mTransmitting) { + StartTransmitting(); + } +} + +std::vector WebrtcAudioConduit::GetLocalSSRCs() const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + return std::vector(1, mRecvStreamConfig.rtp.local_ssrc); +} + +bool WebrtcAudioConduit::OverrideRemoteSSRC(uint32_t aSsrc) { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + + if (mRecvStreamConfig.rtp.remote_ssrc == aSsrc) { + return true; + } + mRecvStreamConfig.rtp.remote_ssrc = aSsrc; + + const bool wasReceiving = mRecvStreamRunning; + const bool hadRecvStream = mRecvStream; + + StopReceiving(); + + if (hadRecvStream) { + AutoWriteLock lock(mLock); + DeleteRecvStream(); + CreateRecvStream(); + } + + if (wasReceiving) { + StartReceiving(); + } + return true; +} + +Maybe WebrtcAudioConduit::GetRemoteSSRC() const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + // libwebrtc uses 0 to mean a lack of SSRC. That is not to spec. + return mRecvStreamConfig.rtp.remote_ssrc == 0 + ? Nothing() + : Some(mRecvStreamConfig.rtp.remote_ssrc); +} + +Maybe +WebrtcAudioConduit::GetReceiverStats() const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + if (!mRecvStream) { + return Nothing(); + } + return Some(mRecvStream->GetStats()); +} + +Maybe WebrtcAudioConduit::GetSenderStats() + const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + if (!mSendStream) { + return Nothing(); + } + return Some(mSendStream->GetStats()); +} + +Maybe WebrtcAudioConduit::GetCallStats() const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + if (!mCall->Call()) { + return Nothing(); + } + return Some(mCall->Call()->GetStats()); +} + +void WebrtcAudioConduit::OnRtcpBye() { mRtcpByeEvent.Notify(); } + +void WebrtcAudioConduit::OnRtcpTimeout() { mRtcpTimeoutEvent.Notify(); } + +void WebrtcAudioConduit::SetTransportActive(bool aActive) { + MOZ_ASSERT(mStsThread->IsOnCurrentThread()); + if (mTransportActive == aActive) { + return; + } + + // If false, This stops us from sending + mTransportActive = aActive; + + // We queue this because there might be notifications to these listeners + // pending, and we don't want to drop them by letting this jump ahead of + // those notifications. We move the listeners into the lambda in case the + // transport comes back up before we disconnect them. (The Connect calls + // happen in MediaPipeline) + // We retain a strong reference to ourself, because the listeners are holding + // a non-refcounted reference to us, and moving them into the lambda could + // conceivably allow them to outlive us. 
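[Editorial sketch, not part of the patch] The locking rule spelled out above, taking the write lock only to create or delete stream objects and calling Stop/Start/Reconfigure after it is released, is what keeps platform codec callbacks from deadlocking against threads that grab the lock for reading. A minimal sketch of that discipline, using std::shared_mutex and dummy types in place of mozilla::RWLock and the webrtc stream classes:

```cpp
#include <memory>
#include <mutex>
#include <shared_mutex>

struct Stream {
  void Start() {}
  void Stop() {}
};

class Conduit {
 public:
  void ApplyConfigChange(bool aRecreationNeeded, bool aShouldRun) {
    {
      // Write lock held only while the stream pointer is swapped.
      std::unique_lock lock(mLock);
      if (aRecreationNeeded) {
        mStream.reset();
      }
      if (aShouldRun && !mStream) {
        mStream = std::make_unique<Stream>();
      }
    }
    // Start/Stop run after the lock is dropped; they may bounce work to other
    // threads that themselves take the lock, so holding it here could deadlock.
    // (In the real code only the owning call thread touches the pointer here.)
    if (mStream) {
      aShouldRun ? mStream->Start() : mStream->Stop();
    }
  }

  bool IsRunning() {
    std::shared_lock lock(mLock);  // readers (e.g. an audio thread) share the lock
    return mStream != nullptr;
  }

 private:
  std::shared_mutex mLock;
  std::unique_ptr<Stream> mStream;
};
```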
+ if (!aActive) { + MOZ_ALWAYS_SUCCEEDS(mCallThread->Dispatch(NS_NewRunnableFunction( + __func__, + [self = RefPtr(this), + recvRtpListener = std::move(mReceiverRtpEventListener)]() mutable { + recvRtpListener.DisconnectIfExists(); + }))); + } +} + +// AudioSessionConduit Implementation +MediaConduitErrorCode WebrtcAudioConduit::SendAudioFrame( + std::unique_ptr frame) { + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); + // Following checks need to be performed + // 1. Non null audio buffer pointer, and + // 2. Valid sample rate, and + // 3. Appropriate Sample Length for 10 ms audio-frame. This represents the + // block size used upstream for processing. + // Ex: for 16000 sample rate , valid block-length is 160. + // Similarly for 32000 sample rate, valid block length is 320. + + if (!frame->data() || + (IsSamplingFreqSupported(frame->sample_rate_hz()) == false) || + ((frame->samples_per_channel() % (frame->sample_rate_hz() / 100) != 0))) { + CSFLogError(LOGTAG, "%s Invalid Parameters ", __FUNCTION__); + MOZ_ASSERT(PR_FALSE); + return kMediaConduitMalformedArgument; + } + + // This is the AudioProxyThread, blocking it for a bit is fine. + AutoReadLock lock(mLock); + if (!mSendStreamRunning) { + CSFLogError(LOGTAG, "%s Engine not transmitting ", __FUNCTION__); + return kMediaConduitSessionNotInited; + } + + mSendStream->SendAudioData(std::move(frame)); + return kMediaConduitNoError; +} + +MediaConduitErrorCode WebrtcAudioConduit::GetAudioFrame( + int32_t samplingFreqHz, webrtc::AudioFrame* frame) { + CSFLogDebug(LOGTAG, "%s ", __FUNCTION__); + + // validate params + if (!frame) { + CSFLogError(LOGTAG, "%s Null Audio Buffer Pointer", __FUNCTION__); + MOZ_ASSERT(PR_FALSE); + return kMediaConduitMalformedArgument; + } + + // Validate sample length + if (GetNum10msSamplesForFrequency(samplingFreqHz) == 0) { + CSFLogError(LOGTAG, "%s Invalid Sampling Frequency ", __FUNCTION__); + MOZ_ASSERT(PR_FALSE); + return kMediaConduitMalformedArgument; + } + + // If the lock is taken, skip this chunk to avoid blocking the audio thread. + AutoTryReadLock tryLock(mLock); + if (!tryLock) { + CSFLogError(LOGTAG, "%s Conduit going through negotiation ", __FUNCTION__); + return kMediaConduitPlayoutError; + } + + // Conduit should have reception enabled before we ask for decoded + // samples + if (!mRecvStreamRunning) { + CSFLogError(LOGTAG, "%s Engine not Receiving ", __FUNCTION__); + return kMediaConduitSessionNotInited; + } + + // Unfortunate to have to cast to an internal class, but that looks like the + // only way short of interfacing with a layer above (which mixes all streams, + // which we don't want) or a layer below (which we try to avoid because it is + // less stable). 
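[Editorial sketch, not part of the patch] SendAudioFrame() above rejects frames whose per-channel sample count is not a whole number of 10 ms blocks for the frame's sample rate (sample_rate_hz / 100 samples per block). A standalone illustration of that check:

```cpp
#include <cstdio>

// True if the frame length is a whole multiple of the 10 ms block size.
bool IsValid10msMultiple(int aSampleRateHz, int aSamplesPerChannel) {
  if (aSampleRateHz <= 0) {
    return false;
  }
  const int samplesPer10ms = aSampleRateHz / 100;
  return samplesPer10ms > 0 && aSamplesPerChannel % samplesPer10ms == 0;
}

int main() {
  std::printf("%d\n", IsValid10msMultiple(16000, 160));  // 1: exactly 10 ms
  std::printf("%d\n", IsValid10msMultiple(48000, 960));  // 1: 20 ms, still a multiple
  std::printf("%d\n", IsValid10msMultiple(44100, 440));  // 0: a 10 ms block is 441 samples
}
```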
+ auto info = static_cast(mRecvStream) + ->GetAudioFrameWithInfo(samplingFreqHz, frame); + + if (info == webrtc::AudioMixer::Source::AudioFrameInfo::kError) { + CSFLogError(LOGTAG, "%s Getting audio frame failed", __FUNCTION__); + return kMediaConduitPlayoutError; + } + + CSFLogDebug(LOGTAG, "%s Got %zu channels of %zu samples", __FUNCTION__, + frame->num_channels(), frame->samples_per_channel()); + return kMediaConduitNoError; +} + +// Transport Layer Callbacks +void WebrtcAudioConduit::OnRtpReceived(webrtc::RtpPacketReceived&& aPacket, + webrtc::RTPHeader&& aHeader) { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + + if (mAllowSsrcChange && mRecvStreamConfig.rtp.remote_ssrc != aHeader.ssrc) { + CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__, + mRecvStreamConfig.rtp.remote_ssrc, aHeader.ssrc); + OverrideRemoteSSRC(aHeader.ssrc); + } + + CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %zu, SSRC %u (0x%x) ", __FUNCTION__, + aPacket.SequenceNumber(), aPacket.size(), aPacket.Ssrc(), + aPacket.Ssrc()); + + // Libwebrtc commit cde4b67d9d now expect calls to + // SourceTracker::GetSources() to happen on the call thread. We'll + // grab the value now while on the call thread, and dispatch to main + // to store the cached value if we have new source information. + // See Bug 1845621. + std::vector sources; + if (mRecvStream) { + sources = mRecvStream->GetSources(); + } + + bool needsCacheUpdate = false; + { + AutoReadLock lock(mLock); + needsCacheUpdate = sources != mRtpSources; + } + + // only dispatch to main if we have new data + if (needsCacheUpdate) { + GetMainThreadSerialEventTarget()->Dispatch(NS_NewRunnableFunction( + __func__, [this, rtpSources = std::move(sources), + self = RefPtr(this)]() { + AutoWriteLock lock(mLock); + mRtpSources = rtpSources; + })); + } + + mRtpPacketEvent.Notify(); + if (mCall->Call()) { + mCall->Call()->Receiver()->DeliverRtpPacket( + webrtc::MediaType::AUDIO, std::move(aPacket), + [self = RefPtr(this)]( + const webrtc::RtpPacketReceived& packet) { + CSFLogVerbose( + LOGTAG, + "AudioConduit %p: failed demuxing packet, ssrc: %u seq: %u", + self.get(), packet.Ssrc(), packet.SequenceNumber()); + return false; + }); + } +} + +Maybe WebrtcAudioConduit::RtpSendBaseSeqFor(uint32_t aSsrc) const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + auto it = mRtpSendBaseSeqs.find(aSsrc); + if (it == mRtpSendBaseSeqs.end()) { + return Nothing(); + } + return Some(it->second); +} + +const dom::RTCStatsTimestampMaker& WebrtcAudioConduit::GetTimestampMaker() + const { + return mCall->GetTimestampMaker(); +} + +void WebrtcAudioConduit::StopTransmitting() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread()); + + if (!mSendStreamRunning) { + return; + } + + if (mSendStream) { + CSFLogDebug(LOGTAG, "%s Stopping send stream", __FUNCTION__); + mSendStream->Stop(); + } + + mSendStreamRunning = false; +} + +void WebrtcAudioConduit::StartTransmitting() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mSendStream); + MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread()); + + if (mSendStreamRunning) { + return; + } + + CSFLogDebug(LOGTAG, "%s Starting send stream", __FUNCTION__); + + mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO, + webrtc::kNetworkUp); + mSendStream->Start(); + mSendStreamRunning = true; +} + +void WebrtcAudioConduit::StopReceiving() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread()); + + if (!mRecvStreamRunning) { + 
return; + } + + if (mRecvStream) { + CSFLogDebug(LOGTAG, "%s Stopping recv stream", __FUNCTION__); + mRecvStream->Stop(); + } + + mRecvStreamRunning = false; +} + +void WebrtcAudioConduit::StartReceiving() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mRecvStream); + MOZ_ASSERT(!mLock.LockedForWritingByCurrentThread()); + + if (mRecvStreamRunning) { + return; + } + + CSFLogDebug(LOGTAG, "%s Starting receive stream (SSRC %u (0x%x))", + __FUNCTION__, mRecvStreamConfig.rtp.remote_ssrc, + mRecvStreamConfig.rtp.remote_ssrc); + + mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::AUDIO, + webrtc::kNetworkUp); + mRecvStream->Start(); + mRecvStreamRunning = true; +} + +bool WebrtcAudioConduit::SendRtp(const uint8_t* aData, size_t aLength, + const webrtc::PacketOptions& aOptions) { + MOZ_ASSERT(aLength >= 12); + const uint16_t seqno = ntohs(*((uint16_t*)&aData[2])); + const uint32_t ssrc = ntohl(*((uint32_t*)&aData[8])); + + CSFLogVerbose( + LOGTAG, + "AudioConduit %p: Sending RTP Packet seq# %u, len %zu, SSRC %u (0x%x)", + this, seqno, aLength, ssrc, ssrc); + + if (!mTransportActive) { + CSFLogError(LOGTAG, "AudioConduit %p: RTP Packet Send Failed ", this); + return false; + } + + MediaPacket packet; + packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION); + packet.SetType(MediaPacket::RTP); + mSenderRtpSendEvent.Notify(std::move(packet)); + + // Parse the sequence number of the first rtp packet as base_seq. + const auto inserted = mRtpSendBaseSeqs_n.insert({ssrc, seqno}).second; + + if (inserted || aOptions.packet_id >= 0) { + int64_t now_ms = PR_Now() / 1000; + MOZ_ALWAYS_SUCCEEDS(mCallThread->Dispatch(NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this), + packet_id = aOptions.packet_id, now_ms, ssrc, seqno] { + mRtpSendBaseSeqs.insert({ssrc, seqno}); + if (packet_id >= 0) { + if (mCall->Call()) { + // TODO: This notification should ideally happen after the + // transport layer has sent the packet on the wire. + mCall->Call()->OnSentPacket({packet_id, now_ms}); + } + } + }))); + } + return true; +} + +bool WebrtcAudioConduit::SendSenderRtcp(const uint8_t* aData, size_t aLength) { + CSFLogVerbose( + LOGTAG, + "AudioConduit %p: Sending RTCP SR Packet, len %zu, SSRC %u (0x%x)", this, + aLength, (uint32_t)ntohl(*((uint32_t*)&aData[4])), + (uint32_t)ntohl(*((uint32_t*)&aData[4]))); + + if (!mTransportActive) { + CSFLogError(LOGTAG, "%s RTCP SR Packet Send Failed ", __FUNCTION__); + return false; + } + + MediaPacket packet; + packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION); + packet.SetType(MediaPacket::RTCP); + mSenderRtcpSendEvent.Notify(std::move(packet)); + return true; +} + +bool WebrtcAudioConduit::SendReceiverRtcp(const uint8_t* aData, + size_t aLength) { + CSFLogVerbose( + LOGTAG, + "AudioConduit %p: Sending RTCP RR Packet, len %zu, SSRC %u (0x%x)", this, + aLength, (uint32_t)ntohl(*((uint32_t*)&aData[4])), + (uint32_t)ntohl(*((uint32_t*)&aData[4]))); + + if (!mTransportActive) { + CSFLogError(LOGTAG, "AudioConduit %p: RTCP RR Packet Send Failed", this); + return false; + } + + MediaPacket packet; + packet.Copy(aData, aLength, aLength + SRTP_MAX_EXPANSION); + packet.SetType(MediaPacket::RTCP); + mReceiverRtcpSendEvent.Notify(std::move(packet)); + return true; +} + +/** + * Supported Sampling Frequencies. 
+ */ +bool WebrtcAudioConduit::IsSamplingFreqSupported(int freq) const { + return GetNum10msSamplesForFrequency(freq) != 0; +} + +std::vector WebrtcAudioConduit::GetUpstreamRtpSources() + const { + MOZ_ASSERT(NS_IsMainThread()); + return mRtpSources; +} + +/* Return block-length of 10 ms audio frame in number of samples */ +unsigned int WebrtcAudioConduit::GetNum10msSamplesForFrequency( + int samplingFreqHz) const { + switch (samplingFreqHz) { + case 16000: + return 160; // 160 samples + case 32000: + return 320; // 320 samples + case 44100: + return 441; // 441 samples + case 48000: + return 480; // 480 samples + default: + return 0; // invalid or unsupported + } +} + +/** + * Perform validation on the codecConfig to be applied. + * Verifies if the codec is already applied. + */ +MediaConduitErrorCode WebrtcAudioConduit::ValidateCodecConfig( + const AudioCodecConfig& codecInfo, bool send) { + if (codecInfo.mName.empty()) { + CSFLogError(LOGTAG, "%s Empty Payload Name ", __FUNCTION__); + return kMediaConduitMalformedArgument; + } + + // Only mono or stereo channels supported + if ((codecInfo.mChannels != 1) && (codecInfo.mChannels != 2)) { + CSFLogError(LOGTAG, "%s Channel Unsupported ", __FUNCTION__); + return kMediaConduitMalformedArgument; + } + + return kMediaConduitNoError; +} + +RtpExtList WebrtcAudioConduit::FilterExtensions(LocalDirection aDirection, + const RtpExtList& aExtensions) { + const bool isSend = aDirection == LocalDirection::kSend; + RtpExtList filteredExtensions; + + for (const auto& extension : aExtensions) { + // ssrc-audio-level RTP header extension + if (extension.uri == webrtc::RtpExtension::kAudioLevelUri) { + filteredExtensions.push_back( + webrtc::RtpExtension(extension.uri, extension.id)); + } + + // csrc-audio-level RTP header extension + if (extension.uri == webrtc::RtpExtension::kCsrcAudioLevelsUri) { + if (isSend) { + continue; + } + filteredExtensions.push_back( + webrtc::RtpExtension(extension.uri, extension.id)); + } + + // MID RTP header extension + if (extension.uri == webrtc::RtpExtension::kMidUri) { + if (!isSend) { + // TODO: recv mid support, see also bug 1727211 + continue; + } + filteredExtensions.push_back( + webrtc::RtpExtension(extension.uri, extension.id)); + } + } + + return filteredExtensions; +} + +webrtc::SdpAudioFormat WebrtcAudioConduit::CodecConfigToLibwebrtcFormat( + const AudioCodecConfig& aConfig) { + webrtc::SdpAudioFormat::Parameters parameters; + if (aConfig.mName == kOpusCodecName) { + if (aConfig.mChannels == 2) { + parameters[kCodecParamStereo] = kParamValueTrue; + } + if (aConfig.mFECEnabled) { + parameters[kCodecParamUseInbandFec] = kParamValueTrue; + } + if (aConfig.mDTXEnabled) { + parameters[kCodecParamUseDtx] = kParamValueTrue; + } + if (aConfig.mMaxPlaybackRate) { + parameters[kCodecParamMaxPlaybackRate] = + std::to_string(aConfig.mMaxPlaybackRate); + } + if (aConfig.mMaxAverageBitrate) { + parameters[kCodecParamMaxAverageBitrate] = + std::to_string(aConfig.mMaxAverageBitrate); + } + if (aConfig.mFrameSizeMs) { + parameters[kCodecParamPTime] = std::to_string(aConfig.mFrameSizeMs); + } + if (aConfig.mMinFrameSizeMs) { + parameters[kCodecParamMinPTime] = std::to_string(aConfig.mMinFrameSizeMs); + } + if (aConfig.mMaxFrameSizeMs) { + parameters[kCodecParamMaxPTime] = std::to_string(aConfig.mMaxFrameSizeMs); + } + if (aConfig.mCbrEnabled) { + parameters[kCodecParamCbr] = kParamValueTrue; + } + } + + return webrtc::SdpAudioFormat(aConfig.mName, aConfig.mFreq, aConfig.mChannels, + parameters); +} + +void 
WebrtcAudioConduit::DeleteSendStream() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mLock.LockedForWritingByCurrentThread()); + + if (!mSendStream) { + return; + } + + mCall->Call()->DestroyAudioSendStream(mSendStream); + mSendStreamRunning = false; + mSendStream = nullptr; + + // Reset base_seqs in case ssrcs get re-used. + mRtpSendBaseSeqs.clear(); +} + +void WebrtcAudioConduit::CreateSendStream() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mLock.LockedForWritingByCurrentThread()); + + if (mSendStream) { + return; + } + + mSendStream = mCall->Call()->CreateAudioSendStream(mSendStreamConfig); +} + +void WebrtcAudioConduit::DeleteRecvStream() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mLock.LockedForWritingByCurrentThread()); + + if (!mRecvStream) { + return; + } + + mCall->Call()->DestroyAudioReceiveStream(mRecvStream); + mRecvStreamRunning = false; + mRecvStream = nullptr; +} + +void WebrtcAudioConduit::CreateRecvStream() { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + MOZ_ASSERT(mLock.LockedForWritingByCurrentThread()); + + if (mRecvStream) { + return; + } + + mRecvStream = mCall->Call()->CreateAudioReceiveStream(mRecvStreamConfig); + // Ensure that we set the jitter buffer target on this stream. + mRecvStream->SetBaseMinimumPlayoutDelayMs(mJitterBufferTargetMs); +} + +void WebrtcAudioConduit::SetJitterBufferTarget(DOMHighResTimeStamp aTargetMs) { + MOZ_RELEASE_ASSERT(aTargetMs <= std::numeric_limits::max()); + MOZ_RELEASE_ASSERT(aTargetMs >= 0); + + MOZ_ALWAYS_SUCCEEDS(mCallThread->Dispatch(NS_NewRunnableFunction( + __func__, + [this, self = RefPtr(this), targetMs = aTargetMs] { + mJitterBufferTargetMs = static_cast(targetMs); + if (mRecvStream) { + mRecvStream->SetBaseMinimumPlayoutDelayMs(targetMs); + } + }))); +} + +void WebrtcAudioConduit::DeliverPacket(rtc::CopyOnWriteBuffer packet, + PacketType type) { + // Currently unused. + MOZ_ASSERT(false); +} + +Maybe WebrtcAudioConduit::ActiveSendPayloadType() const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + + auto stats = GetSenderStats(); + if (!stats) { + return Nothing(); + } + + if (!stats->codec_payload_type) { + return Nothing(); + } + + return Some(*stats->codec_payload_type); +} + +Maybe WebrtcAudioConduit::ActiveRecvPayloadType() const { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + + auto stats = GetReceiverStats(); + if (!stats) { + return Nothing(); + } + + if (!stats->codec_payload_type) { + return Nothing(); + } + + return Some(*stats->codec_payload_type); +} + +} // namespace mozilla diff --git a/dom/media/webrtc/libwebrtcglue/AudioConduit.h b/dom/media/webrtc/libwebrtcglue/AudioConduit.h new file mode 100644 index 0000000000..64bf5a59ec --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/AudioConduit.h @@ -0,0 +1,299 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef AUDIO_SESSION_H_ +#define AUDIO_SESSION_H_ + +#include "mozilla/Attributes.h" +#include "mozilla/ReentrantMonitor.h" +#include "mozilla/RWLock.h" +#include "mozilla/StateMirroring.h" +#include "mozilla/TimeStamp.h" + +#include "MediaConduitInterface.h" + +/** + * This file hosts several structures identifying different aspects of a RTP + * Session. + */ +namespace mozilla { + +struct DtmfEvent; + +/** + * Concrete class for Audio session. 
Hooks up + * - media-source and target to external transport + */ +class WebrtcAudioConduit : public AudioSessionConduit, + public webrtc::RtcpEventObserver { + public: + Maybe ActiveSendPayloadType() const override; + Maybe ActiveRecvPayloadType() const override; + + void OnRtpReceived(webrtc::RtpPacketReceived&& aPacket, + webrtc::RTPHeader&& aHeader); + + void OnRtcpBye() override; + void OnRtcpTimeout() override; + + void SetTransportActive(bool aActive) override; + + MediaEventSourceExc& SenderRtpSendEvent() override { + return mSenderRtpSendEvent; + } + MediaEventSourceExc& SenderRtcpSendEvent() override { + return mSenderRtcpSendEvent; + } + MediaEventSourceExc& ReceiverRtcpSendEvent() override { + return mReceiverRtcpSendEvent; + } + void ConnectReceiverRtpEvent( + MediaEventSourceExc& aEvent) + override { + mReceiverRtpEventListener = + aEvent.Connect(mCallThread, this, &WebrtcAudioConduit::OnRtpReceived); + } + + Maybe RtpSendBaseSeqFor(uint32_t aSsrc) const override; + + const dom::RTCStatsTimestampMaker& GetTimestampMaker() const override; + + void StopTransmitting(); + void StartTransmitting(); + void StopReceiving(); + void StartReceiving(); + + /** + * Function to deliver externally captured audio sample for encoding and + * transport + * @param frame [in]: AudioFrame in upstream's format for forwarding to the + * send stream. Ownership is passed along. + * NOTE: ConfigureSendMediaCodec() SHOULD be called before this function can + * be invoked. This ensures the inserted audio-samples can be transmitted by + * the conduit. + */ + MediaConduitErrorCode SendAudioFrame( + std::unique_ptr frame) override; + + /** + * Function to grab a decoded audio-sample from the media engine for + * rendering / playout of length 10 milliseconds. + * + * @param samplingFreqHz [in]: Frequency of the sampling for playback in + * Hertz (16000, 32000,..) + * @param frame [in/out]: Pointer to an AudioFrame to which audio data will be + * copied + * NOTE: This function should be invoked every 10 milliseconds for the best + * performance + * NOTE: ConfigureRecvMediaCodec() SHOULD be called before this function can + * be invoked + * This ensures the decoded samples are ready for reading and playout is + * enabled. + */ + MediaConduitErrorCode GetAudioFrame(int32_t samplingFreqHz, + webrtc::AudioFrame* frame) override; + + bool SendRtp(const uint8_t* aData, size_t aLength, + const webrtc::PacketOptions& aOptions) override; + bool SendSenderRtcp(const uint8_t* aData, size_t aLength) override; + bool SendReceiverRtcp(const uint8_t* aData, size_t aLength) override; + + bool HasCodecPluginID(uint64_t aPluginID) const override { return false; } + + void SetJitterBufferTarget(DOMHighResTimeStamp aTargetMs) override; + + void DeliverPacket(rtc::CopyOnWriteBuffer packet, PacketType type) override; + + RefPtr Shutdown() override; + + WebrtcAudioConduit(RefPtr aCall, + nsCOMPtr aStsThread); + + virtual ~WebrtcAudioConduit(); + + // Call thread. + void InitControl(AudioConduitControlInterface* aControl) override; + + // Handle a DTMF event from mControl.mOnDtmfEventListener. + void OnDtmfEvent(const DtmfEvent& aEvent); + + // Called when a parameter in mControl has changed. Call thread. + void OnControlConfigChange(); + + Ssrcs GetLocalSSRCs() const override; + Maybe GetRemoteSSRC() const override; + + void DisableSsrcChanges() override { + MOZ_ASSERT(mCallThread->IsOnCurrentThread()); + mAllowSsrcChange = false; + } + + private: + /** + * Override the remote ssrc configured on mRecvStreamConfig. 
+ * + * Recreates and restarts the recv stream if needed. The overriden value is + * overwritten the next time the mControl.mRemoteSsrc mirror changes value. + * + * Call thread only. + */ + bool OverrideRemoteSSRC(uint32_t aSsrc); + + public: + void UnsetRemoteSSRC(uint32_t aSsrc) override {} + + Maybe GetReceiverStats() + const override; + Maybe GetSenderStats() const override; + Maybe GetCallStats() const override; + + bool IsSamplingFreqSupported(int freq) const override; + + MediaEventSource& RtcpByeEvent() override { return mRtcpByeEvent; } + MediaEventSource& RtcpTimeoutEvent() override { + return mRtcpTimeoutEvent; + } + MediaEventSource& RtpPacketEvent() override { return mRtpPacketEvent; } + + std::vector GetUpstreamRtpSources() const override; + + private: + WebrtcAudioConduit(const WebrtcAudioConduit& other) = delete; + void operator=(const WebrtcAudioConduit& other) = delete; + + // Generate block size in sample length for a given sampling frequency + unsigned int GetNum10msSamplesForFrequency(int samplingFreqHz) const; + + // Checks the codec to be applied + static MediaConduitErrorCode ValidateCodecConfig( + const AudioCodecConfig& codecInfo, bool send); + /** + * Of all extensions in aExtensions, returns a list of supported extensions. + */ + static RtpExtList FilterExtensions( + MediaSessionConduitLocalDirection aDirection, + const RtpExtList& aExtensions); + static webrtc::SdpAudioFormat CodecConfigToLibwebrtcFormat( + const AudioCodecConfig& aConfig); + + void CreateSendStream(); + void DeleteSendStream(); + void CreateRecvStream(); + void DeleteRecvStream(); + + // Are SSRC changes without signaling allowed or not. + // Call thread only. + bool mAllowSsrcChange = true; + + // Const so can be accessed on any thread. Most methods are called on the Call + // thread. + const RefPtr mCall; + + // Set up in the ctor and then not touched. Called through by the streams on + // any thread. + WebrtcSendTransport mSendTransport; + WebrtcReceiveTransport mRecvTransport; + + // Accessed only on the Call thread. + webrtc::AudioReceiveStreamInterface::Config mRecvStreamConfig; + + // Written only on the Call thread. Guarded by mLock, except for reads on the + // Call thread. + webrtc::AudioReceiveStreamInterface* mRecvStream; + + // Accessed only on the Call thread. + webrtc::AudioSendStream::Config mSendStreamConfig; + + // Written only on the Call thread. Guarded by mLock, except for reads on the + // Call thread. + webrtc::AudioSendStream* mSendStream; + + // If true => mSendStream started and not stopped + // Written only on the Call thread. + Atomic mSendStreamRunning; + // If true => mRecvStream started and not stopped + // Written only on the Call thread. + Atomic mRecvStreamRunning; + + // Accessed only on the Call thread. + bool mDtmfEnabled; + + mutable RWLock mLock MOZ_UNANNOTATED; + + // Call worker thread. All access to mCall->Call() happens here. + const RefPtr mCallThread; + + // Socket transport service thread. Any thread. + const nsCOMPtr mStsThread; + + // Target jitter buffer to be applied to the receive stream in milliseconds. + uint16_t mJitterBufferTargetMs = 0; + + struct Control { + // Mirrors and events that map to AudioConduitControlInterface for control. + // Call thread only. 
+ Mirror mReceiving; + Mirror mTransmitting; + Mirror mLocalSsrcs; + Mirror mLocalCname; + Mirror mMid; + Mirror mRemoteSsrc; + Mirror mSyncGroup; + Mirror mLocalRecvRtpExtensions; + Mirror mLocalSendRtpExtensions; + Mirror> mSendCodec; + Mirror> mRecvCodecs; + Mirror> mFrameTransformerProxySend; + Mirror> mFrameTransformerProxyRecv; + MediaEventListener mOnDtmfEventListener; + + // For caching mRemoteSsrc, since another caller may change the remote ssrc + // in the stream config directly. + Ssrc mConfiguredRemoteSsrc = 0; + // For tracking changes to mSendCodec. + Maybe mConfiguredSendCodec; + // For tracking changes to mRecvCodecs. + std::vector mConfiguredRecvCodecs; + + // For change tracking. Callthread only. + RefPtr mConfiguredFrameTransformerProxySend; + RefPtr mConfiguredFrameTransformerProxyRecv; + + Control() = delete; + explicit Control(const RefPtr& aCallThread); + } mControl; + + // WatchManager allowing Mirrors to trigger functions that will update the + // webrtc.org configuration. + WatchManager mWatchManager; + + // Accessed from mStsThread. Last successfully polled RTT + Maybe mRttSec; + + // Call thread only. ssrc -> base_seq + std::map mRtpSendBaseSeqs; + // libwebrtc network thread only. ssrc -> base_seq. + // To track changes needed to mRtpSendBaseSeqs. + std::map mRtpSendBaseSeqs_n; + + // Written only on the main thread. Guarded by mLock, except for + // reads on the main thread. + std::vector mRtpSources; + + // Thread safe + Atomic mTransportActive = Atomic(false); + MediaEventProducer mRtcpByeEvent; + MediaEventProducer mRtcpTimeoutEvent; + MediaEventProducer mRtpPacketEvent; + MediaEventProducerExc mSenderRtpSendEvent; + MediaEventProducerExc mSenderRtcpSendEvent; + MediaEventProducerExc mReceiverRtcpSendEvent; + + // Assigned and revoked on mStsThread. Listeners for receiving packets. + MediaEventListener mReceiverRtpEventListener; // Rtp-receiving pipeline +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/webrtc/libwebrtcglue/CallWorkerThread.h b/dom/media/webrtc/libwebrtcglue/CallWorkerThread.h new file mode 100644 index 0000000000..12d21fbee4 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/CallWorkerThread.h @@ -0,0 +1,116 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_CALLWORKERTHREAD_H_ +#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_CALLWORKERTHREAD_H_ + +#include "mozilla/AbstractThread.h" +#include "nsIDirectTaskDispatcher.h" +#include "TaskQueueWrapper.h" + +namespace mozilla { + +// Implements AbstractThread for running things on the webrtc TaskQueue. +// Webrtc TaskQueues are not refcounted so cannot implement AbstractThread +// directly. 
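[Editorial sketch, not part of the patch] CallWorkerThread, declared next, adapts a non-refcounted webrtc TaskQueue to the refcounted AbstractThread/nsIEventTarget world; every dispatched runnable is first wrapped through the task-queue wrapper's CreateTaskRunner() so the queue can account for what is currently running. The following standalone sketch uses illustrative stand-in types (not the real Mozilla or webrtc classes) to show the shape of that adapter:

```cpp
#include <functional>
#include <iostream>
#include <queue>

class FakeTaskQueue {
 public:
  void Dispatch(std::function<void()> aTask) { mTasks.push(std::move(aTask)); }
  void RunAll() {
    while (!mTasks.empty()) {
      mTasks.front()();
      mTasks.pop();
    }
  }

 private:
  std::queue<std::function<void()>> mTasks;
};

class WorkerThreadAdapter {
 public:
  void Dispatch(std::function<void()> aTask) {
    // Wrap each task so the adapter can answer "am I current?" while it runs.
    mQueue.Dispatch([this, task = std::move(aTask)] {
      mCurrent = true;
      task();
      mCurrent = false;
    });
  }
  bool IsCurrentThreadIn() const { return mCurrent; }
  void Drain() { mQueue.RunAll(); }

 private:
  FakeTaskQueue mQueue;  // stand-in for the wrapped, non-refcounted TaskQueue
  bool mCurrent = false;
};

int main() {
  WorkerThreadAdapter worker;
  worker.Dispatch([&] {
    std::cout << "in worker? " << worker.IsCurrentThreadIn() << "\n";  // 1
  });
  std::cout << "outside? " << worker.IsCurrentThreadIn() << "\n";      // 0
  worker.Drain();
}
```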
+class CallWorkerThread final : public AbstractThread, + public nsIDirectTaskDispatcher { + public: + NS_DECL_THREADSAFE_ISUPPORTS + NS_DECL_NSIDIRECTTASKDISPATCHER + + explicit CallWorkerThread( + UniquePtr> aWebrtcTaskQueue) + : AbstractThread(aWebrtcTaskQueue->mTaskQueue->SupportsTailDispatch()), + mWebrtcTaskQueue(std::move(aWebrtcTaskQueue)) {} + + // AbstractThread overrides + nsresult Dispatch(already_AddRefed aRunnable, + DispatchReason aReason) override; + bool IsCurrentThreadIn() const override; + TaskDispatcher& TailDispatcher() override; + nsIEventTarget* AsEventTarget() override; + NS_IMETHOD + DelayedDispatch(already_AddRefed aEvent, + uint32_t aDelayMs) override; + + NS_IMETHOD RegisterShutdownTask(nsITargetShutdownTask* aTask) override; + NS_IMETHOD UnregisterShutdownTask(nsITargetShutdownTask* aTask) override; + + const UniquePtr> + mWebrtcTaskQueue; + + protected: + ~CallWorkerThread() = default; +}; + +NS_IMPL_ISUPPORTS(CallWorkerThread, nsIDirectTaskDispatcher, + nsISerialEventTarget, nsIEventTarget); + +//----------------------------------------------------------------------------- +// AbstractThread +//----------------------------------------------------------------------------- + +nsresult CallWorkerThread::Dispatch(already_AddRefed aRunnable, + DispatchReason aReason) { + RefPtr runnable = aRunnable; + return mWebrtcTaskQueue->mTaskQueue->Dispatch( + mWebrtcTaskQueue->CreateTaskRunner(std::move(runnable)), aReason); +} + +bool CallWorkerThread::IsCurrentThreadIn() const { + return mWebrtcTaskQueue->mTaskQueue->IsOnCurrentThreadInfallible() && + mWebrtcTaskQueue->IsCurrent(); +} + +TaskDispatcher& CallWorkerThread::TailDispatcher() { + return mWebrtcTaskQueue->mTaskQueue->TailDispatcher(); +} + +nsIEventTarget* CallWorkerThread::AsEventTarget() { + return mWebrtcTaskQueue->mTaskQueue->AsEventTarget(); +} + +NS_IMETHODIMP +CallWorkerThread::DelayedDispatch(already_AddRefed aEvent, + uint32_t aDelayMs) { + RefPtr event = aEvent; + return mWebrtcTaskQueue->mTaskQueue->DelayedDispatch( + mWebrtcTaskQueue->CreateTaskRunner(std::move(event)), aDelayMs); +} + +NS_IMETHODIMP CallWorkerThread::RegisterShutdownTask( + nsITargetShutdownTask* aTask) { + return mWebrtcTaskQueue->mTaskQueue->RegisterShutdownTask(aTask); +} + +NS_IMETHODIMP CallWorkerThread::UnregisterShutdownTask( + nsITargetShutdownTask* aTask) { + return mWebrtcTaskQueue->mTaskQueue->UnregisterShutdownTask(aTask); +} + +//----------------------------------------------------------------------------- +// nsIDirectTaskDispatcher +//----------------------------------------------------------------------------- + +NS_IMETHODIMP +CallWorkerThread::DispatchDirectTask(already_AddRefed aEvent) { + nsCOMPtr event = aEvent; + return mWebrtcTaskQueue->mTaskQueue->DispatchDirectTask( + mWebrtcTaskQueue->CreateTaskRunner(std::move(event))); +} + +NS_IMETHODIMP CallWorkerThread::DrainDirectTasks() { + return mWebrtcTaskQueue->mTaskQueue->DrainDirectTasks(); +} + +NS_IMETHODIMP CallWorkerThread::HaveDirectTasks(bool* aValue) { + return mWebrtcTaskQueue->mTaskQueue->HaveDirectTasks(aValue); +} + +} // namespace mozilla + +#endif diff --git a/dom/media/webrtc/libwebrtcglue/CodecConfig.h b/dom/media/webrtc/libwebrtcglue/CodecConfig.h new file mode 100644 index 0000000000..023ea98783 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/CodecConfig.h @@ -0,0 +1,237 @@ + +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. 
If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef CODEC_CONFIG_H_ +#define CODEC_CONFIG_H_ + +#include +#include + +#include "common/EncodingConstraints.h" + +namespace mozilla { + +/** + * Minimalistic Audio Codec Config Params + */ +struct AudioCodecConfig { + /* + * The data-types for these properties mimic the + * corresponding webrtc::CodecInst data-types. + */ + int mType; + std::string mName; + int mFreq; + int mChannels; + + bool mFECEnabled; + bool mDtmfEnabled; + uint32_t mFrameSizeMs; + uint32_t mMaxFrameSizeMs; + uint32_t mMinFrameSizeMs; + + // OPUS-specific + bool mDTXEnabled; + uint32_t mMaxAverageBitrate; + int mMaxPlaybackRate; + bool mCbrEnabled; + + AudioCodecConfig(int type, std::string name, int freq, int channels, + bool FECEnabled) + : mType(type), + mName(name), + mFreq(freq), + mChannels(channels), + mFECEnabled(FECEnabled), + mDtmfEnabled(false), + mFrameSizeMs(0), + mMaxFrameSizeMs(0), + mMinFrameSizeMs(0), + mDTXEnabled(false), + mMaxAverageBitrate(0), + mMaxPlaybackRate(0), + mCbrEnabled(false) {} + + bool operator==(const AudioCodecConfig& aOther) const { + return mType == aOther.mType && mName == aOther.mName && + mFreq == aOther.mFreq && mChannels == aOther.mChannels && + mFECEnabled == aOther.mFECEnabled && + mDtmfEnabled == aOther.mDtmfEnabled && + mFrameSizeMs == aOther.mFrameSizeMs && + mMaxFrameSizeMs == aOther.mMaxFrameSizeMs && + mMinFrameSizeMs == aOther.mMinFrameSizeMs && + mDTXEnabled == aOther.mDTXEnabled && + mMaxAverageBitrate == aOther.mMaxAverageBitrate && + mMaxPlaybackRate == aOther.mMaxPlaybackRate && + mCbrEnabled == aOther.mCbrEnabled; + } +}; + +/* + * Minimalistic video codec configuration + * More to be added later depending on the use-case + */ + +#define MAX_SPROP_LEN 128 + +// used for holding SDP negotiation results +struct VideoCodecConfigH264 { + char sprop_parameter_sets[MAX_SPROP_LEN]; + int packetization_mode; + int profile_level_id; + int tias_bw; + + bool operator==(const VideoCodecConfigH264& aOther) const { + return strncmp(sprop_parameter_sets, aOther.sprop_parameter_sets, + MAX_SPROP_LEN) == 0 && + packetization_mode == aOther.packetization_mode && + profile_level_id == aOther.profile_level_id && + tias_bw == aOther.tias_bw; + } +}; + +// class so the std::strings can get freed more easily/reliably +class VideoCodecConfig { + public: + /* + * The data-types for these properties mimic the + * corresponding webrtc::VideoCodec data-types. + */ + int mType; // payload type + std::string mName; + + std::vector mAckFbTypes; + std::vector mNackFbTypes; + std::vector mCcmFbTypes; + // Don't pass mOtherFbTypes from JsepVideoCodecDescription because we'd have + // to drag SdpRtcpFbAttributeList::Feedback along too. 
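[Editorial sketch, not part of the patch] AudioCodecConfig above is a plain value type; the conduit relies on its operator== (via mConfiguredSendCodec / mConfiguredRecvCodecs in AudioConduit.cpp) to detect whether a renegotiation actually changed anything before touching the send or receive stream. A small usage sketch, assuming CodecConfig.h from this patch is on the include path and using 109 purely as an example payload type:

```cpp
#include "CodecConfig.h"

#include <iostream>

int main() {
  using mozilla::AudioCodecConfig;
  // opus/48000/2 with inband FEC enabled.
  AudioCodecConfig previous(109, "opus", 48000, 2, /* FECEnabled = */ true);
  AudioCodecConfig next(109, "opus", 48000, 2, /* FECEnabled = */ true);
  next.mDTXEnabled = true;  // renegotiated with DTX turned on

  std::cout << (previous == next ? "no reconfigure needed"
                                 : "reconfigure send stream")
            << "\n";  // prints "reconfigure send stream"
}
```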
+ bool mRembFbSet; + bool mFECFbSet; + bool mTransportCCFbSet; + + int mULPFECPayloadType; + int mREDPayloadType; + int mREDRTXPayloadType; + int mRTXPayloadType; + + uint32_t mTias; + EncodingConstraints mEncodingConstraints; + struct Encoding { + std::string rid; + EncodingConstraints constraints; + bool active = true; + // TODO(bug 1744116): Use = default here + bool operator==(const Encoding& aOther) const { + return rid == aOther.rid && constraints == aOther.constraints && + active == aOther.active; + } + }; + std::vector mEncodings; + std::string mSpropParameterSets; + uint8_t mProfile; + uint8_t mConstraints; + uint8_t mLevel; + uint8_t mPacketizationMode; + // TODO: add external negotiated SPS/PPS + + // TODO(bug 1744116): Use = default here + bool operator==(const VideoCodecConfig& aRhs) const { + return mType == aRhs.mType && mName == aRhs.mName && + mAckFbTypes == aRhs.mAckFbTypes && + mNackFbTypes == aRhs.mNackFbTypes && + mCcmFbTypes == aRhs.mCcmFbTypes && mRembFbSet == aRhs.mRembFbSet && + mFECFbSet == aRhs.mFECFbSet && + mTransportCCFbSet == aRhs.mTransportCCFbSet && + mULPFECPayloadType == aRhs.mULPFECPayloadType && + mREDPayloadType == aRhs.mREDPayloadType && + mREDRTXPayloadType == aRhs.mREDRTXPayloadType && + mRTXPayloadType == aRhs.mRTXPayloadType && mTias == aRhs.mTias && + mEncodingConstraints == aRhs.mEncodingConstraints && + mEncodings == aRhs.mEncodings && + mSpropParameterSets == aRhs.mSpropParameterSets && + mProfile == aRhs.mProfile && mConstraints == aRhs.mConstraints && + mLevel == aRhs.mLevel && + mPacketizationMode == aRhs.mPacketizationMode; + } + + VideoCodecConfig(int type, std::string name, + const EncodingConstraints& constraints, + const struct VideoCodecConfigH264* h264 = nullptr) + : mType(type), + mName(name), + mRembFbSet(false), + mFECFbSet(false), + mTransportCCFbSet(false), + mULPFECPayloadType(-1), + mREDPayloadType(-1), + mREDRTXPayloadType(-1), + mRTXPayloadType(-1), + mTias(0), + mEncodingConstraints(constraints), + mProfile(0x42), + mConstraints(0xE0), + mLevel(0x0C), + mPacketizationMode(1) { + if (h264) { + mProfile = (h264->profile_level_id & 0x00FF0000) >> 16; + mConstraints = (h264->profile_level_id & 0x0000FF00) >> 8; + mLevel = (h264->profile_level_id & 0x000000FF); + mPacketizationMode = h264->packetization_mode; + mSpropParameterSets = h264->sprop_parameter_sets; + } + } + + bool ResolutionEquals(const VideoCodecConfig& aConfig) const { + if (mEncodings.size() != aConfig.mEncodings.size()) { + return false; + } + for (size_t i = 0; i < mEncodings.size(); ++i) { + if (!mEncodings[i].constraints.ResolutionEquals( + aConfig.mEncodings[i].constraints)) { + return false; + } + } + return true; + } + + // Nothing seems to use this right now. Do we intend to support this + // someday? 
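[Editorial sketch, not part of the patch] The VideoCodecConfig constructor above unpacks the SDP profile-level-id into profile_idc, the constraint-flags byte, and level_idc with simple masks and shifts. A standalone illustration using the common value 42e01f (Constrained Baseline, Level 3.1):

```cpp
#include <cstdint>
#include <cstdio>

struct H264ProfileLevel {
  uint8_t profile;      // profile_idc, e.g. 0x42 (Baseline family)
  uint8_t constraints;  // constraint_set flags byte, e.g. 0xE0
  uint8_t level;        // level_idc, e.g. 0x1F = 31 = Level 3.1
};

// Same masks and shifts as the VideoCodecConfig constructor in the patch.
H264ProfileLevel Unpack(int aProfileLevelId) {
  return {
      static_cast<uint8_t>((aProfileLevelId & 0x00FF0000) >> 16),
      static_cast<uint8_t>((aProfileLevelId & 0x0000FF00) >> 8),
      static_cast<uint8_t>(aProfileLevelId & 0x000000FF),
  };
}

int main() {
  const H264ProfileLevel parsed = Unpack(0x42E01F);
  std::printf("profile=0x%02X constraints=0x%02X level=%u\n",
              static_cast<unsigned>(parsed.profile),
              static_cast<unsigned>(parsed.constraints),
              static_cast<unsigned>(parsed.level));
  // profile=0x42 constraints=0xE0 level=31
}
```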
+ bool RtcpFbAckIsSet(const std::string& type) const { + for (auto i = mAckFbTypes.begin(); i != mAckFbTypes.end(); ++i) { + if (*i == type) { + return true; + } + } + return false; + } + + bool RtcpFbNackIsSet(const std::string& type) const { + for (auto i = mNackFbTypes.begin(); i != mNackFbTypes.end(); ++i) { + if (*i == type) { + return true; + } + } + return false; + } + + bool RtcpFbCcmIsSet(const std::string& type) const { + for (auto i = mCcmFbTypes.begin(); i != mCcmFbTypes.end(); ++i) { + if (*i == type) { + return true; + } + } + return false; + } + + bool RtcpFbRembIsSet() const { return mRembFbSet; } + + bool RtcpFbFECIsSet() const { return mFECFbSet; } + + bool RtcpFbTransportCCIsSet() const { return mTransportCCFbSet; } + + bool RtxPayloadTypeIsSet() const { return mRTXPayloadType != -1; } +}; +} // namespace mozilla +#endif diff --git a/dom/media/webrtc/libwebrtcglue/FrameTransformer.cpp b/dom/media/webrtc/libwebrtcglue/FrameTransformer.cpp new file mode 100644 index 0000000000..23688a7d88 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/FrameTransformer.cpp @@ -0,0 +1,87 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim: set ts=2 et sw=2 tw=80: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +#include "libwebrtcglue/FrameTransformer.h" +#include "api/frame_transformer_interface.h" +#include "mozilla/Mutex.h" +#include +#include +#include "api/scoped_refptr.h" +#include +#include "libwebrtcglue/FrameTransformerProxy.h" + +namespace mozilla { + +FrameTransformer::FrameTransformer(bool aVideo) + : webrtc::FrameTransformerInterface(), + mVideo(aVideo), + mCallbacksMutex("FrameTransformer::mCallbacksMutex"), + mProxyMutex("FrameTransformer::mProxyMutex") {} + +FrameTransformer::~FrameTransformer() { + if (mProxy) { + mProxy->SetLibwebrtcTransformer(nullptr); + } +} + +void FrameTransformer::Transform( + std::unique_ptr aFrame) { + MutexAutoLock lock(mProxyMutex); + if (mProxy) { + mProxy->Transform(std::move(aFrame)); + return; + } + + // No transformer, just passthrough + OnTransformedFrame(std::move(aFrame)); +} + +void FrameTransformer::RegisterTransformedFrameCallback( + rtc::scoped_refptr aCallback) { + MutexAutoLock lock(mCallbacksMutex); + mCallback = aCallback; +} + +void FrameTransformer::UnregisterTransformedFrameCallback() { + MutexAutoLock lock(mCallbacksMutex); + mCallback = nullptr; +} + +void FrameTransformer::RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr aCallback, + uint32_t aSsrc) { + MutexAutoLock lock(mCallbacksMutex); + mCallbacksBySsrc[aSsrc] = aCallback; +} + +void FrameTransformer::UnregisterTransformedFrameSinkCallback(uint32_t aSsrc) { + MutexAutoLock lock(mCallbacksMutex); + mCallbacksBySsrc.erase(aSsrc); +} + +void FrameTransformer::OnTransformedFrame( + std::unique_ptr aFrame) { + MutexAutoLock lock(mCallbacksMutex); + if (mCallback) { + mCallback->OnTransformedFrame(std::move(aFrame)); + } else if (auto it = mCallbacksBySsrc.find(aFrame->GetSsrc()); + it != mCallbacksBySsrc.end()) { + it->second->OnTransformedFrame(std::move(aFrame)); + } +} + +void FrameTransformer::SetProxy(FrameTransformerProxy* aProxy) { + MutexAutoLock lock(mProxyMutex); + if (mProxy) { + mProxy->SetLibwebrtcTransformer(nullptr); + } + mProxy = aProxy; + if (mProxy) { + mProxy->SetLibwebrtcTransformer(this); + } +} + +} // namespace mozilla diff --git 
new file mode 100644
index 0000000000..0c93d0f77f
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/FrameTransformer.h
@@ -0,0 +1,79 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_FRAMETRANSFORMER_H_
+#define MOZILLA_DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_FRAMETRANSFORMER_H_
+
+#include "api/frame_transformer_interface.h"
+#include "libwebrtcglue/FrameTransformerProxy.h"
+#include "nsISupportsImpl.h"
+#include "mozilla/Mutex.h"
+#include "jsapi/RTCRtpScriptTransformer.h"
+
+namespace mozilla {
+
+// There is one of these per RTCRtpSender and RTCRtpReceiver, for its entire
+// lifetime. SetProxy is used to activate/deactivate it. In the inactive state
+// (the default), this is just a synchronous passthrough.
+class FrameTransformer : public webrtc::FrameTransformerInterface {
+ public:
+  explicit FrameTransformer(bool aVideo);
+  virtual ~FrameTransformer();
+
+  // This is set when RTCRtpSender/Receiver.transform is set, and unset when
+  // RTCRtpSender/Receiver.transform is unset.
+  void SetProxy(FrameTransformerProxy* aProxy);
+
+  // If no proxy is set (ie; RTCRtpSender/Receiver.transform is not set), this
+  // synchronously calls OnTransformedFrame with no modification. If a proxy is
+  // set, we send the frame to it, and eventually that frame should come back
+  // to OnTransformedFrame.
+  void Transform(
+      std::unique_ptr<webrtc::TransformableFrameInterface> aFrame) override;
+  void OnTransformedFrame(
+      std::unique_ptr<webrtc::TransformableFrameInterface> aFrame);
+
+  // When libwebrtc uses the same callback for all ssrcs
+  // (right now, this is used for audio, but we do not care in this class)
+  void RegisterTransformedFrameCallback(
+      rtc::scoped_refptr<webrtc::TransformedFrameCallback> aCallback) override;
+  void UnregisterTransformedFrameCallback() override;
+
+  // When libwebrtc uses a different callback for each ssrc
+  // (right now, this is used for video, but we do not care in this class)
+  void RegisterTransformedFrameSinkCallback(
+      rtc::scoped_refptr<webrtc::TransformedFrameCallback> aCallback,
+      uint32_t aSsrc) override;
+  void UnregisterTransformedFrameSinkCallback(uint32_t aSsrc) override;
+
+  bool IsVideo() const { return mVideo; }
+
+ private:
+  const bool mVideo;
+  Mutex mCallbacksMutex;
+  // Written on a libwebrtc thread, read on the worker thread.
+  rtc::scoped_refptr<webrtc::TransformedFrameCallback> mCallback
+      MOZ_GUARDED_BY(mCallbacksMutex);
+  std::map<uint32_t, rtc::scoped_refptr<webrtc::TransformedFrameCallback>>
+      mCallbacksBySsrc MOZ_GUARDED_BY(mCallbacksMutex);
+
+  Mutex mProxyMutex;
+  // Written on the call thread, read on a libwebrtc/gmp/mediadataencoder/call
+  // thread (which one depends on the media type and direction). Right now,
+  // these are:
+  // Send video: VideoStreamEncoder::encoder_queue_,
+  // WebrtcMediaDataEncoder::mTaskQueue, or GMP encoder thread.
+  // Recv video: Call::worker_thread_
+  // Send audio: ChannelSend::encoder_queue_
+  // Recv audio: ChannelReceive::worker_thread_
+  // This should have little to no lock contention
+  // This corresponds to the RTCRtpScriptTransform/RTCRtpScriptTransformer.
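+  // Roughly, the expected lifecycle is (illustrative sketch, simplified):
+  //   transformer->SetProxy(proxy);    // transform attribute set on JS side
+  //   ... libwebrtc calls Transform(); frames round-trip through the proxy
+  //   and come back via OnTransformedFrame() ...
+  //   transformer->SetProxy(nullptr);  // transform unset; passthrough again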
+ RefPtr mProxy MOZ_GUARDED_BY(mProxyMutex); +}; // FrameTransformer + +} // namespace mozilla + +#endif // MOZILLA_DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_FRAMETRANSFORMER_H_ diff --git a/dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.cpp b/dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.cpp new file mode 100644 index 0000000000..f374cda699 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.cpp @@ -0,0 +1,258 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim: set ts=2 et sw=2 tw=80: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +#include "libwebrtcglue/FrameTransformerProxy.h" +#include "libwebrtcglue/FrameTransformer.h" +#include "mozilla/dom/RTCRtpSender.h" +#include "mozilla/dom/RTCRtpReceiver.h" +#include "mozilla/Logging.h" +#include "mozilla/Mutex.h" +#include "jsapi/RTCRtpScriptTransformer.h" +#include "nsThreadUtils.h" +#include "mozilla/Assertions.h" +#include +#include "mozilla/Maybe.h" +#include "mozilla/RefPtr.h" +#include "nscore.h" +#include "ErrorList.h" +#include "nsIRunnable.h" +#include "nsIEventTarget.h" +#include "api/frame_transformer_interface.h" +#include +#include "nsDebug.h" +#include "nsISupports.h" +#include + +namespace mozilla { + +LazyLogModule gFrameTransformerProxyLog("FrameTransformerProxy"); + +FrameTransformerProxy::FrameTransformerProxy() + : mMutex("FrameTransformerProxy::mMutex") {} + +FrameTransformerProxy::~FrameTransformerProxy() = default; + +void FrameTransformerProxy::SetScriptTransformer( + dom::RTCRtpScriptTransformer& aTransformer) { + MutexAutoLock lock(mMutex); + if (mReleaseScriptTransformerCalled) { + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Warning, + ("RTCRtpScriptTransformer is ready, but ReleaseScriptTransformer " + "has already been called.")); + // The mainthread side has torn down while the worker init was pending. + // Don't grab a reference to the worker thread, or the script transformer. + // Also, let the script transformer know that we do not need it after all. + aTransformer.NotifyReleased(); + return; + } + + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Info, + ("RTCRtpScriptTransformer is ready!")); + mWorkerThread = GetCurrentSerialEventTarget(); + MOZ_ASSERT(mWorkerThread); + + MOZ_ASSERT(!mScriptTransformer); + mScriptTransformer = &aTransformer; + while (!mQueue.empty()) { + mScriptTransformer->TransformFrame(std::move(mQueue.front())); + mQueue.pop_front(); + } +} + +Maybe FrameTransformerProxy::IsVideo() const { + MutexAutoLock lock(mMutex); + return mVideo; +} + +void FrameTransformerProxy::ReleaseScriptTransformer() { + MutexAutoLock lock(mMutex); + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Debug, ("In %s", __FUNCTION__)); + if (mReleaseScriptTransformerCalled) { + return; + } + mReleaseScriptTransformerCalled = true; + + if (mWorkerThread) { + mWorkerThread->Dispatch(NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this)] { + if (mScriptTransformer) { + mScriptTransformer->NotifyReleased(); + mScriptTransformer = nullptr; + } + + // Make sure cycles are broken; this unset might have been caused by + // something other than the sender/receiver being unset. 
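+          // Sequencing note: the script transformer is dropped on its owning
+          // worker thread first (above), and only then are the main-thread
+          // sender/receiver references cleared (below, under the same mutex),
+          // keeping teardown on the threads that own the respective objects.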
+ GetMainThreadSerialEventTarget()->Dispatch( + NS_NewRunnableFunction(__func__, [this, self] { + MutexAutoLock lock(mMutex); + mSender = nullptr; + mReceiver = nullptr; + })); + })); + mWorkerThread = nullptr; + } +} + +void FrameTransformerProxy::SetLibwebrtcTransformer( + FrameTransformer* aLibwebrtcTransformer) { + MutexAutoLock lock(mMutex); + mLibwebrtcTransformer = aLibwebrtcTransformer; + if (mLibwebrtcTransformer) { + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Info, + ("mLibwebrtcTransformer is now set!")); + mVideo = Some(mLibwebrtcTransformer->IsVideo()); + } +} + +void FrameTransformerProxy::Transform( + std::unique_ptr aFrame) { + MutexAutoLock lock(mMutex); + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Debug, ("In %s", __FUNCTION__)); + if (!mWorkerThread && !mReleaseScriptTransformerCalled) { + MOZ_LOG( + gFrameTransformerProxyLog, LogLevel::Info, + ("In %s, queueing frame because RTCRtpScriptTransformer is not ready", + __FUNCTION__)); + // We are still waiting for the script transformer to be created on the + // worker thread. + mQueue.push_back(std::move(aFrame)); + return; + } + + if (mWorkerThread) { + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Debug, + ("Queueing call to RTCRtpScriptTransformer::TransformFrame")); + mWorkerThread->Dispatch(NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this), + frame = std::move(aFrame)]() mutable { + if (NS_WARN_IF(!mScriptTransformer)) { + // Could happen due to errors. Is there some + // other processing we ought to do? + return; + } + mScriptTransformer->TransformFrame(std::move(frame)); + })); + } +} + +void FrameTransformerProxy::OnTransformedFrame( + std::unique_ptr aFrame) { + MutexAutoLock lock(mMutex); + // If the worker thread has changed, we drop the frame, to avoid frames + // arriving out of order. + if (mLibwebrtcTransformer) { + // This will lock, lock order is mMutex, FrameTransformer::mLibwebrtcMutex + mLibwebrtcTransformer->OnTransformedFrame(std::move(aFrame)); + } +} + +void FrameTransformerProxy::SetSender(dom::RTCRtpSender* aSender) { + { + MutexAutoLock lock(mMutex); + MOZ_ASSERT(!mReceiver); + mSender = aSender; + } + if (!aSender) { + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Info, ("Sender set to null")); + ReleaseScriptTransformer(); + } +} + +void FrameTransformerProxy::SetReceiver(dom::RTCRtpReceiver* aReceiver) { + { + MutexAutoLock lock(mMutex); + MOZ_ASSERT(!mSender); + mReceiver = aReceiver; + } + if (!aReceiver) { + MOZ_LOG(gFrameTransformerProxyLog, LogLevel::Info, + ("Receiver set to null")); + ReleaseScriptTransformer(); + } +} + +bool FrameTransformerProxy::RequestKeyFrame() { + { + // Spec wants this to reject synchronously if the RTCRtpScriptTransformer + // is not associated with a video receiver. This may change to an async + // check? + MutexAutoLock lock(mMutex); + if (!mReceiver || !mVideo.isSome() || !*mVideo) { + return false; + } + } + + // Thread hop to main, and then the conduit thread-hops to the call thread. 
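+  // The round trip is roughly (sketch):
+  //   worker: RequestKeyFrame()
+  //     -> main: RTCRtpReceiver::RequestKeyFrame()
+  //       -> call thread: the conduit issues the keyframe request
+  //         -> KeyFrameRequestDone(aSuccess) hops back to the worker, where
+  //            RTCRtpScriptTransformer::KeyFrameRequestDone runs.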
+ GetMainThreadSerialEventTarget()->Dispatch(NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this)] { + MutexAutoLock lock(mMutex); + if (mReceiver && mVideo.isSome() && *mVideo) { + mReceiver->RequestKeyFrame(); + } + })); + return true; +} + +void FrameTransformerProxy::KeyFrameRequestDone(bool aSuccess) { + MutexAutoLock lock(mMutex); + if (mWorkerThread) { + mWorkerThread->Dispatch(NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this), aSuccess] { + if (mScriptTransformer) { + mScriptTransformer->KeyFrameRequestDone(aSuccess); + } + })); + } +} + +bool FrameTransformerProxy::GenerateKeyFrame(const Maybe& aRid) { + { + // Spec wants this to reject synchronously if the RTCRtpScriptTransformer + // is not associated with a video sender. This may change to an async + // check? + MutexAutoLock lock(mMutex); + if (!mSender || !mVideo.isSome() || !*mVideo) { + return false; + } + } + + // Thread hop to main, and then the conduit thread-hops to the call thread. + GetMainThreadSerialEventTarget()->Dispatch(NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this), aRid] { + MutexAutoLock lock(mMutex); + if (!mSender || !mVideo.isSome() || !*mVideo || + !mSender->GenerateKeyFrame(aRid)) { + CopyableErrorResult rv; + rv.ThrowInvalidStateError("Not sending video"); + if (mWorkerThread) { + mWorkerThread->Dispatch(NS_NewRunnableFunction( + __func__, + [this, self = RefPtr(this), aRid, rv] { + if (mScriptTransformer) { + mScriptTransformer->GenerateKeyFrameError(aRid, rv); + } + })); + } + } + })); + return true; +} + +void FrameTransformerProxy::GenerateKeyFrameError( + const Maybe& aRid, const CopyableErrorResult& aResult) { + MutexAutoLock lock(mMutex); + if (mWorkerThread) { + mWorkerThread->Dispatch(NS_NewRunnableFunction( + __func__, + [this, self = RefPtr(this), aRid, aResult] { + if (mScriptTransformer) { + mScriptTransformer->GenerateKeyFrameError(aRid, aResult); + } + })); + } +} + +} // namespace mozilla diff --git a/dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.h b/dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.h new file mode 100644 index 0000000000..72617fcde9 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/FrameTransformerProxy.h @@ -0,0 +1,124 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim: set ts=2 et sw=2 tw=80: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ + +#ifndef MOZILLA_DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_FRAMETRANSFORMERPROXY_H_ +#define MOZILLA_DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_FRAMETRANSFORMERPROXY_H_ + +#include "nsISupportsImpl.h" +#include "mozilla/Mutex.h" +#include "mozilla/Maybe.h" +#include +#include + +class nsIEventTarget; + +namespace webrtc { +class TransformableFrameInterface; +class VideoReceiveStreamInterface; +} // namespace webrtc + +namespace mozilla { + +class FrameTransformer; +class WebrtcVideoConduit; +class CopyableErrorResult; + +namespace dom { +class RTCRtpScriptTransformer; +class RTCRtpSender; +class RTCRtpReceiver; +} // namespace dom + +// This corresponds to a single RTCRtpScriptTransform (and its +// RTCRtpScriptTransformer, once that is created on the worker thread). 
This
+// is intended to decouple threading/lifecycle/include-dependencies between
+// FrameTransformer (on the libwebrtc side of things), RTCRtpScriptTransformer
+// (on the worker side of things), RTCRtpScriptTransform and
+// RTCRtpSender/Receiver (on the main thread), and prevents frames from being
+// lost while we're setting things up on the worker. In other words, this
+// handles the inconvenient stuff.
+class FrameTransformerProxy {
+ public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(FrameTransformerProxy);
+
+  FrameTransformerProxy();
+  FrameTransformerProxy(const FrameTransformerProxy& aRhs) = delete;
+  FrameTransformerProxy(FrameTransformerProxy&& aRhs) = delete;
+  FrameTransformerProxy& operator=(const FrameTransformerProxy& aRhs) = delete;
+  FrameTransformerProxy& operator=(FrameTransformerProxy&& aRhs) = delete;
+
+  // Called at most once (might not be called if the worker is shutting down),
+  // on the worker thread.
+  void SetScriptTransformer(dom::RTCRtpScriptTransformer& aTransformer);
+
+  // Can be called from the worker thread (if the worker is shutting down), or
+  // main (if RTCRtpSender/RTCRtpReceiver is done with us).
+  void ReleaseScriptTransformer();
+
+  // RTCRtpScriptTransformer calls this when it is done transforming a frame.
+  void OnTransformedFrame(
+      std::unique_ptr<webrtc::TransformableFrameInterface> aFrame);
+
+  Maybe<bool> IsVideo() const;
+
+  // Called by FrameTransformer, on main. Only one FrameTransformer will ever
+  // be registered over the lifetime of this object. This is where we route
+  // transformed frames. If this is set, we can also expect to receive calls to
+  // Transform.
+  void SetLibwebrtcTransformer(FrameTransformer* aLibwebrtcTransformer);
+
+  // FrameTransformer calls this while we're registered with it (by
+  // SetLibwebrtcTransformer)
+  void Transform(std::unique_ptr<webrtc::TransformableFrameInterface> aFrame);
+
+  void SetSender(dom::RTCRtpSender* aSender);
+  void SetReceiver(dom::RTCRtpReceiver* aReceiver);
+
+  // Called on worker thread
+  bool RequestKeyFrame();
+  // Called on call thread
+  void KeyFrameRequestDone(bool aSuccess);
+
+  bool GenerateKeyFrame(const Maybe<std::string>& aRid);
+  void GenerateKeyFrameError(const Maybe<std::string>& aRid,
+                             const CopyableErrorResult& aResult);
+
+ private:
+  virtual ~FrameTransformerProxy();
+
+  // Worker thread only. Set at most once.
+  // Does not need any mutex protection.
+  RefPtr<dom::RTCRtpScriptTransformer> mScriptTransformer;
+
+  mutable Mutex mMutex;
+  // Written on the worker thread. Read on libwebrtc threads, mainthread, and
+  // the worker thread.
+  RefPtr<nsIEventTarget> mWorkerThread MOZ_GUARDED_BY(mMutex);
+  // We need a flag for this in case the ReleaseScriptTransformer call comes
+  // _before_ the script transformer is set, to disable SetScriptTransformer.
+  // Could be written on main or the worker thread. Read on main, worker, and
+  // libwebrtc threads.
+  bool mReleaseScriptTransformerCalled MOZ_GUARDED_BY(mMutex) = false;
+  // Used when frames arrive before the script transformer is created, which
+  // should be pretty rare. Accessed on worker and libwebrtc threads.
+  std::list<std::unique_ptr<webrtc::TransformableFrameInterface>> mQueue
+      MOZ_GUARDED_BY(mMutex);
+  // Written on main, read on the worker thread.
+  FrameTransformer* mLibwebrtcTransformer MOZ_GUARDED_BY(mMutex) = nullptr;
+
+  // TODO: Will be used to route GenerateKeyFrame. Details TBD.
+  RefPtr<dom::RTCRtpSender> mSender MOZ_GUARDED_BY(mMutex);
+  // Set on mainthread. This is where we route RequestKeyFrame calls from the
+  // worker thread. Mutex protected because spec wants sync errors if the
+  // receiver is not set (or the right type).
If spec drops this requirement, + // this could be mainthread only and non-mutex-protected. + RefPtr mReceiver MOZ_GUARDED_BY(mMutex); + Maybe mVideo MOZ_GUARDED_BY(mMutex); +}; + +} // namespace mozilla + +#endif // MOZILLA_DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_FRAMETRANSFORMERPROXY_H_ diff --git a/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp new file mode 100644 index 0000000000..ccadd846e2 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.cpp @@ -0,0 +1,22 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "WebrtcGmpVideoCodec.h" +#include "GmpVideoCodec.h" + +namespace mozilla { + +WebrtcVideoEncoder* GmpVideoCodec::CreateEncoder( + const webrtc::SdpVideoFormat& aFormat, std::string aPCHandle) { + return new WebrtcVideoEncoderProxy( + new WebrtcGmpVideoEncoder(aFormat, std::move(aPCHandle))); +} + +WebrtcVideoDecoder* GmpVideoCodec::CreateDecoder(std::string aPCHandle, + TrackingId aTrackingId) { + return new WebrtcVideoDecoderProxy(std::move(aPCHandle), + std::move(aTrackingId)); +} + +} // namespace mozilla diff --git a/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h new file mode 100644 index 0000000000..caf125c809 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/GmpVideoCodec.h @@ -0,0 +1,27 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef GMPVIDEOCODEC_H_ +#define GMPVIDEOCODEC_H_ + +#include + +#include "PerformanceRecorder.h" + +namespace mozilla { + +class WebrtcVideoDecoder; +class WebrtcVideoEncoder; + +class GmpVideoCodec { + public: + static WebrtcVideoEncoder* CreateEncoder( + const webrtc::SdpVideoFormat& aFormat, std::string aPCHandle); + static WebrtcVideoDecoder* CreateDecoder(std::string aPCHandle, + TrackingId aTrackingId); +}; + +} // namespace mozilla + +#endif diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h b/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h new file mode 100644 index 0000000000..ab38d8d623 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/MediaConduitControl.h @@ -0,0 +1,79 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim:set ts=2 sw=2 sts=2 et cindent: */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_MEDIACONDUITCONTROL_H_ +#define DOM_MEDIA_WEBRTC_LIBWEBRTCGLUE_MEDIACONDUITCONTROL_H_ + +#include "jsapi/RTCDTMFSender.h" // For DtmfEvent +#include "mozilla/StateMirroring.h" +#include "RtpRtcpConfig.h" +#include +#include +#include "mozilla/Maybe.h" +#include "CodecConfig.h" // For Audio/VideoCodecConfig +#include "api/rtp_parameters.h" // For webrtc::RtpExtension +#include "api/video_codecs/video_codec.h" // For webrtc::VideoCodecMode +#include "FrameTransformerProxy.h" + +namespace mozilla { + +using RtpExtList = std::vector; +using Ssrc = uint32_t; +using Ssrcs = std::vector; + +/** + * These are the interfaces used to control the async conduits. Some parameters + * are common, and some are tied to the conduit type. 
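+ * (For example, a conduit mirrors each canonical below onto its own thread,
+ * roughly along the lines of
+ *   mControl.mTransmitting.Connect(aControl->CanonicalTransmitting());
+ * so a change made by the control implementation is observed asynchronously
+ * by the conduit; the exact wiring lives in InitConduitControl.)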
See
+ * MediaSessionConduit::InitConduitControl for how they are used.
+ *
+ * Put simply, the implementer of the interfaces below may set its canonicals on
+ * any thread, and the conduits will react to those changes accordingly, on
+ * their dedicated worker thread. One instance of these interfaces could control
+ * multiple conduits as each canonical can connect to any number of mirrors.
+ */
+
+class MediaConduitControlInterface {
+ public:
+  virtual Canonical<bool>& CanonicalReceiving() = 0;
+  virtual Canonical<bool>& CanonicalTransmitting() = 0;
+  virtual Canonical<Ssrcs>& CanonicalLocalSsrcs() = 0;
+  virtual Canonical<std::string>& CanonicalLocalCname() = 0;
+  virtual Canonical<std::string>& CanonicalMid() = 0;
+  virtual Canonical<Ssrc>& CanonicalRemoteSsrc() = 0;
+  virtual Canonical<std::string>& CanonicalSyncGroup() = 0;
+  virtual Canonical<RtpExtList>& CanonicalLocalRecvRtpExtensions() = 0;
+  virtual Canonical<RtpExtList>& CanonicalLocalSendRtpExtensions() = 0;
+  virtual Canonical<RefPtr<FrameTransformerProxy>>&
+  CanonicalFrameTransformerProxySend() = 0;
+  virtual Canonical<RefPtr<FrameTransformerProxy>>&
+  CanonicalFrameTransformerProxyRecv() = 0;
+};
+
+class AudioConduitControlInterface : public MediaConduitControlInterface {
+ public:
+  virtual Canonical<Maybe<AudioCodecConfig>>& CanonicalAudioSendCodec() = 0;
+  virtual Canonical<std::vector<AudioCodecConfig>>&
+  CanonicalAudioRecvCodecs() = 0;
+  virtual MediaEventSource<DtmfEvent>& OnDtmfEvent() = 0;
+};
+
+class VideoConduitControlInterface : public MediaConduitControlInterface {
+ public:
+  virtual Canonical<Ssrcs>& CanonicalLocalVideoRtxSsrcs() = 0;
+  virtual Canonical<Ssrc>& CanonicalRemoteVideoRtxSsrc() = 0;
+  virtual Canonical<Maybe<VideoCodecConfig>>& CanonicalVideoSendCodec() = 0;
+  virtual Canonical<Maybe<RtpRtcpConfig>>&
+  CanonicalVideoSendRtpRtcpConfig() = 0;
+  virtual Canonical<std::vector<VideoCodecConfig>>&
+  CanonicalVideoRecvCodecs() = 0;
+  virtual Canonical<Maybe<RtpRtcpConfig>>&
+  CanonicalVideoRecvRtpRtcpConfig() = 0;
+  virtual Canonical<webrtc::VideoCodecMode>& CanonicalVideoCodecMode() = 0;
+};
+
+}  // namespace mozilla
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h b/dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h
new file mode 100644
index 0000000000..34487d77a0
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitErrors.h
@@ -0,0 +1,46 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/.
+ */
+
+#ifndef MEDIA_SESSION_ERRORS_H_
+#define MEDIA_SESSION_ERRORS_H_
+
+namespace mozilla {
+enum MediaConduitErrorCode {
+  kMediaConduitNoError = 0,  // 0 for Success, greater than 0 implies error
+  kMediaConduitSessionNotInited =
+      10100,  // Session not initialized. 10100 serves as
+              // base for the conduit errors
+  kMediaConduitMalformedArgument,  // Malformed input to Conduit API
+  kMediaConduitCaptureError,  // WebRTC capture APIs failed
+  kMediaConduitInvalidSendCodec,  // Wrong Send codec
+  kMediaConduitInvalidReceiveCodec,  // Wrong Recv Codec
+  kMediaConduitCodecInUse,  // Already applied Codec
+  kMediaConduitInvalidRenderer,  // Null or Wrong Renderer object
+  kMediaConduitRendererFail,  // Add Render called multiple times
+  kMediaConduitSendingAlready,  // Engine already transmitting
+  kMediaConduitReceivingAlready,  // Engine already receiving
+  kMediaConduitTransportRegistrationFail,  // Null or wrong transport interface
+  kMediaConduitInvalidTransport,  // Null or wrong transport interface
+  kMediaConduitChannelError,  // Configuration Error
+  kMediaConduitSocketError,  // Media Engine transport socket error
+  kMediaConduitRTPRTCPModuleError,  // Couldn't start RTP/RTCP processing
+  kMediaConduitRTPProcessingFailed,  // Processing incoming RTP frame failed
+  kMediaConduitUnknownError,  // More information can be found in logs
+  kMediaConduitExternalRecordingError,  // Couldn't start external recording
+  kMediaConduitRecordingError,  // Runtime recording error
+  kMediaConduitExternalPlayoutError,  // Couldn't start external playout
+  kMediaConduitPlayoutError,  // Runtime playout error
+  kMediaConduitMTUError,  // Can't set MTU
+  kMediaConduitRTCPStatusError,  // Can't set RTCP mode
+  kMediaConduitKeyFrameRequestError,  // Can't set KeyFrameRequest mode
+  kMediaConduitNACKStatusError,  // Can't set NACK mode
+  kMediaConduitTMMBRStatusError,  // Can't set TMMBR mode
+  kMediaConduitFECStatusError,  // Can't set FEC mode
+  kMediaConduitHybridNACKFECStatusError,  // Can't set Hybrid NACK / FEC mode
+  kMediaConduitVideoSendStreamError  // WebRTC video send stream failure
+};
+
+}
+
+#endif
diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp
new file mode 100644
index 0000000000..7e337953b5
--- /dev/null
+++ b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.cpp
@@ -0,0 +1,152 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaConduitInterface.h"
+
+#include "nsTArray.h"
+#include "mozilla/Assertions.h"
+#include "MainThreadUtils.h"
+#include "SystemTime.h"
+
+#include "system_wrappers/include/clock.h"
+
+namespace mozilla {
+
+void MediaSessionConduit::GetRtpSources(
+    nsTArray<dom::RTCRtpSourceEntry>& outSources) const {
+  MOZ_ASSERT(NS_IsMainThread());
+  if (mSourcesUpdateNeeded) {
+    UpdateRtpSources(GetUpstreamRtpSources());
+    OnSourcesUpdated();
+  }
+  outSources.Clear();
+  for (auto& [key, entry] : mSourcesCache) {
+    (void)key;
+    outSources.AppendElement(entry);
+  }
+
+  struct TimestampComparator {
+    bool LessThan(const dom::RTCRtpSourceEntry& aLhs,
+                  const dom::RTCRtpSourceEntry& aRhs) const {
+      // Sort descending!
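+      // (Newest entries first; getContributingSources()/
+      // getSynchronizationSources() are expected to expose sources in
+      // descending timestamp order.)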
+ return aLhs.mTimestamp > aRhs.mTimestamp; + } + + bool Equals(const dom::RTCRtpSourceEntry& aLhs, + const dom::RTCRtpSourceEntry& aRhs) const { + return aLhs.mTimestamp == aRhs.mTimestamp; + } + }; + + // *sigh* We have to re-sort this by JS timestamp; we can run into cases + // where the libwebrtc timestamps are not in exactly the same order as JS + // timestamps due to clock differences (wibbly-wobbly, timey-wimey stuff) + outSources.Sort(TimestampComparator()); +} + +static double rtpToDomAudioLevel(uint8_t aAudioLevel) { + if (aAudioLevel == 127) { + // Spec indicates that a value of 127 should be set to 0 + return 0; + } + + // All other values are calculated as 10^(-rfc_level/20) + return std::pow(10, -aAudioLevel / 20.0); +} + +void MediaSessionConduit::UpdateRtpSources( + const std::vector& aSources) const { + MOZ_ASSERT(NS_IsMainThread()); + // Empty out the cache; we'll copy things back as needed + auto cache = std::move(mSourcesCache); + + for (const auto& source : aSources) { + SourceKey key(source); + auto it = cache.find(key); + if (it != cache.end()) { + // This source entry was already in the cache, and should continue to be + // present in exactly the same form as before. This means we do _not_ + // want to perform the timestamp adjustment again, since it might yield a + // slightly different result. This is why we copy this entry from the old + // cache instead of simply rebuilding it, and is also why we key the + // cache based on timestamp (keying the cache based on timestamp also + // gets us the ordering we want, conveniently). + mSourcesCache[key] = it->second; + continue; + } + + // This is something we did not already have in the cache. + dom::RTCRtpSourceEntry domEntry; + domEntry.mSource = source.source_id(); + switch (source.source_type()) { + case webrtc::RtpSourceType::SSRC: + domEntry.mSourceType = dom::RTCRtpSourceEntryType::Synchronization; + break; + case webrtc::RtpSourceType::CSRC: + domEntry.mSourceType = dom::RTCRtpSourceEntryType::Contributing; + break; + default: + MOZ_CRASH("Unexpected RTCRtpSourceEntryType"); + } + + if (source.audio_level()) { + domEntry.mAudioLevel.Construct(rtpToDomAudioLevel(*source.audio_level())); + } + + // These timestamps are always **rounded** to milliseconds. That means they + // can jump up to half a millisecond into the future. We compensate for that + // here so that things seem consistent to js. + domEntry.mTimestamp = + dom::RTCStatsTimestamp::FromRealtime( + GetTimestampMaker(), + webrtc::Timestamp::Millis(source.timestamp().ms()) - + webrtc::TimeDelta::Micros(500)) + .ToDom(); + domEntry.mRtpTimestamp = source.rtp_timestamp(); + mSourcesCache[key] = domEntry; + } +} + +void MediaSessionConduit::OnSourcesUpdated() const { + MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(mSourcesUpdateNeeded); + mSourcesUpdateNeeded = false; + // Reset the updateNeeded flag and clear the cache in a direct task, i.e., + // as soon as the current task has finished. 
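+  // In effect, every GetRtpSources() call made from the same task observes
+  // one consistent snapshot; the cache is invalidated once that task
+  // completes and is rebuilt lazily on the next call.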
+ AbstractThread::GetCurrent()->TailDispatcher().AddDirectTask( + NS_NewRunnableFunction( + __func__, [this, self = RefPtr(this)] { + mSourcesUpdateNeeded = true; + mSourcesCache.clear(); + })); +} + +void MediaSessionConduit::InsertAudioLevelForContributingSource( + const uint32_t aCsrcSource, const int64_t aTimestamp, + const uint32_t aRtpTimestamp, const bool aHasAudioLevel, + const uint8_t aAudioLevel) { + MOZ_ASSERT(NS_IsMainThread()); + + if (mSourcesUpdateNeeded) { + OnSourcesUpdated(); + } + + dom::RTCRtpSourceEntry domEntry; + domEntry.mSource = aCsrcSource; + domEntry.mSourceType = dom::RTCRtpSourceEntryType::Contributing; + domEntry.mTimestamp = aTimestamp; + domEntry.mRtpTimestamp = aRtpTimestamp; + if (aHasAudioLevel) { + domEntry.mAudioLevel.Construct(rtpToDomAudioLevel(aAudioLevel)); + } + + auto now = GetTimestampMaker().GetNow(); + webrtc::Timestamp convertedTimestamp = + now.ToRealtime() - webrtc::TimeDelta::Millis(now.ToDom() - aTimestamp); + + SourceKey key(convertedTimestamp.ms(), aCsrcSource); + mSourcesCache[key] = domEntry; +} + +} // namespace mozilla diff --git a/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h new file mode 100644 index 0000000000..0c0bda2879 --- /dev/null +++ b/dom/media/webrtc/libwebrtcglue/MediaConduitInterface.h @@ -0,0 +1,499 @@ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef MEDIA_CONDUIT_ABSTRACTION_ +#define MEDIA_CONDUIT_ABSTRACTION_ + +#include +#include +#include + +#include "CodecConfig.h" +#include "ImageContainer.h" +#include "jsapi/RTCStatsReport.h" +#include "MediaConduitErrors.h" +#include "mozilla/media/MediaUtils.h" +#include "mozilla/MozPromise.h" +#include "WebrtcVideoCodecFactory.h" +#include "nsTArray.h" +#include "mozilla/dom/RTCRtpSourcesBinding.h" +#include "PerformanceRecorder.h" +#include "transport/mediapacket.h" +#include "MediaConduitControl.h" + +// libwebrtc includes +#include "api/audio/audio_frame.h" +#include "api/call/transport.h" +#include "api/rtp_headers.h" +#include "api/rtp_parameters.h" +#include "api/transport/rtp/rtp_source.h" +#include "api/video/video_frame_buffer.h" +#include "call/audio_receive_stream.h" +#include "call/audio_send_stream.h" +#include "call/call_basic_stats.h" +#include "call/video_receive_stream.h" +#include "call/video_send_stream.h" +#include "rtc_base/copy_on_write_buffer.h" + +namespace webrtc { +class RtpPacketReceived; +class VideoFrame; +} // namespace webrtc + +namespace mozilla { +namespace dom { +struct RTCRtpSourceEntry; +} + +namespace dom { +struct RTCRtpSourceEntry; +} + +enum class MediaSessionConduitLocalDirection : int { kSend, kRecv }; + +class VideoSessionConduit; +class AudioSessionConduit; +class WebrtcCallWrapper; +class FrameTransformerProxy; + +/** + * 1. Abstract renderer for video data + * 2. This class acts as abstract interface between the video-engine and + * video-engine agnostic renderer implementation. + * 3. Concrete implementation of this interface is responsible for + * processing and/or rendering the obtained raw video frame to appropriate + * output , say,