path: root/third_party/libwebrtc/examples/unityplugin
Diffstat (limited to 'third_party/libwebrtc/examples/unityplugin')
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION                      33
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/DEPS                                      4
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/README                                  309
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc                88
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/class_reference_holder.h                 38
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java    68
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/jni_onload.cc                            42
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc               586
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h                135
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc                    196
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h                     108
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/video_observer.cc                        44
-rw-r--r--  third_party/libwebrtc/examples/unityplugin/video_observer.h                         35
13 files changed, 1686 insertions(+), 0 deletions(-)
diff --git a/third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION b/third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION
new file mode 100644
index 0000000000..d5f7399bca
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/ANDROID_INSTRUCTION
@@ -0,0 +1,33 @@
+Instructions for running webrtc_unity_plugin on Android Unity
+
+1. On a Linux machine, compile the target webrtc_unity_plugin.
+   Check out the WebRTC codebase: fetch --nohooks webrtc_android
+   If you already have a Linux checkout, add target_os = ["android"] to the .gclient file.
+   Run gclient sync
+   Run gn args out/Android and again set target_os = "android" in args.gn (see the sketch below)
+   Run ninja -C out/Android webrtc_unity_plugin
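+
+   For reference, the relevant settings in the two configuration files typically
+   look like this (a sketch; your setup may contain additional arguments):
+
+     # .gclient (at the checkout root)
+     target_os = ["android"]
+
+     # out/Android/args.gn
+     target_os = "android"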
+
+2. On the Linux machine, build the target libwebrtc_unity in the WebRTC checkout. This is the Java code needed for WebRTC to work on Android (an example command is given below).
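+
+   For example, using the same output directory as in step 1:
+     ninja -C out/Android libwebrtc_unity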
+
+3. Copy libwebrtc_unity.jar and libwebrtc_unity_plugin.so into the Unity project, under the Assets/Plugins/Android folder.
+
+4. Rename libwebrtc_unity_plugin.so to libjingle_peerconnection_so.so. This is hacky, but it lets the Java code in libwebrtc_unity.jar find its JNI implementations. Correspondingly, in your C# wrapper script for the native plugin libjingle_peerconnection_so.so, the dll_path should be set to "jingle_peerconnection_so" (see the snippet below).
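+
+   For example, the path constant in the README's C# wrapper class would become:
+     private const string dllPath = "jingle_peerconnection_so";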
+
+5. In the Unity main scene's Start method, add the following code to initialize the Java environment for WebRTC (otherwise, WebRTC will not be able to access the audio device or camera from C++ code):
+
+#if UNITY_ANDROID
+ AndroidJavaClass playerClass = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
+ AndroidJavaObject activity = playerClass.GetStatic<AndroidJavaObject>("currentActivity");
+ AndroidJavaClass utilityClass = new AndroidJavaClass("org.webrtc.UnityUtility");
+ utilityClass.CallStatic("InitializePeerConncectionFactory", new object[1] { activity });
+#endif
+
+6. Compile the Unity project into an APK, and decompile the APK using apktool, which you can download from https://ibotpeaches.github.io/Apktool/
+   Run apktool d apkname.apk
+   Then copy the AndroidManifest.xml from the decompiled folder into the Assets/Plugins/Android folder, and add the following two lines:
+ <uses-permission android:name="android.permission.RECORD_AUDIO" />
+ <uses-permission android:name="android.permission.CAMERA" />
+
+The purpose of using apktool is to get a well-formed AndroidManifest.xml. If you know how to write the manifest file from scratch, you can skip apktool.
+
+7. Compile the Unity project into an APK again and deploy it to an Android device (for example with adb, as shown below).
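+
+   For example, with the Android SDK's adb tool (yourapp.apk is a placeholder for
+   the APK produced by Unity):
+     adb install -r yourapp.apk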
diff --git a/third_party/libwebrtc/examples/unityplugin/DEPS b/third_party/libwebrtc/examples/unityplugin/DEPS
new file mode 100644
index 0000000000..604005ac73
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+modules/utility",
+ "+sdk",
+]
diff --git a/third_party/libwebrtc/examples/unityplugin/README b/third_party/libwebrtc/examples/unityplugin/README
new file mode 100644
index 0000000000..da8f07aa11
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/README
@@ -0,0 +1,309 @@
+This directory contains an example Unity native plugin for Windows and Android.
+
+The APIs use Platform Invoke (P/Invoke), as required for Unity native plugins.
+The plugin DLL can also be used by Windows C# applications other than Unity.
+
+For detailed build instructions on Android, see ANDROID_INSTRUCTION.
+
+An example of wrapping the native plugin in a C# managed class in Unity is given below:
+
+using System;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+
+namespace SimplePeerConnectionM {
+  // A class representing an ICE candidate.
+ public class IceCandidate {
+ public IceCandidate(string candidate, int sdpMlineIndex, string sdpMid) {
+ mCandidate = candidate;
+ mSdpMlineIndex = sdpMlineIndex;
+ mSdpMid = sdpMid;
+ }
+ string mCandidate;
+ int mSdpMlineIndex;
+ string mSdpMid;
+
+ public string Candidate {
+ get { return mCandidate; }
+ set { mCandidate = value; }
+ }
+
+ public int SdpMlineIndex {
+ get { return mSdpMlineIndex; }
+ set { mSdpMlineIndex = value; }
+ }
+
+ public string SdpMid {
+ get { return mSdpMid; }
+ set { mSdpMid = value; }
+ }
+ }
+
+  // A managed wrapper class for the native C-style peer connection APIs.
+ public class PeerConnectionM {
+ private const string dllPath = "webrtc_unity_plugin";
+
+    // Create a peer connection with the given TURN servers. The last argument
+    // maps to the native mandatory_receive_video parameter.
+    [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+    private static extern int CreatePeerConnection(string[] turnUrls, int noOfUrls,
+        string username, string credential, bool mandatoryReceiveVideo);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool ClosePeerConnection(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool AddStream(int peerConnectionId, bool audioOnly);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool AddDataChannel(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool CreateOffer(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool CreateAnswer(int peerConnectionId);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool SendDataViaDataChannel(int peerConnectionId, string data);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool SetAudioControl(int peerConnectionId, bool isMute, bool isRecord);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void LocalDataChannelReadyInternalDelegate();
+ public delegate void LocalDataChannelReadyDelegate(int id);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnLocalDataChannelReady(
+ int peerConnectionId, LocalDataChannelReadyInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void DataFromDataChannelReadyInternalDelegate(string s);
+ public delegate void DataFromDataChannelReadyDelegate(int id, string s);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnDataFromDataChannelReady(
+ int peerConnectionId, DataFromDataChannelReadyInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void FailureMessageInternalDelegate(string msg);
+ public delegate void FailureMessageDelegate(int id, string msg);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnFailure(int peerConnectionId,
+ FailureMessageInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void AudioBusReadyInternalDelegate(IntPtr data, int bitsPerSample,
+ int sampleRate, int numberOfChannels, int numberOfFrames);
+ public delegate void AudioBusReadyDelegate(int id, IntPtr data, int bitsPerSample,
+ int sampleRate, int numberOfChannels, int numberOfFrames);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnAudioBusReady(int peerConnectionId,
+ AudioBusReadyInternalDelegate callback);
+
+ // Video callbacks.
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void I420FrameReadyInternalDelegate(
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height);
+ public delegate void I420FrameReadyDelegate(int id,
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnLocalI420FrameReady(int peerConnectionId,
+ I420FrameReadyInternalDelegate callback);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnRemoteI420FrameReady(int peerConnectionId,
+ I420FrameReadyInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void LocalSdpReadytoSendInternalDelegate(string type, string sdp);
+ public delegate void LocalSdpReadytoSendDelegate(int id, string type, string sdp);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnLocalSdpReadytoSend(int peerConnectionId,
+ LocalSdpReadytoSendInternalDelegate callback);
+
+ [UnmanagedFunctionPointer(CallingConvention.Cdecl)]
+ private delegate void IceCandidateReadytoSendInternalDelegate(
+ string candidate, int sdpMlineIndex, string sdpMid);
+ public delegate void IceCandidateReadytoSendDelegate(
+ int id, string candidate, int sdpMlineIndex, string sdpMid);
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool RegisterOnIceCandidateReadytoSend(
+ int peerConnectionId, IceCandidateReadytoSendInternalDelegate callback);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool SetRemoteDescription(int peerConnectionId, string type, string sdp);
+
+ [DllImport(dllPath, CallingConvention = CallingConvention.Cdecl)]
+ private static extern bool AddIceCandidate(int peerConnectionId, string sdp,
+ int sdpMlineindex, string sdpMid);
+
+    public PeerConnectionM(List<string> turnUrls, string username, string credential,
+                           bool mandatoryReceiveVideo) {
+      string[] urls = turnUrls != null ? turnUrls.ToArray() : null;
+      int length = turnUrls != null ? turnUrls.Count : 0;
+      mPeerConnectionId =
+          CreatePeerConnection(urls, length, username, credential, mandatoryReceiveVideo);
+      RegisterCallbacks();
+    }
+
+ public void ClosePeerConnection() {
+ ClosePeerConnection(mPeerConnectionId);
+ mPeerConnectionId = -1;
+ }
+
+    // Returns -1 if the peer connection is not available.
+ public int GetUniqueId() {
+ return mPeerConnectionId;
+ }
+
+ public void AddStream(bool audioOnly) {
+ AddStream(mPeerConnectionId, audioOnly);
+ }
+
+ public void AddDataChannel() {
+ AddDataChannel(mPeerConnectionId);
+ }
+
+ public void CreateOffer() {
+ CreateOffer(mPeerConnectionId);
+ }
+
+ public void CreateAnswer() {
+ CreateAnswer(mPeerConnectionId);
+ }
+
+ public void SendDataViaDataChannel(string data) {
+ SendDataViaDataChannel(mPeerConnectionId, data);
+ }
+
+ public void SetAudioControl(bool isMute, bool isRecord) {
+ SetAudioControl(mPeerConnectionId, isMute, isRecord);
+ }
+
+ public void SetRemoteDescription(string type, string sdp) {
+ SetRemoteDescription(mPeerConnectionId, type, sdp);
+ }
+
+ public void AddIceCandidate(string candidate, int sdpMlineindex, string sdpMid) {
+ AddIceCandidate(mPeerConnectionId, candidate, sdpMlineindex, sdpMid);
+ }
+
+ private void RegisterCallbacks() {
+ localDataChannelReadyDelegate = new LocalDataChannelReadyInternalDelegate(
+ RaiseLocalDataChannelReady);
+ RegisterOnLocalDataChannelReady(mPeerConnectionId, localDataChannelReadyDelegate);
+
+ dataFromDataChannelReadyDelegate = new DataFromDataChannelReadyInternalDelegate(
+ RaiseDataFromDataChannelReady);
+ RegisterOnDataFromDataChannelReady(mPeerConnectionId, dataFromDataChannelReadyDelegate);
+
+ failureMessageDelegate = new FailureMessageInternalDelegate(RaiseFailureMessage);
+ RegisterOnFailure(mPeerConnectionId, failureMessageDelegate);
+
+ audioBusReadyDelegate = new AudioBusReadyInternalDelegate(RaiseAudioBusReady);
+ RegisterOnAudioBusReady(mPeerConnectionId, audioBusReadyDelegate);
+
+ localI420FrameReadyDelegate = new I420FrameReadyInternalDelegate(
+ RaiseLocalVideoFrameReady);
+ RegisterOnLocalI420FrameReady(mPeerConnectionId, localI420FrameReadyDelegate);
+
+ remoteI420FrameReadyDelegate = new I420FrameReadyInternalDelegate(
+ RaiseRemoteVideoFrameReady);
+ RegisterOnRemoteI420FrameReady(mPeerConnectionId, remoteI420FrameReadyDelegate);
+
+ localSdpReadytoSendDelegate = new LocalSdpReadytoSendInternalDelegate(
+ RaiseLocalSdpReadytoSend);
+ RegisterOnLocalSdpReadytoSend(mPeerConnectionId, localSdpReadytoSendDelegate);
+
+ iceCandidateReadytoSendDelegate =
+ new IceCandidateReadytoSendInternalDelegate(RaiseIceCandidateReadytoSend);
+ RegisterOnIceCandidateReadytoSend(
+ mPeerConnectionId, iceCandidateReadytoSendDelegate);
+ }
+
+ private void RaiseLocalDataChannelReady() {
+ if (OnLocalDataChannelReady != null)
+ OnLocalDataChannelReady(mPeerConnectionId);
+ }
+
+ private void RaiseDataFromDataChannelReady(string data) {
+ if (OnDataFromDataChannelReady != null)
+ OnDataFromDataChannelReady(mPeerConnectionId, data);
+ }
+
+ private void RaiseFailureMessage(string msg) {
+ if (OnFailureMessage != null)
+ OnFailureMessage(mPeerConnectionId, msg);
+ }
+
+ private void RaiseAudioBusReady(IntPtr data, int bitsPerSample,
+ int sampleRate, int numberOfChannels, int numberOfFrames) {
+ if (OnAudioBusReady != null)
+ OnAudioBusReady(mPeerConnectionId, data, bitsPerSample, sampleRate,
+ numberOfChannels, numberOfFrames);
+ }
+
+ private void RaiseLocalVideoFrameReady(
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height) {
+ if (OnLocalVideoFrameReady != null)
+ OnLocalVideoFrameReady(mPeerConnectionId, dataY, dataU, dataV, strideY, strideU, strideV,
+ width, height);
+ }
+
+ private void RaiseRemoteVideoFrameReady(
+ IntPtr dataY, IntPtr dataU, IntPtr dataV,
+ int strideY, int strideU, int strideV,
+ uint width, uint height) {
+ if (OnRemoteVideoFrameReady != null)
+ OnRemoteVideoFrameReady(mPeerConnectionId, dataY, dataU, dataV, strideY, strideU, strideV,
+ width, height);
+ }
+
+
+ private void RaiseLocalSdpReadytoSend(string type, string sdp) {
+ if (OnLocalSdpReadytoSend != null)
+ OnLocalSdpReadytoSend(mPeerConnectionId, type, sdp);
+ }
+
+ private void RaiseIceCandidateReadytoSend(string candidate, int sdpMlineIndex, string sdpMid) {
+ if (OnIceCandidateReadytoSend != null)
+ OnIceCandidateReadytoSend(mPeerConnectionId, candidate, sdpMlineIndex, sdpMid);
+ }
+
+ public void AddQueuedIceCandidate(List<IceCandidate> iceCandidateQueue) {
+ if (iceCandidateQueue != null) {
+ foreach (IceCandidate ic in iceCandidateQueue) {
+ AddIceCandidate(mPeerConnectionId, ic.Candidate, ic.SdpMlineIndex, ic.SdpMid);
+ }
+ }
+ }
+
+ private LocalDataChannelReadyInternalDelegate localDataChannelReadyDelegate = null;
+ public event LocalDataChannelReadyDelegate OnLocalDataChannelReady;
+
+ private DataFromDataChannelReadyInternalDelegate dataFromDataChannelReadyDelegate = null;
+ public event DataFromDataChannelReadyDelegate OnDataFromDataChannelReady;
+
+ private FailureMessageInternalDelegate failureMessageDelegate = null;
+ public event FailureMessageDelegate OnFailureMessage;
+
+ private AudioBusReadyInternalDelegate audioBusReadyDelegate = null;
+ public event AudioBusReadyDelegate OnAudioBusReady;
+
+ private I420FrameReadyInternalDelegate localI420FrameReadyDelegate = null;
+ public event I420FrameReadyDelegate OnLocalVideoFrameReady;
+
+ private I420FrameReadyInternalDelegate remoteI420FrameReadyDelegate = null;
+ public event I420FrameReadyDelegate OnRemoteVideoFrameReady;
+
+ private LocalSdpReadytoSendInternalDelegate localSdpReadytoSendDelegate = null;
+ public event LocalSdpReadytoSendDelegate OnLocalSdpReadytoSend;
+
+ private IceCandidateReadytoSendInternalDelegate iceCandidateReadytoSendDelegate = null;
+ public event IceCandidateReadytoSendDelegate OnIceCandidateReadytoSend;
+
+ private int mPeerConnectionId = -1;
+ }
+}
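+
+A minimal usage sketch (your own signaling layer is assumed; SendToRemotePeer and
+the TURN server URL below are placeholders, not part of the plugin API):
+
+  var pc = new PeerConnectionM(
+      new List<string> { "turn:turn.example.com:3478" }, "user", "pass", false);
+
+  pc.OnLocalSdpReadytoSend += (id, type, sdp) => {
+    // Deliver the local offer/answer to the remote peer over your signaling channel.
+    SendToRemotePeer(type, sdp);
+  };
+  pc.OnIceCandidateReadytoSend += (id, candidate, sdpMlineIndex, sdpMid) => {
+    SendToRemotePeer(candidate, sdpMlineIndex, sdpMid);
+  };
+
+  pc.AddStream(false /*audioOnly*/);
+  pc.CreateOffer();
+
+  // When the remote answer and candidates arrive over signaling:
+  //   pc.SetRemoteDescription(type, sdp);
+  //   pc.AddIceCandidate(candidate, sdpMlineIndex, sdpMid);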
diff --git a/third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc
new file mode 100644
index 0000000000..00ca772e76
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.cc
@@ -0,0 +1,88 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+#include "examples/unityplugin/class_reference_holder.h"
+
+#include <utility>
+
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace unity_plugin {
+
+// ClassReferenceHolder holds global references to the Java classes used by this plugin.
+class ClassReferenceHolder {
+ public:
+ explicit ClassReferenceHolder(JNIEnv* jni);
+ ~ClassReferenceHolder();
+
+ void FreeReferences(JNIEnv* jni);
+ jclass GetClass(const std::string& name);
+
+ void LoadClass(JNIEnv* jni, const std::string& name);
+
+ private:
+ std::map<std::string, jclass> classes_;
+};
+
+// Allocated in LoadGlobalClassReferenceHolder(),
+// freed in FreeGlobalClassReferenceHolder().
+static ClassReferenceHolder* g_class_reference_holder = nullptr;
+
+void LoadGlobalClassReferenceHolder() {
+ RTC_CHECK(g_class_reference_holder == nullptr);
+ g_class_reference_holder = new ClassReferenceHolder(webrtc::jni::GetEnv());
+}
+
+void FreeGlobalClassReferenceHolder() {
+ g_class_reference_holder->FreeReferences(
+ webrtc::jni::AttachCurrentThreadIfNeeded());
+ delete g_class_reference_holder;
+ g_class_reference_holder = nullptr;
+}
+
+ClassReferenceHolder::ClassReferenceHolder(JNIEnv* jni) {
+ LoadClass(jni, "org/webrtc/UnityUtility");
+}
+
+ClassReferenceHolder::~ClassReferenceHolder() {
+ RTC_CHECK(classes_.empty()) << "Must call FreeReferences() before dtor!";
+}
+
+void ClassReferenceHolder::FreeReferences(JNIEnv* jni) {
+ for (std::map<std::string, jclass>::const_iterator it = classes_.begin();
+ it != classes_.end(); ++it) {
+ jni->DeleteGlobalRef(it->second);
+ }
+ classes_.clear();
+}
+
+jclass ClassReferenceHolder::GetClass(const std::string& name) {
+ std::map<std::string, jclass>::iterator it = classes_.find(name);
+ RTC_CHECK(it != classes_.end()) << "Unexpected GetClass() call for: " << name;
+ return it->second;
+}
+
+void ClassReferenceHolder::LoadClass(JNIEnv* jni, const std::string& name) {
+ jclass localRef = jni->FindClass(name.c_str());
+ CHECK_EXCEPTION(jni) << "error during FindClass: " << name;
+ RTC_CHECK(localRef) << name;
+ jclass globalRef = reinterpret_cast<jclass>(jni->NewGlobalRef(localRef));
+ CHECK_EXCEPTION(jni) << "error during NewGlobalRef: " << name;
+ RTC_CHECK(globalRef) << name;
+ bool inserted = classes_.insert(std::make_pair(name, globalRef)).second;
+ RTC_CHECK(inserted) << "Duplicate class name: " << name;
+}
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name) {
+ return g_class_reference_holder->GetClass(name);
+}
+
+} // namespace unity_plugin
diff --git a/third_party/libwebrtc/examples/unityplugin/class_reference_holder.h b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.h
new file mode 100644
index 0000000000..884d471ceb
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/class_reference_holder.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This is a supplement to webrtc::jni::ClassReferenceHolder.
+// The purpose of this ClassReferenceHolder is to load the
+// example-specific Java classes on the JNI C++ side, so that our
+// C++ code can call those Java functions.
+
+#ifndef EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_
+#define EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_
+
+#include <jni.h>
+
+#include <map>
+#include <string>
+#include <vector>
+
+namespace unity_plugin {
+
+// LoadGlobalClassReferenceHolder must be called in JNI_OnLoad.
+void LoadGlobalClassReferenceHolder();
+// FreeGlobalClassReferenceHolder must be called in JNI_OnUnLoad.
+void FreeGlobalClassReferenceHolder();
+
+// Returns a global reference guaranteed to be valid for the lifetime of the
+// process.
+jclass FindClass(JNIEnv* jni, const char* name);
+
+} // namespace unity_plugin
+
+#endif // EXAMPLES_UNITYPLUGIN_CLASS_REFERENCE_HOLDER_H_
diff --git a/third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java b/third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
new file mode 100644
index 0000000000..bd8bbfa449
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/java/src/org/webrtc/UnityUtility.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc;
+
+import android.content.Context;
+import androidx.annotation.Nullable;
+
+public class UnityUtility {
+ private static final String VIDEO_CAPTURER_THREAD_NAME = "VideoCapturerThread";
+
+ public static SurfaceTextureHelper LoadSurfaceTextureHelper() {
+ final SurfaceTextureHelper surfaceTextureHelper =
+ SurfaceTextureHelper.create(VIDEO_CAPTURER_THREAD_NAME, null);
+ return surfaceTextureHelper;
+ }
+
+ private static boolean useCamera2() {
+ return Camera2Enumerator.isSupported(ContextUtils.getApplicationContext());
+ }
+
+ private static @Nullable VideoCapturer createCameraCapturer(CameraEnumerator enumerator) {
+ final String[] deviceNames = enumerator.getDeviceNames();
+
+ for (String deviceName : deviceNames) {
+ if (enumerator.isFrontFacing(deviceName)) {
+ VideoCapturer videoCapturer = enumerator.createCapturer(deviceName, null);
+
+ if (videoCapturer != null) {
+ return videoCapturer;
+ }
+ }
+ }
+
+ return null;
+ }
+
+ public static VideoCapturer LinkCamera(
+ long nativeTrackSource, SurfaceTextureHelper surfaceTextureHelper) {
+ VideoCapturer capturer =
+ createCameraCapturer(new Camera2Enumerator(ContextUtils.getApplicationContext()));
+
+ VideoSource videoSource = new VideoSource(nativeTrackSource);
+
+ capturer.initialize(surfaceTextureHelper, ContextUtils.getApplicationContext(),
+ videoSource.getCapturerObserver());
+
+ capturer.startCapture(720, 480, 30);
+ return capturer;
+ }
+
+ public static void StopCamera(VideoCapturer camera) throws InterruptedException {
+ camera.stopCapture();
+ camera.dispose();
+ }
+
+ public static void InitializePeerConncectionFactory(Context context) throws InterruptedException {
+ PeerConnectionFactory.initialize(
+ PeerConnectionFactory.InitializationOptions.builder(context).createInitializationOptions());
+ }
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/jni_onload.cc b/third_party/libwebrtc/examples/unityplugin/jni_onload.cc
new file mode 100644
index 0000000000..b9c92d5ef4
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/jni_onload.cc
@@ -0,0 +1,42 @@
+/*
+ * Copyright 2015 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <jni.h>
+#undef JNIEXPORT
+#define JNIEXPORT __attribute__((visibility("default")))
+
+#include "examples/unityplugin/class_reference_holder.h"
+#include "rtc_base/ssl_adapter.h"
+#include "sdk/android/native_api/jni/class_loader.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+extern "C" jint JNIEXPORT JNICALL JNI_OnLoad(JavaVM* jvm, void* reserved) {
+ jint ret = InitGlobalJniVariables(jvm);
+ RTC_DCHECK_GE(ret, 0);
+ if (ret < 0)
+ return -1;
+
+ RTC_CHECK(rtc::InitializeSSL()) << "Failed to InitializeSSL()";
+ webrtc::InitClassLoader(GetEnv());
+ unity_plugin::LoadGlobalClassReferenceHolder();
+
+ return ret;
+}
+
+extern "C" void JNIEXPORT JNICALL JNI_OnUnLoad(JavaVM* jvm, void* reserved) {
+ unity_plugin::FreeGlobalClassReferenceHolder();
+ RTC_CHECK(rtc::CleanupSSL()) << "Failed to CleanupSSL()";
+}
+
+} // namespace jni
+} // namespace webrtc
diff --git a/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc
new file mode 100644
index 0000000000..de49d5cd07
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.cc
@@ -0,0 +1,586 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/unityplugin/simple_peer_connection.h"
+
+#include <utility>
+
+#include "absl/memory/memory.h"
+#include "api/audio_codecs/builtin_audio_decoder_factory.h"
+#include "api/audio_codecs/builtin_audio_encoder_factory.h"
+#include "api/create_peerconnection_factory.h"
+#include "media/engine/internal_decoder_factory.h"
+#include "media/engine/internal_encoder_factory.h"
+#include "media/engine/multiplex_codec_factory.h"
+#include "modules/audio_device/include/audio_device.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "modules/video_capture/video_capture_factory.h"
+#include "pc/video_track_source.h"
+#include "test/vcm_capturer.h"
+
+#if defined(WEBRTC_ANDROID)
+#include "examples/unityplugin/class_reference_holder.h"
+#include "modules/utility/include/helpers_android.h"
+#include "sdk/android/src/jni/android_video_track_source.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+#endif
+
+// Names used for media stream ids.
+const char kAudioLabel[] = "audio_label";
+const char kVideoLabel[] = "video_label";
+const char kStreamId[] = "stream_id";
+
+namespace {
+static int g_peer_count = 0;
+static std::unique_ptr<rtc::Thread> g_worker_thread;
+static std::unique_ptr<rtc::Thread> g_signaling_thread;
+static rtc::scoped_refptr<webrtc::PeerConnectionFactoryInterface>
+ g_peer_connection_factory;
+#if defined(WEBRTC_ANDROID)
+// Android case: the video track does not own the capturer; it
+// relies on the app to dispose of the capturer when the peer
+// connection shuts down.
+static jobject g_camera = nullptr;
+#else
+class CapturerTrackSource : public webrtc::VideoTrackSource {
+ public:
+ static rtc::scoped_refptr<CapturerTrackSource> Create() {
+ const size_t kWidth = 640;
+ const size_t kHeight = 480;
+ const size_t kFps = 30;
+ const size_t kDeviceIndex = 0;
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer = absl::WrapUnique(
+ webrtc::test::VcmCapturer::Create(kWidth, kHeight, kFps, kDeviceIndex));
+ if (!capturer) {
+ return nullptr;
+ }
+ return rtc::make_ref_counted<CapturerTrackSource>(std::move(capturer));
+ }
+
+ protected:
+ explicit CapturerTrackSource(
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer)
+ : VideoTrackSource(/*remote=*/false), capturer_(std::move(capturer)) {}
+
+ private:
+ rtc::VideoSourceInterface<webrtc::VideoFrame>* source() override {
+ return capturer_.get();
+ }
+ std::unique_ptr<webrtc::test::VcmCapturer> capturer_;
+};
+
+#endif
+
+std::string GetEnvVarOrDefault(const char* env_var_name,
+ const char* default_value) {
+ std::string value;
+ const char* env_var = getenv(env_var_name);
+ if (env_var)
+ value = env_var;
+
+ if (value.empty())
+ value = default_value;
+
+ return value;
+}
+
+std::string GetPeerConnectionString() {
+ return GetEnvVarOrDefault("WEBRTC_CONNECT", "stun:stun.l.google.com:19302");
+}
+
+class DummySetSessionDescriptionObserver
+ : public webrtc::SetSessionDescriptionObserver {
+ public:
+ static rtc::scoped_refptr<DummySetSessionDescriptionObserver> Create() {
+ return rtc::make_ref_counted<DummySetSessionDescriptionObserver>();
+ }
+ virtual void OnSuccess() { RTC_LOG(LS_INFO) << __FUNCTION__; }
+ virtual void OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << ToString(error.type()) << ": "
+ << error.message();
+ }
+
+ protected:
+ DummySetSessionDescriptionObserver() {}
+ ~DummySetSessionDescriptionObserver() {}
+};
+
+} // namespace
+
+bool SimplePeerConnection::InitializePeerConnection(const char** turn_urls,
+ const int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool is_receiver) {
+ RTC_DCHECK(peer_connection_.get() == nullptr);
+
+ if (g_peer_connection_factory == nullptr) {
+ g_worker_thread = rtc::Thread::Create();
+ g_worker_thread->Start();
+ g_signaling_thread = rtc::Thread::Create();
+ g_signaling_thread->Start();
+
+ g_peer_connection_factory = webrtc::CreatePeerConnectionFactory(
+ g_worker_thread.get(), g_worker_thread.get(), g_signaling_thread.get(),
+ nullptr, webrtc::CreateBuiltinAudioEncoderFactory(),
+ webrtc::CreateBuiltinAudioDecoderFactory(),
+ std::unique_ptr<webrtc::VideoEncoderFactory>(
+ new webrtc::MultiplexEncoderFactory(
+ std::make_unique<webrtc::InternalEncoderFactory>())),
+ std::unique_ptr<webrtc::VideoDecoderFactory>(
+ new webrtc::MultiplexDecoderFactory(
+ std::make_unique<webrtc::InternalDecoderFactory>())),
+ nullptr, nullptr);
+ }
+ if (!g_peer_connection_factory.get()) {
+ DeletePeerConnection();
+ return false;
+ }
+
+ g_peer_count++;
+ if (!CreatePeerConnection(turn_urls, no_of_urls, username, credential)) {
+ DeletePeerConnection();
+ return false;
+ }
+
+ mandatory_receive_ = is_receiver;
+ return peer_connection_.get() != nullptr;
+}
+
+bool SimplePeerConnection::CreatePeerConnection(const char** turn_urls,
+ const int no_of_urls,
+ const char* username,
+ const char* credential) {
+ RTC_DCHECK(g_peer_connection_factory.get() != nullptr);
+ RTC_DCHECK(peer_connection_.get() == nullptr);
+
+ local_video_observer_.reset(new VideoObserver());
+ remote_video_observer_.reset(new VideoObserver());
+
+ // Add the turn server.
+ if (turn_urls != nullptr) {
+ if (no_of_urls > 0) {
+ webrtc::PeerConnectionInterface::IceServer turn_server;
+ for (int i = 0; i < no_of_urls; i++) {
+ std::string url(turn_urls[i]);
+ if (url.length() > 0)
+ turn_server.urls.push_back(turn_urls[i]);
+ }
+
+ std::string user_name(username);
+ if (user_name.length() > 0)
+ turn_server.username = username;
+
+ std::string password(credential);
+ if (password.length() > 0)
+ turn_server.password = credential;
+
+ config_.servers.push_back(turn_server);
+ }
+ }
+
+ // Add the stun server.
+ webrtc::PeerConnectionInterface::IceServer stun_server;
+ stun_server.uri = GetPeerConnectionString();
+ config_.servers.push_back(stun_server);
+
+ auto result = g_peer_connection_factory->CreatePeerConnectionOrError(
+ config_, webrtc::PeerConnectionDependencies(this));
+ if (!result.ok()) {
+ peer_connection_ = nullptr;
+ return false;
+ }
+ peer_connection_ = result.MoveValue();
+ return true;
+}
+
+void SimplePeerConnection::DeletePeerConnection() {
+ g_peer_count--;
+
+#if defined(WEBRTC_ANDROID)
+ if (g_camera) {
+ JNIEnv* env = webrtc::jni::GetEnv();
+ jclass pc_factory_class =
+ unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
+ jmethodID stop_camera_method = webrtc::GetStaticMethodID(
+ env, pc_factory_class, "StopCamera", "(Lorg/webrtc/VideoCapturer;)V");
+
+ env->CallStaticVoidMethod(pc_factory_class, stop_camera_method, g_camera);
+ CHECK_EXCEPTION(env);
+
+ g_camera = nullptr;
+ }
+#endif
+
+ CloseDataChannel();
+ peer_connection_ = nullptr;
+ active_streams_.clear();
+
+ if (g_peer_count == 0) {
+ g_peer_connection_factory = nullptr;
+ g_signaling_thread.reset();
+ g_worker_thread.reset();
+ }
+}
+
+bool SimplePeerConnection::CreateOffer() {
+ if (!peer_connection_.get())
+ return false;
+
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ if (mandatory_receive_) {
+ options.offer_to_receive_audio = true;
+ options.offer_to_receive_video = true;
+ }
+ peer_connection_->CreateOffer(this, options);
+ return true;
+}
+
+bool SimplePeerConnection::CreateAnswer() {
+ if (!peer_connection_.get())
+ return false;
+
+ webrtc::PeerConnectionInterface::RTCOfferAnswerOptions options;
+ if (mandatory_receive_) {
+ options.offer_to_receive_audio = true;
+ options.offer_to_receive_video = true;
+ }
+ peer_connection_->CreateAnswer(this, options);
+ return true;
+}
+
+void SimplePeerConnection::OnSuccess(
+ webrtc::SessionDescriptionInterface* desc) {
+ peer_connection_->SetLocalDescription(
+ DummySetSessionDescriptionObserver::Create().get(), desc);
+
+ std::string sdp;
+ desc->ToString(&sdp);
+
+ if (OnLocalSdpReady)
+ OnLocalSdpReady(desc->type().c_str(), sdp.c_str());
+}
+
+void SimplePeerConnection::OnFailure(webrtc::RTCError error) {
+ RTC_LOG(LS_ERROR) << ToString(error.type()) << ": " << error.message();
+
+ // TODO(hta): include error.type in the message
+ if (OnFailureMessage)
+ OnFailureMessage(error.message());
+}
+
+void SimplePeerConnection::OnIceCandidate(
+ const webrtc::IceCandidateInterface* candidate) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << candidate->sdp_mline_index();
+
+ std::string sdp;
+ if (!candidate->ToString(&sdp)) {
+ RTC_LOG(LS_ERROR) << "Failed to serialize candidate";
+ return;
+ }
+
+ if (OnIceCandidateReady)
+ OnIceCandidateReady(sdp.c_str(), candidate->sdp_mline_index(),
+ candidate->sdp_mid().c_str());
+}
+
+void SimplePeerConnection::RegisterOnLocalI420FrameReady(
+ I420FRAMEREADY_CALLBACK callback) {
+ if (local_video_observer_)
+ local_video_observer_->SetVideoCallback(callback);
+}
+
+void SimplePeerConnection::RegisterOnRemoteI420FrameReady(
+ I420FRAMEREADY_CALLBACK callback) {
+ if (remote_video_observer_)
+ remote_video_observer_->SetVideoCallback(callback);
+}
+
+void SimplePeerConnection::RegisterOnLocalDataChannelReady(
+ LOCALDATACHANNELREADY_CALLBACK callback) {
+ OnLocalDataChannelReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnDataFromDataChannelReady(
+ DATAFROMEDATECHANNELREADY_CALLBACK callback) {
+ OnDataFromDataChannelReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnFailure(FAILURE_CALLBACK callback) {
+ OnFailureMessage = callback;
+}
+
+void SimplePeerConnection::RegisterOnAudioBusReady(
+ AUDIOBUSREADY_CALLBACK callback) {
+ OnAudioReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnLocalSdpReadytoSend(
+ LOCALSDPREADYTOSEND_CALLBACK callback) {
+ OnLocalSdpReady = callback;
+}
+
+void SimplePeerConnection::RegisterOnIceCandidateReadytoSend(
+ ICECANDIDATEREADYTOSEND_CALLBACK callback) {
+ OnIceCandidateReady = callback;
+}
+
+bool SimplePeerConnection::SetRemoteDescription(const char* type,
+ const char* sdp) {
+ if (!peer_connection_)
+ return false;
+
+ std::string remote_desc(sdp);
+ std::string desc_type(type);
+ webrtc::SdpParseError error;
+ webrtc::SessionDescriptionInterface* session_description(
+ webrtc::CreateSessionDescription(desc_type, remote_desc, &error));
+ if (!session_description) {
+ RTC_LOG(LS_WARNING) << "Can't parse received session description message. "
+ "SdpParseError was: "
+ << error.description;
+ return false;
+ }
+ RTC_LOG(LS_INFO) << " Received session description :" << remote_desc;
+ peer_connection_->SetRemoteDescription(
+ DummySetSessionDescriptionObserver::Create().get(), session_description);
+
+ return true;
+}
+
+bool SimplePeerConnection::AddIceCandidate(const char* candidate,
+ const int sdp_mlineindex,
+ const char* sdp_mid) {
+ if (!peer_connection_)
+ return false;
+
+ webrtc::SdpParseError error;
+ std::unique_ptr<webrtc::IceCandidateInterface> ice_candidate(
+ webrtc::CreateIceCandidate(sdp_mid, sdp_mlineindex, candidate, &error));
+ if (!ice_candidate.get()) {
+ RTC_LOG(LS_WARNING) << "Can't parse received candidate message. "
+ "SdpParseError was: "
+ << error.description;
+ return false;
+ }
+ if (!peer_connection_->AddIceCandidate(ice_candidate.get())) {
+ RTC_LOG(LS_WARNING) << "Failed to apply the received candidate";
+ return false;
+ }
+ RTC_LOG(LS_INFO) << " Received candidate :" << candidate;
+ return true;
+}
+
+void SimplePeerConnection::SetAudioControl(bool is_mute, bool is_record) {
+ is_mute_audio_ = is_mute;
+ is_record_audio_ = is_record;
+
+ SetAudioControl();
+}
+
+void SimplePeerConnection::SetAudioControl() {
+ if (!remote_stream_)
+ return;
+ webrtc::AudioTrackVector tracks = remote_stream_->GetAudioTracks();
+ if (tracks.empty())
+ return;
+
+ rtc::scoped_refptr<webrtc::AudioTrackInterface>& audio_track = tracks[0];
+ if (is_record_audio_)
+ audio_track->AddSink(this);
+ else
+ audio_track->RemoveSink(this);
+
+ for (auto& track : tracks) {
+ if (is_mute_audio_)
+ track->set_enabled(false);
+ else
+ track->set_enabled(true);
+ }
+}
+
+void SimplePeerConnection::OnAddStream(
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) {
+ RTC_LOG(LS_INFO) << __FUNCTION__ << " " << stream->id();
+ remote_stream_ = stream;
+ if (remote_video_observer_ && !remote_stream_->GetVideoTracks().empty()) {
+ remote_stream_->GetVideoTracks()[0]->AddOrUpdateSink(
+ remote_video_observer_.get(), rtc::VideoSinkWants());
+ }
+ SetAudioControl();
+}
+
+void SimplePeerConnection::AddStreams(bool audio_only) {
+ if (active_streams_.find(kStreamId) != active_streams_.end())
+ return; // Already added.
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream =
+ g_peer_connection_factory->CreateLocalMediaStream(kStreamId);
+
+ rtc::scoped_refptr<webrtc::AudioTrackInterface> audio_track(
+ g_peer_connection_factory->CreateAudioTrack(
+ kAudioLabel,
+ g_peer_connection_factory->CreateAudioSource(cricket::AudioOptions())
+ .get()));
+ stream->AddTrack(audio_track);
+
+ if (!audio_only) {
+#if defined(WEBRTC_ANDROID)
+ JNIEnv* env = webrtc::jni::GetEnv();
+ jclass pc_factory_class =
+ unity_plugin::FindClass(env, "org/webrtc/UnityUtility");
+ jmethodID load_texture_helper_method = webrtc::GetStaticMethodID(
+ env, pc_factory_class, "LoadSurfaceTextureHelper",
+ "()Lorg/webrtc/SurfaceTextureHelper;");
+ jobject texture_helper = env->CallStaticObjectMethod(
+ pc_factory_class, load_texture_helper_method);
+ CHECK_EXCEPTION(env);
+ RTC_DCHECK(texture_helper != nullptr)
+ << "Cannot get the Surface Texture Helper.";
+
+ auto source = rtc::make_ref_counted<webrtc::jni::AndroidVideoTrackSource>(
+ g_signaling_thread.get(), env, /*is_screencast=*/false,
+ /*align_timestamps=*/true);
+
+ // link with VideoCapturer (Camera);
+ jmethodID link_camera_method = webrtc::GetStaticMethodID(
+ env, pc_factory_class, "LinkCamera",
+ "(JLorg/webrtc/SurfaceTextureHelper;)Lorg/webrtc/VideoCapturer;");
+ jobject camera_tmp =
+ env->CallStaticObjectMethod(pc_factory_class, link_camera_method,
+ (jlong)source.get(), texture_helper);
+ CHECK_EXCEPTION(env);
+ g_camera = (jobject)env->NewGlobalRef(camera_tmp);
+
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ g_peer_connection_factory->CreateVideoTrack(source, kVideoLabel));
+ stream->AddTrack(video_track);
+#else
+ rtc::scoped_refptr<CapturerTrackSource> video_device =
+ CapturerTrackSource::Create();
+ if (video_device) {
+ rtc::scoped_refptr<webrtc::VideoTrackInterface> video_track(
+ g_peer_connection_factory->CreateVideoTrack(video_device,
+ kVideoLabel));
+
+ stream->AddTrack(video_track);
+ }
+#endif
+ if (local_video_observer_ && !stream->GetVideoTracks().empty()) {
+ stream->GetVideoTracks()[0]->AddOrUpdateSink(local_video_observer_.get(),
+ rtc::VideoSinkWants());
+ }
+ }
+
+ if (!peer_connection_->AddStream(stream.get())) {
+ RTC_LOG(LS_ERROR) << "Adding stream to PeerConnection failed";
+ }
+
+ typedef std::pair<std::string,
+ rtc::scoped_refptr<webrtc::MediaStreamInterface>>
+ MediaStreamPair;
+ active_streams_.insert(MediaStreamPair(stream->id(), stream));
+}
+
+bool SimplePeerConnection::CreateDataChannel() {
+ struct webrtc::DataChannelInit init;
+ init.ordered = true;
+ init.reliable = true;
+ auto result = peer_connection_->CreateDataChannelOrError("Hello", &init);
+ if (result.ok()) {
+ data_channel_ = result.MoveValue();
+ data_channel_->RegisterObserver(this);
+    RTC_LOG(LS_INFO) << "Succeeded in creating the data channel";
+ return true;
+ } else {
+    RTC_LOG(LS_INFO) << "Failed to create the data channel";
+ return false;
+ }
+}
+
+void SimplePeerConnection::CloseDataChannel() {
+ if (data_channel_.get()) {
+ data_channel_->UnregisterObserver();
+ data_channel_->Close();
+ }
+ data_channel_ = nullptr;
+}
+
+bool SimplePeerConnection::SendDataViaDataChannel(const std::string& data) {
+ if (!data_channel_.get()) {
+ RTC_LOG(LS_INFO) << "Data channel is not established";
+ return false;
+ }
+ webrtc::DataBuffer buffer(data);
+ data_channel_->Send(buffer);
+ return true;
+}
+
+// PeerConnectionObserver implementation.
+void SimplePeerConnection::OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel) {
+ channel->RegisterObserver(this);
+}
+
+void SimplePeerConnection::OnStateChange() {
+ if (data_channel_) {
+ webrtc::DataChannelInterface::DataState state = data_channel_->state();
+ if (state == webrtc::DataChannelInterface::kOpen) {
+ if (OnLocalDataChannelReady)
+ OnLocalDataChannelReady();
+ RTC_LOG(LS_INFO) << "Data channel is open";
+ }
+ }
+}
+
+// A data buffer was successfully received.
+void SimplePeerConnection::OnMessage(const webrtc::DataBuffer& buffer) {
+ size_t size = buffer.data.size();
+ char* msg = new char[size + 1];
+ memcpy(msg, buffer.data.data(), size);
+ msg[size] = 0;
+ if (OnDataFromDataChannelReady)
+ OnDataFromDataChannelReady(msg);
+ delete[] msg;
+}
+
+// AudioTrackSinkInterface implementation.
+void SimplePeerConnection::OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) {
+ if (OnAudioReady)
+ OnAudioReady(audio_data, bits_per_sample, sample_rate,
+ static_cast<int>(number_of_channels),
+ static_cast<int>(number_of_frames));
+}
+
+std::vector<uint32_t> SimplePeerConnection::GetRemoteAudioTrackSsrcs() {
+ std::vector<rtc::scoped_refptr<webrtc::RtpReceiverInterface>> receivers =
+ peer_connection_->GetReceivers();
+
+ std::vector<uint32_t> ssrcs;
+ for (const auto& receiver : receivers) {
+ if (receiver->media_type() != cricket::MEDIA_TYPE_AUDIO)
+ continue;
+
+ std::vector<webrtc::RtpEncodingParameters> params =
+ receiver->GetParameters().encodings;
+
+ for (const auto& param : params) {
+ uint32_t ssrc = param.ssrc.value_or(0);
+ if (ssrc > 0)
+ ssrcs.push_back(ssrc);
+ }
+ }
+
+ return ssrcs;
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h
new file mode 100644
index 0000000000..de652ef118
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/simple_peer_connection.h
@@ -0,0 +1,135 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_
+#define EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "api/data_channel_interface.h"
+#include "api/media_stream_interface.h"
+#include "api/peer_connection_interface.h"
+#include "examples/unityplugin/unity_plugin_apis.h"
+#include "examples/unityplugin/video_observer.h"
+
+class SimplePeerConnection : public webrtc::PeerConnectionObserver,
+ public webrtc::CreateSessionDescriptionObserver,
+ public webrtc::DataChannelObserver,
+ public webrtc::AudioTrackSinkInterface {
+ public:
+ SimplePeerConnection() {}
+ ~SimplePeerConnection() {}
+
+ bool InitializePeerConnection(const char** turn_urls,
+ int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool is_receiver);
+ void DeletePeerConnection();
+ void AddStreams(bool audio_only);
+ bool CreateDataChannel();
+ bool CreateOffer();
+ bool CreateAnswer();
+ bool SendDataViaDataChannel(const std::string& data);
+ void SetAudioControl(bool is_mute, bool is_record);
+
+ // Register callback functions.
+ void RegisterOnLocalI420FrameReady(I420FRAMEREADY_CALLBACK callback);
+ void RegisterOnRemoteI420FrameReady(I420FRAMEREADY_CALLBACK callback);
+ void RegisterOnLocalDataChannelReady(LOCALDATACHANNELREADY_CALLBACK callback);
+ void RegisterOnDataFromDataChannelReady(
+ DATAFROMEDATECHANNELREADY_CALLBACK callback);
+ void RegisterOnFailure(FAILURE_CALLBACK callback);
+ void RegisterOnAudioBusReady(AUDIOBUSREADY_CALLBACK callback);
+ void RegisterOnLocalSdpReadytoSend(LOCALSDPREADYTOSEND_CALLBACK callback);
+ void RegisterOnIceCandidateReadytoSend(
+ ICECANDIDATEREADYTOSEND_CALLBACK callback);
+ bool SetRemoteDescription(const char* type, const char* sdp);
+ bool AddIceCandidate(const char* sdp,
+ int sdp_mlineindex,
+ const char* sdp_mid);
+
+ protected:
+  // Create a peer connection and add the TURN server info to the configuration.
+ bool CreatePeerConnection(const char** turn_urls,
+ int no_of_urls,
+ const char* username,
+ const char* credential);
+ void CloseDataChannel();
+ void SetAudioControl();
+
+ // PeerConnectionObserver implementation.
+ void OnSignalingChange(
+ webrtc::PeerConnectionInterface::SignalingState new_state) override {}
+ void OnAddStream(
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override;
+ void OnRemoveStream(
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> stream) override {}
+ void OnDataChannel(
+ rtc::scoped_refptr<webrtc::DataChannelInterface> channel) override;
+ void OnRenegotiationNeeded() override {}
+ void OnIceConnectionChange(
+ webrtc::PeerConnectionInterface::IceConnectionState new_state) override {}
+ void OnIceGatheringChange(
+ webrtc::PeerConnectionInterface::IceGatheringState new_state) override {}
+ void OnIceCandidate(const webrtc::IceCandidateInterface* candidate) override;
+ void OnIceConnectionReceivingChange(bool receiving) override {}
+
+ // CreateSessionDescriptionObserver implementation.
+ void OnSuccess(webrtc::SessionDescriptionInterface* desc) override;
+ void OnFailure(webrtc::RTCError error) override;
+
+ // DataChannelObserver implementation.
+ void OnStateChange() override;
+ void OnMessage(const webrtc::DataBuffer& buffer) override;
+
+ // AudioTrackSinkInterface implementation.
+ void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) override;
+
+ // Get remote audio tracks ssrcs.
+ std::vector<uint32_t> GetRemoteAudioTrackSsrcs();
+
+ private:
+ rtc::scoped_refptr<webrtc::PeerConnectionInterface> peer_connection_;
+ rtc::scoped_refptr<webrtc::DataChannelInterface> data_channel_;
+ std::map<std::string, rtc::scoped_refptr<webrtc::MediaStreamInterface> >
+ active_streams_;
+
+ std::unique_ptr<VideoObserver> local_video_observer_;
+ std::unique_ptr<VideoObserver> remote_video_observer_;
+
+ rtc::scoped_refptr<webrtc::MediaStreamInterface> remote_stream_ = nullptr;
+ webrtc::PeerConnectionInterface::RTCConfiguration config_;
+
+ LOCALDATACHANNELREADY_CALLBACK OnLocalDataChannelReady = nullptr;
+ DATAFROMEDATECHANNELREADY_CALLBACK OnDataFromDataChannelReady = nullptr;
+ FAILURE_CALLBACK OnFailureMessage = nullptr;
+ AUDIOBUSREADY_CALLBACK OnAudioReady = nullptr;
+
+ LOCALSDPREADYTOSEND_CALLBACK OnLocalSdpReady = nullptr;
+ ICECANDIDATEREADYTOSEND_CALLBACK OnIceCandidateReady = nullptr;
+
+ bool is_mute_audio_ = false;
+ bool is_record_audio_ = false;
+ bool mandatory_receive_ = false;
+
+ // disallow copy-and-assign
+ SimplePeerConnection(const SimplePeerConnection&) = delete;
+ SimplePeerConnection& operator=(const SimplePeerConnection&) = delete;
+};
+
+#endif // EXAMPLES_UNITYPLUGIN_SIMPLE_PEER_CONNECTION_H_
diff --git a/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc
new file mode 100644
index 0000000000..6e34d7e1e0
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.cc
@@ -0,0 +1,196 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/unityplugin/unity_plugin_apis.h"
+
+#include <map>
+#include <string>
+
+#include "examples/unityplugin/simple_peer_connection.h"
+
+namespace {
+static int g_peer_connection_id = 1;
+static std::map<int, rtc::scoped_refptr<SimplePeerConnection>>
+ g_peer_connection_map;
+} // namespace
+
+int CreatePeerConnection(const char** turn_urls,
+ const int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool mandatory_receive_video) {
+ g_peer_connection_map[g_peer_connection_id] =
+ rtc::make_ref_counted<SimplePeerConnection>();
+
+ if (!g_peer_connection_map[g_peer_connection_id]->InitializePeerConnection(
+ turn_urls, no_of_urls, username, credential, mandatory_receive_video))
+ return -1;
+
+ return g_peer_connection_id++;
+}
+
+bool ClosePeerConnection(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->DeletePeerConnection();
+ g_peer_connection_map.erase(peer_connection_id);
+ return true;
+}
+
+bool AddStream(int peer_connection_id, bool audio_only) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->AddStreams(audio_only);
+ return true;
+}
+
+bool AddDataChannel(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->CreateDataChannel();
+}
+
+bool CreateOffer(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->CreateOffer();
+}
+
+bool CreateAnswer(int peer_connection_id) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->CreateAnswer();
+}
+
+bool SendDataViaDataChannel(int peer_connection_id, const char* data) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ std::string s(data);
+ g_peer_connection_map[peer_connection_id]->SendDataViaDataChannel(s);
+
+ return true;
+}
+
+bool SetAudioControl(int peer_connection_id, bool is_mute, bool is_record) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->SetAudioControl(is_mute,
+ is_record);
+ return true;
+}
+
+bool SetRemoteDescription(int peer_connection_id,
+ const char* type,
+ const char* sdp) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->SetRemoteDescription(type,
+ sdp);
+}
+
+bool AddIceCandidate(const int peer_connection_id,
+ const char* candidate,
+ const int sdp_mlineindex,
+ const char* sdp_mid) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ return g_peer_connection_map[peer_connection_id]->AddIceCandidate(
+ candidate, sdp_mlineindex, sdp_mid);
+}
+
+// Register callback functions.
+bool RegisterOnLocalI420FrameReady(int peer_connection_id,
+ I420FRAMEREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnLocalI420FrameReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnRemoteI420FrameReady(int peer_connection_id,
+ I420FRAMEREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnRemoteI420FrameReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnLocalDataChannelReady(int peer_connection_id,
+ LOCALDATACHANNELREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnLocalDataChannelReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnDataFromDataChannelReady(
+ int peer_connection_id,
+ DATAFROMEDATECHANNELREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnDataFromDataChannelReady(
+ callback);
+ return true;
+}
+
+bool RegisterOnFailure(int peer_connection_id, FAILURE_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnFailure(callback);
+ return true;
+}
+
+bool RegisterOnAudioBusReady(int peer_connection_id,
+ AUDIOBUSREADY_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnAudioBusReady(callback);
+ return true;
+}
+
+// Signaling channel related functions.
+bool RegisterOnLocalSdpReadytoSend(int peer_connection_id,
+ LOCALSDPREADYTOSEND_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnLocalSdpReadytoSend(
+ callback);
+ return true;
+}
+
+bool RegisterOnIceCandidateReadytoSend(
+ int peer_connection_id,
+ ICECANDIDATEREADYTOSEND_CALLBACK callback) {
+ if (!g_peer_connection_map.count(peer_connection_id))
+ return false;
+
+ g_peer_connection_map[peer_connection_id]->RegisterOnIceCandidateReadytoSend(
+ callback);
+ return true;
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h
new file mode 100644
index 0000000000..9790dc57b9
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/unity_plugin_apis.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file provides an example of Unity native plugin APIs.
+
+#ifndef EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_
+#define EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_
+
+#include <stdint.h>
+
+// Definitions of callback functions.
+typedef void (*I420FRAMEREADY_CALLBACK)(const uint8_t* data_y,
+ const uint8_t* data_u,
+ const uint8_t* data_v,
+ const uint8_t* data_a,
+ int stride_y,
+ int stride_u,
+ int stride_v,
+ int stride_a,
+ uint32_t width,
+ uint32_t height);
+typedef void (*LOCALDATACHANNELREADY_CALLBACK)();
+typedef void (*DATAFROMEDATECHANNELREADY_CALLBACK)(const char* msg);
+typedef void (*FAILURE_CALLBACK)(const char* msg);
+typedef void (*LOCALSDPREADYTOSEND_CALLBACK)(const char* type, const char* sdp);
+typedef void (*ICECANDIDATEREADYTOSEND_CALLBACK)(const char* candidate,
+ int sdp_mline_index,
+ const char* sdp_mid);
+typedef void (*AUDIOBUSREADY_CALLBACK)(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ int number_of_channels,
+ int number_of_frames);
+
+#if defined(WEBRTC_WIN)
+#define WEBRTC_PLUGIN_API __declspec(dllexport)
+#elif defined(WEBRTC_ANDROID)
+#define WEBRTC_PLUGIN_API __attribute__((visibility("default")))
+#endif
+extern "C" {
+// Create a peerconnection and return a unique peer connection id.
+WEBRTC_PLUGIN_API int CreatePeerConnection(const char** turn_urls,
+ int no_of_urls,
+ const char* username,
+ const char* credential,
+ bool mandatory_receive_video);
+// Close a peerconnection.
+WEBRTC_PLUGIN_API bool ClosePeerConnection(int peer_connection_id);
+// Add a local media stream. If audio_only is true, the stream has only an
+// audio track and no video track.
+WEBRTC_PLUGIN_API bool AddStream(int peer_connection_id, bool audio_only);
+// Add a data channel to peer connection.
+WEBRTC_PLUGIN_API bool AddDataChannel(int peer_connection_id);
+// Create a peer connection offer.
+WEBRTC_PLUGIN_API bool CreateOffer(int peer_connection_id);
+// Create a peer connection answer.
+WEBRTC_PLUGIN_API bool CreateAnswer(int peer_connection_id);
+// Send data through data channel.
+WEBRTC_PLUGIN_API bool SendDataViaDataChannel(int peer_connection_id,
+ const char* data);
+// Set audio control. If is_mute is true, no audio is played out. If is_record
+// is true, AUDIOBUSREADY_CALLBACK will be called every 10 ms.
+WEBRTC_PLUGIN_API bool SetAudioControl(int peer_connection_id,
+ bool is_mute,
+ bool is_record);
+// Set remote sdp.
+WEBRTC_PLUGIN_API bool SetRemoteDescription(int peer_connection_id,
+ const char* type,
+ const char* sdp);
+// Add ice candidate.
+WEBRTC_PLUGIN_API bool AddIceCandidate(int peer_connection_id,
+ const char* candidate,
+ int sdp_mlineindex,
+ const char* sdp_mid);
+
+// Register callback functions.
+WEBRTC_PLUGIN_API bool RegisterOnLocalI420FrameReady(
+ int peer_connection_id,
+ I420FRAMEREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnRemoteI420FrameReady(
+ int peer_connection_id,
+ I420FRAMEREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnLocalDataChannelReady(
+ int peer_connection_id,
+ LOCALDATACHANNELREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnDataFromDataChannelReady(
+ int peer_connection_id,
+ DATAFROMEDATECHANNELREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnFailure(int peer_connection_id,
+ FAILURE_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnAudioBusReady(int peer_connection_id,
+ AUDIOBUSREADY_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnLocalSdpReadytoSend(
+ int peer_connection_id,
+ LOCALSDPREADYTOSEND_CALLBACK callback);
+WEBRTC_PLUGIN_API bool RegisterOnIceCandidateReadytoSend(
+ int peer_connection_id,
+ ICECANDIDATEREADYTOSEND_CALLBACK callback);
+}
+
+#endif // EXAMPLES_UNITYPLUGIN_UNITY_PLUGIN_APIS_H_
diff --git a/third_party/libwebrtc/examples/unityplugin/video_observer.cc b/third_party/libwebrtc/examples/unityplugin/video_observer.cc
new file mode 100644
index 0000000000..7e33b08e27
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/video_observer.cc
@@ -0,0 +1,44 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "examples/unityplugin/video_observer.h"
+
+void VideoObserver::SetVideoCallback(I420FRAMEREADY_CALLBACK callback) {
+ std::lock_guard<std::mutex> lock(mutex);
+ OnI420FrameReady = callback;
+}
+
+void VideoObserver::OnFrame(const webrtc::VideoFrame& frame) {
+ std::unique_lock<std::mutex> lock(mutex);
+ if (!OnI420FrameReady)
+ return;
+
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ frame.video_frame_buffer());
+
+ if (buffer->type() != webrtc::VideoFrameBuffer::Type::kI420A) {
+ rtc::scoped_refptr<webrtc::I420BufferInterface> i420_buffer =
+ buffer->ToI420();
+ OnI420FrameReady(i420_buffer->DataY(), i420_buffer->DataU(),
+ i420_buffer->DataV(), nullptr, i420_buffer->StrideY(),
+ i420_buffer->StrideU(), i420_buffer->StrideV(), 0,
+ frame.width(), frame.height());
+
+ } else {
+ // The buffer has alpha channel.
+ const webrtc::I420ABufferInterface* i420a_buffer = buffer->GetI420A();
+
+ OnI420FrameReady(i420a_buffer->DataY(), i420a_buffer->DataU(),
+ i420a_buffer->DataV(), i420a_buffer->DataA(),
+ i420a_buffer->StrideY(), i420a_buffer->StrideU(),
+ i420a_buffer->StrideV(), i420a_buffer->StrideA(),
+ frame.width(), frame.height());
+ }
+}
diff --git a/third_party/libwebrtc/examples/unityplugin/video_observer.h b/third_party/libwebrtc/examples/unityplugin/video_observer.h
new file mode 100644
index 0000000000..01ccd2191a
--- /dev/null
+++ b/third_party/libwebrtc/examples/unityplugin/video_observer.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_
+#define EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_
+
+#include <mutex>
+
+#include "api/media_stream_interface.h"
+#include "api/video/video_sink_interface.h"
+#include "examples/unityplugin/unity_plugin_apis.h"
+
+class VideoObserver : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ VideoObserver() {}
+ ~VideoObserver() {}
+ void SetVideoCallback(I420FRAMEREADY_CALLBACK callback);
+
+ protected:
+ // VideoSinkInterface implementation
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ private:
+ I420FRAMEREADY_CALLBACK OnI420FrameReady = nullptr;
+ std::mutex mutex;
+};
+
+#endif // EXAMPLES_UNITYPLUGIN_VIDEO_OBSERVER_H_