summaryrefslogtreecommitdiffstats
path: root/third_party/libwebrtc/webrtc/media/base
diff options
context:
space:
mode:
Diffstat (limited to 'third_party/libwebrtc/webrtc/media/base')
-rw-r--r--third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc118
-rw-r--r--third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h84
-rw-r--r--third_party/libwebrtc/webrtc/media/base/audiosource.h49
-rw-r--r--third_party/libwebrtc/webrtc/media/base/codec.cc367
-rw-r--r--third_party/libwebrtc/webrtc/media/base/codec.h264
-rw-r--r--third_party/libwebrtc/webrtc/media/base/codec_unittest.cc326
-rw-r--r--third_party/libwebrtc/webrtc/media/base/cryptoparams.h17
-rw-r--r--third_party/libwebrtc/webrtc/media/base/device.h36
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakemediaengine.h978
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h228
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakertp.cc66
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakertp.h140
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h152
-rw-r--r--third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h142
-rw-r--r--third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc310
-rw-r--r--third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h107
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediachannel.h1254
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaconstants.cc113
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaconstants.h137
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaengine.cc40
-rw-r--r--third_party/libwebrtc/webrtc/media/base/mediaengine.h188
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc355
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtpdataengine.h111
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc377
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtputils.cc473
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtputils.h90
-rw-r--r--third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc353
-rw-r--r--third_party/libwebrtc/webrtc/media/base/streamparams.cc268
-rw-r--r--third_party/libwebrtc/webrtc/media/base/streamparams.h332
-rw-r--r--third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc310
-rw-r--r--third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h35
-rw-r--r--third_party/libwebrtc/webrtc/media/base/testutils.cc170
-rw-r--r--third_party/libwebrtc/webrtc/media/base/testutils.h151
-rw-r--r--third_party/libwebrtc/webrtc/media/base/turnutils.cc127
-rw-r--r--third_party/libwebrtc/webrtc/media/base/turnutils.h30
-rw-r--r--third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc120
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoadapter.cc293
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoadapter.h104
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc1096
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc131
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videobroadcaster.h70
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc195
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturer.cc381
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturer.h289
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc786
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h32
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocommon.cc79
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocommon.h229
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc94
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h951
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosinkinterface.h34
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourcebase.cc58
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourcebase.h48
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc17
-rw-r--r--third_party/libwebrtc/webrtc/media/base/videosourceinterface.h59
55 files changed, 13364 insertions, 0 deletions
diff --git a/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc
new file mode 100644
index 0000000000..5a7168bfb8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.cc
@@ -0,0 +1,118 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/adaptedvideotracksource.h"
+
+#include "api/video/i420_buffer.h"
+
+namespace rtc {
+
+AdaptedVideoTrackSource::AdaptedVideoTrackSource() {
+ thread_checker_.DetachFromThread();
+}
+
+AdaptedVideoTrackSource::AdaptedVideoTrackSource(int required_alignment)
+ : video_adapter_(required_alignment) {
+ thread_checker_.DetachFromThread();
+}
+
+bool AdaptedVideoTrackSource::GetStats(Stats* stats) {
+ rtc::CritScope lock(&stats_crit_);
+
+ if (!stats_) {
+ return false;
+ }
+
+ *stats = *stats_;
+ return true;
+}
+
+void AdaptedVideoTrackSource::OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ frame.video_frame_buffer());
+ /* Note that this is a "best effort" approach to
+ wants.rotation_applied; apply_rotation_ can change from false to
+ true between the check of apply_rotation() and the call to
+ broadcaster_.OnFrame(), in which case we generate a frame with
+ pending rotation despite some sink with wants.rotation_applied ==
+ true was just added. The VideoBroadcaster enforces
+ synchronization for us in this case, by not passing the frame on
+ to sinks which don't want it. */
+ if (apply_rotation() && frame.rotation() != webrtc::kVideoRotation_0 &&
+ buffer->type() == webrtc::VideoFrameBuffer::Type::kI420) {
+ /* Apply pending rotation. */
+ broadcaster_.OnFrame(webrtc::VideoFrame(
+ webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()),
+ webrtc::kVideoRotation_0, frame.timestamp_us()));
+ } else {
+ broadcaster_.OnFrame(frame);
+ }
+}
+
+void AdaptedVideoTrackSource::AddOrUpdateSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void AdaptedVideoTrackSource::RemoveSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ broadcaster_.RemoveSink(sink);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+bool AdaptedVideoTrackSource::apply_rotation() {
+ return broadcaster_.wants().rotation_applied;
+}
+
+void AdaptedVideoTrackSource::OnSinkWantsChanged(
+ const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ video_adapter_.OnResolutionFramerateRequest(
+ wants.target_pixel_count, wants.max_pixel_count, wants.max_framerate_fps);
+}
+
+bool AdaptedVideoTrackSource::AdaptFrame(int width,
+ int height,
+ int64_t time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y) {
+ {
+ rtc::CritScope lock(&stats_crit_);
+ stats_ = Stats{width, height};
+ }
+
+ if (!broadcaster_.frame_wanted()) {
+ return false;
+ }
+
+ if (!video_adapter_.AdaptFrameResolution(
+ width, height, time_us * rtc::kNumNanosecsPerMicrosec,
+ crop_width, crop_height, out_width, out_height)) {
+ broadcaster_.OnDiscardedFrame();
+ // VideoAdapter dropped the frame.
+ return false;
+ }
+
+ *crop_x = (width - *crop_width) / 2;
+ *crop_y = (height - *crop_height) / 2;
+ return true;
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h
new file mode 100644
index 0000000000..0db381f5fb
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/adaptedvideotracksource.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
+#define MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
+
+#include "api/mediastreaminterface.h"
+#include "api/notifier.h"
+#include "media/base/videoadapter.h"
+#include "media/base/videobroadcaster.h"
+
+namespace rtc {
+
+// Base class for sources which need video adaptation, e.g., video
+// capture sources. Sinks must be added and removed on one and only
+// one thread, while AdaptFrame and OnFrame may be called on any
+// thread.
+class AdaptedVideoTrackSource
+ : public webrtc::Notifier<webrtc::VideoTrackSourceInterface> {
+ public:
+ AdaptedVideoTrackSource();
+
+ protected:
+ // Allows derived classes to initialize |video_adapter_| with a custom
+ // alignment.
+ explicit AdaptedVideoTrackSource(int required_alignment);
+ // Checks the apply_rotation() flag. If the frame needs rotation, and it is a
+ // plain memory frame, it is rotated. Subclasses producing native frames must
+ // handle apply_rotation() themselves.
+ void OnFrame(const webrtc::VideoFrame& frame);
+
+ // Reports the appropriate frame size after adaptation. Returns true
+ // if a frame is wanted. Returns false if there are no interested
+ // sinks, or if the VideoAdapter decides to drop the frame.
+ bool AdaptFrame(int width,
+ int height,
+ int64_t time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y);
+
+ // Returns the current value of the apply_rotation flag, derived
+ // from the VideoSinkWants of registered sinks. The value is derived
+ // from sinks' wants, in AddOrUpdateSink and RemoveSink. Beware that
+ // when using this method from a different thread, the value may
+ // become stale before it is used.
+ bool apply_rotation();
+
+ cricket::VideoAdapter* video_adapter() { return &video_adapter_; }
+
+ private:
+ // Implements rtc::VideoSourceInterface.
+ void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override;
+ void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ // Part of VideoTrackSourceInterface.
+ bool GetStats(Stats* stats) override;
+
+ void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
+
+ rtc::ThreadChecker thread_checker_;
+
+ cricket::VideoAdapter video_adapter_;
+
+ rtc::CriticalSection stats_crit_;
+ rtc::Optional<Stats> stats_ RTC_GUARDED_BY(stats_crit_);
+
+ VideoBroadcaster broadcaster_;
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_ADAPTEDVIDEOTRACKSOURCE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/audiosource.h b/third_party/libwebrtc/webrtc/media/base/audiosource.h
new file mode 100644
index 0000000000..199b614850
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/audiosource.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_AUDIOSOURCE_H_
+#define MEDIA_BASE_AUDIOSOURCE_H_
+
+#include <cstddef>
+
+namespace cricket {
+
+// Abstract interface for providing the audio data.
+// TODO(deadbeef): Rename this to AudioSourceInterface, and rename
+// webrtc::AudioSourceInterface to AudioTrackSourceInterface.
+class AudioSource {
+ public:
+ class Sink {
+ public:
+ // Callback to receive data from the AudioSource.
+ virtual void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) = 0;
+
+ // Called when the AudioSource is going away.
+ virtual void OnClose() = 0;
+
+ protected:
+ virtual ~Sink() {}
+ };
+
+ // Sets a sink to the AudioSource. There can be only one sink connected
+ // to the source at a time.
+ virtual void SetSink(Sink* sink) = 0;
+
+ protected:
+ virtual ~AudioSource() {}
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_AUDIOSOURCE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/codec.cc b/third_party/libwebrtc/webrtc/media/base/codec.cc
new file mode 100644
index 0000000000..98e52d6848
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/codec.cc
@@ -0,0 +1,367 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/codec.h"
+
+#include <algorithm>
+#include <sstream>
+
+#include "media/base/h264_profile_level_id.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/stringencode.h"
+#include "rtc_base/stringutils.h"
+
+namespace cricket {
+
+FeedbackParams::FeedbackParams() = default;
+
+bool FeedbackParam::operator==(const FeedbackParam& other) const {
+ return _stricmp(other.id().c_str(), id().c_str()) == 0 &&
+ _stricmp(other.param().c_str(), param().c_str()) == 0;
+}
+
+bool FeedbackParams::operator==(const FeedbackParams& other) const {
+ return params_ == other.params_;
+}
+
+bool FeedbackParams::Has(const FeedbackParam& param) const {
+ return std::find(params_.begin(), params_.end(), param) != params_.end();
+}
+
+void FeedbackParams::Add(const FeedbackParam& param) {
+ if (param.id().empty()) {
+ return;
+ }
+ if (Has(param)) {
+ // Param already in |this|.
+ return;
+ }
+ params_.push_back(param);
+ RTC_CHECK(!HasDuplicateEntries());
+}
+
+void FeedbackParams::Intersect(const FeedbackParams& from) {
+ std::vector<FeedbackParam>::iterator iter_to = params_.begin();
+ while (iter_to != params_.end()) {
+ if (!from.Has(*iter_to)) {
+ iter_to = params_.erase(iter_to);
+ } else {
+ ++iter_to;
+ }
+ }
+}
+
+bool FeedbackParams::HasDuplicateEntries() const {
+ for (std::vector<FeedbackParam>::const_iterator iter = params_.begin();
+ iter != params_.end(); ++iter) {
+ for (std::vector<FeedbackParam>::const_iterator found = iter + 1;
+ found != params_.end(); ++found) {
+ if (*found == *iter) {
+ return true;
+ }
+ }
+ }
+ return false;
+}
+
+Codec::Codec(int id, const std::string& name, int clockrate)
+ : id(id), name(name), clockrate(clockrate) {}
+
+Codec::Codec() : id(0), clockrate(0) {}
+
+Codec::Codec(const Codec& c) = default;
+Codec::Codec(Codec&& c) = default;
+Codec::~Codec() = default;
+Codec& Codec::operator=(const Codec& c) = default;
+Codec& Codec::operator=(Codec&& c) = default;
+
+bool Codec::operator==(const Codec& c) const {
+ return this->id == c.id && // id is reserved in objective-c
+ name == c.name && clockrate == c.clockrate && params == c.params &&
+ feedback_params == c.feedback_params;
+}
+
+bool Codec::Matches(const Codec& codec) const {
+ // Match the codec id/name based on the typical static/dynamic name rules.
+ // Matching is case-insensitive.
+ const int kMaxStaticPayloadId = 95;
+ return (id <= kMaxStaticPayloadId || codec.id <= kMaxStaticPayloadId)
+ ? (id == codec.id)
+ : (_stricmp(name.c_str(), codec.name.c_str()) == 0);
+}
+
+bool Codec::GetParam(const std::string& name, std::string* out) const {
+ CodecParameterMap::const_iterator iter = params.find(name);
+ if (iter == params.end())
+ return false;
+ *out = iter->second;
+ return true;
+}
+
+bool Codec::GetParam(const std::string& name, int* out) const {
+ CodecParameterMap::const_iterator iter = params.find(name);
+ if (iter == params.end())
+ return false;
+ return rtc::FromString(iter->second, out);
+}
+
+void Codec::SetParam(const std::string& name, const std::string& value) {
+ params[name] = value;
+}
+
+void Codec::SetParam(const std::string& name, int value) {
+ params[name] = rtc::ToString(value);
+}
+
+bool Codec::RemoveParam(const std::string& name) {
+ return params.erase(name) == 1;
+}
+
+void Codec::AddFeedbackParam(const FeedbackParam& param) {
+ feedback_params.Add(param);
+}
+
+bool Codec::HasFeedbackParam(const FeedbackParam& param) const {
+ return feedback_params.Has(param);
+}
+
+void Codec::IntersectFeedbackParams(const Codec& other) {
+ feedback_params.Intersect(other.feedback_params);
+}
+
+webrtc::RtpCodecParameters Codec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params;
+ codec_params.payload_type = id;
+ codec_params.name = name;
+ codec_params.clock_rate = clockrate;
+ return codec_params;
+}
+
+AudioCodec::AudioCodec(int id,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels)
+ : Codec(id, name, clockrate), bitrate(bitrate), channels(channels) {}
+
+AudioCodec::AudioCodec() : Codec(), bitrate(0), channels(0) {
+}
+
+AudioCodec::AudioCodec(const AudioCodec& c) = default;
+AudioCodec::AudioCodec(AudioCodec&& c) = default;
+AudioCodec& AudioCodec::operator=(const AudioCodec& c) = default;
+AudioCodec& AudioCodec::operator=(AudioCodec&& c) = default;
+
+bool AudioCodec::operator==(const AudioCodec& c) const {
+ return bitrate == c.bitrate && channels == c.channels && Codec::operator==(c);
+}
+
+bool AudioCodec::Matches(const AudioCodec& codec) const {
+ // If a nonzero clockrate is specified, it must match the actual clockrate.
+ // If a nonzero bitrate is specified, it must match the actual bitrate,
+ // unless the codec is VBR (0), where we just force the supplied value.
+ // The number of channels must match exactly, with the exception
+ // that channels=0 is treated synonymously as channels=1, per RFC
+ // 4566 section 6: " [The channels] parameter is OPTIONAL and may be
+ // omitted if the number of channels is one."
+ // Preference is ignored.
+ // TODO(juberti): Treat a zero clockrate as 8000Hz, the RTP default clockrate.
+ return Codec::Matches(codec) &&
+ ((codec.clockrate == 0 /*&& clockrate == 8000*/) ||
+ clockrate == codec.clockrate) &&
+ (codec.bitrate == 0 || bitrate <= 0 || bitrate == codec.bitrate) &&
+ ((codec.channels < 2 && channels < 2) || channels == codec.channels);
+}
+
+std::string AudioCodec::ToString() const {
+ std::ostringstream os;
+ os << "AudioCodec[" << id << ":" << name << ":" << clockrate << ":" << bitrate
+ << ":" << channels << "]";
+ return os.str();
+}
+
+webrtc::RtpCodecParameters AudioCodec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params = Codec::ToCodecParameters();
+ codec_params.num_channels = static_cast<int>(channels);
+ codec_params.kind = MEDIA_TYPE_AUDIO;
+ return codec_params;
+}
+
+std::string VideoCodec::ToString() const {
+ std::ostringstream os;
+ os << "VideoCodec[" << id << ":" << name << "]";
+ return os.str();
+}
+
+webrtc::RtpCodecParameters VideoCodec::ToCodecParameters() const {
+ webrtc::RtpCodecParameters codec_params = Codec::ToCodecParameters();
+ codec_params.kind = MEDIA_TYPE_VIDEO;
+ return codec_params;
+}
+
+VideoCodec::VideoCodec(int id, const std::string& name)
+ : Codec(id, name, kVideoCodecClockrate) {
+ SetDefaultParameters();
+}
+
+VideoCodec::VideoCodec(const std::string& name) : VideoCodec(0 /* id */, name) {
+ SetDefaultParameters();
+}
+
+VideoCodec::VideoCodec() : Codec() {
+ clockrate = kVideoCodecClockrate;
+}
+
+VideoCodec::VideoCodec(const webrtc::SdpVideoFormat& c)
+ : Codec(0 /* id */, c.name, kVideoCodecClockrate) {
+ params = c.parameters;
+}
+
+VideoCodec::VideoCodec(const VideoCodec& c) = default;
+VideoCodec::VideoCodec(VideoCodec&& c) = default;
+VideoCodec& VideoCodec::operator=(const VideoCodec& c) = default;
+VideoCodec& VideoCodec::operator=(VideoCodec&& c) = default;
+
+void VideoCodec::SetDefaultParameters() {
+ if (_stricmp(kH264CodecName, name.c_str()) == 0) {
+ // This default is set for all H.264 codecs created because
+ // that was the default before packetization mode support was added.
+ // TODO(hta): Move this to the places that create VideoCodecs from
+ // SDP or from knowledge of implementation capabilities.
+ SetParam(kH264FmtpPacketizationMode, "1");
+ }
+}
+
+bool VideoCodec::operator==(const VideoCodec& c) const {
+ return Codec::operator==(c);
+}
+
+bool VideoCodec::Matches(const VideoCodec& other) const {
+ if (!Codec::Matches(other))
+ return false;
+ if (CodecNamesEq(name.c_str(), kH264CodecName))
+ return webrtc::H264::IsSameH264Profile(params, other.params);
+ return true;
+}
+
+VideoCodec VideoCodec::CreateRtxCodec(int rtx_payload_type,
+ int associated_payload_type) {
+ VideoCodec rtx_codec(rtx_payload_type, kRtxCodecName);
+ rtx_codec.SetParam(kCodecParamAssociatedPayloadType, associated_payload_type);
+ return rtx_codec;
+}
+
+VideoCodec::CodecType VideoCodec::GetCodecType() const {
+ const char* payload_name = name.c_str();
+ if (_stricmp(payload_name, kRedCodecName) == 0) {
+ return CODEC_RED;
+ }
+ if (_stricmp(payload_name, kUlpfecCodecName) == 0) {
+ return CODEC_ULPFEC;
+ }
+ if (_stricmp(payload_name, kFlexfecCodecName) == 0) {
+ return CODEC_FLEXFEC;
+ }
+ if (_stricmp(payload_name, kRtxCodecName) == 0) {
+ return CODEC_RTX;
+ }
+
+ return CODEC_VIDEO;
+}
+
+bool VideoCodec::ValidateCodecFormat() const {
+ if (id < 0 || id > 127) {
+ RTC_LOG(LS_ERROR) << "Codec with invalid payload type: " << ToString();
+ return false;
+ }
+ if (GetCodecType() != CODEC_VIDEO) {
+ return true;
+ }
+
+ // Video validation from here on.
+ int min_bitrate = -1;
+ int max_bitrate = -1;
+ if (GetParam(kCodecParamMinBitrate, &min_bitrate) &&
+ GetParam(kCodecParamMaxBitrate, &max_bitrate)) {
+ if (max_bitrate < min_bitrate) {
+ RTC_LOG(LS_ERROR) << "Codec with max < min bitrate: " << ToString();
+ return false;
+ }
+ }
+ return true;
+}
+
+DataCodec::DataCodec(int id, const std::string& name)
+ : Codec(id, name, kDataCodecClockrate) {}
+
+DataCodec::DataCodec() : Codec() {
+ clockrate = kDataCodecClockrate;
+}
+
+DataCodec::DataCodec(const DataCodec& c) = default;
+DataCodec::DataCodec(DataCodec&& c) = default;
+DataCodec& DataCodec::operator=(const DataCodec& c) = default;
+DataCodec& DataCodec::operator=(DataCodec&& c) = default;
+
+std::string DataCodec::ToString() const {
+ std::ostringstream os;
+ os << "DataCodec[" << id << ":" << name << "]";
+ return os.str();
+}
+
+bool HasNack(const Codec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamNack, kParamValueEmpty));
+}
+
+bool HasRemb(const Codec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamRemb, kParamValueEmpty));
+}
+
+bool HasTransportCc(const Codec& codec) {
+ return codec.HasFeedbackParam(
+ FeedbackParam(kRtcpFbParamTransportCc, kParamValueEmpty));
+}
+
+bool CodecNamesEq(const std::string& name1, const std::string& name2) {
+ return CodecNamesEq(name1.c_str(), name2.c_str());
+}
+
+bool CodecNamesEq(const char* name1, const char* name2) {
+ return _stricmp(name1, name2) == 0;
+}
+
+const VideoCodec* FindMatchingCodec(
+ const std::vector<VideoCodec>& supported_codecs,
+ const VideoCodec& codec) {
+ for (const VideoCodec& supported_codec : supported_codecs) {
+ if (IsSameCodec(codec.name, codec.params, supported_codec.name,
+ supported_codec.params)) {
+ return &supported_codec;
+ }
+ }
+ return nullptr;
+}
+
+bool IsSameCodec(const std::string& name1,
+ const CodecParameterMap& params1,
+ const std::string& name2,
+ const CodecParameterMap& params2) {
+ // If different names (case insensitive), then not same formats.
+ if (!CodecNamesEq(name1, name2))
+ return false;
+ // For every format besides H264, comparing names is enough.
+ return !CodecNamesEq(name1.c_str(), kH264CodecName) ||
+ webrtc::H264::IsSameH264Profile(params1, params2);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/codec.h b/third_party/libwebrtc/webrtc/media/base/codec.h
new file mode 100644
index 0000000000..6a2dcf4529
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/codec.h
@@ -0,0 +1,264 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_CODEC_H_
+#define MEDIA_BASE_CODEC_H_
+
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "api/rtpparameters.h"
+#include "api/video_codecs/sdp_video_format.h"
+#include "common_types.h" // NOLINT(build/include)
+#include "media/base/mediaconstants.h"
+
+namespace cricket {
+
+typedef std::map<std::string, std::string> CodecParameterMap;
+
+class FeedbackParam {
+ public:
+ FeedbackParam() = default;
+ FeedbackParam(const std::string& id, const std::string& param)
+ : id_(id),
+ param_(param) {
+ }
+ explicit FeedbackParam(const std::string& id)
+ : id_(id),
+ param_(kParamValueEmpty) {
+ }
+ bool operator==(const FeedbackParam& other) const;
+
+ const std::string& id() const { return id_; }
+ const std::string& param() const { return param_; }
+
+ private:
+ std::string id_; // e.g. "nack", "ccm"
+ std::string param_; // e.g. "", "rpsi", "fir"
+};
+
+class FeedbackParams {
+ public:
+ FeedbackParams();
+ bool operator==(const FeedbackParams& other) const;
+
+ bool Has(const FeedbackParam& param) const;
+ void Add(const FeedbackParam& param);
+
+ void Intersect(const FeedbackParams& from);
+
+ const std::vector<FeedbackParam>& params() const { return params_; }
+ private:
+ bool HasDuplicateEntries() const;
+
+ std::vector<FeedbackParam> params_;
+};
+
+struct Codec {
+ int id;
+ std::string name;
+ int clockrate;
+ CodecParameterMap params;
+ FeedbackParams feedback_params;
+
+ virtual ~Codec();
+
+ // Indicates if this codec is compatible with the specified codec.
+ bool Matches(const Codec& codec) const;
+
+ // Find the parameter for |name| and write the value to |out|.
+ bool GetParam(const std::string& name, std::string* out) const;
+ bool GetParam(const std::string& name, int* out) const;
+
+ void SetParam(const std::string& name, const std::string& value);
+ void SetParam(const std::string& name, int value);
+
+ // It is safe to input a non-existent parameter.
+ // Returns true if the parameter existed, false if it did not exist.
+ bool RemoveParam(const std::string& name);
+
+ bool HasFeedbackParam(const FeedbackParam& param) const;
+ void AddFeedbackParam(const FeedbackParam& param);
+
+  // Filter |this| feedback params such that only those shared by both |this|
+ // and |other| are kept.
+ void IntersectFeedbackParams(const Codec& other);
+
+ virtual webrtc::RtpCodecParameters ToCodecParameters() const;
+
+ Codec& operator=(const Codec& c);
+ Codec& operator=(Codec&& c);
+
+ bool operator==(const Codec& c) const;
+
+ bool operator!=(const Codec& c) const {
+ return !(*this == c);
+ }
+
+ protected:
+ // A Codec can't be created without a subclass.
+ // Creates a codec with the given parameters.
+ Codec(int id, const std::string& name, int clockrate);
+ // Creates an empty codec.
+ Codec();
+ Codec(const Codec& c);
+ Codec(Codec&& c);
+};
+
+struct AudioCodec : public Codec {
+ int bitrate;
+ size_t channels;
+
+ // Creates a codec with the given parameters.
+ AudioCodec(int id,
+ const std::string& name,
+ int clockrate,
+ int bitrate,
+ size_t channels);
+ // Creates an empty codec.
+ AudioCodec();
+ AudioCodec(const AudioCodec& c);
+ AudioCodec(AudioCodec&& c);
+ ~AudioCodec() override = default;
+
+ // Indicates if this codec is compatible with the specified codec.
+ bool Matches(const AudioCodec& codec) const;
+
+ std::string ToString() const;
+
+ webrtc::RtpCodecParameters ToCodecParameters() const override;
+
+ AudioCodec& operator=(const AudioCodec& c);
+ AudioCodec& operator=(AudioCodec&& c);
+
+ bool operator==(const AudioCodec& c) const;
+
+ bool operator!=(const AudioCodec& c) const {
+ return !(*this == c);
+ }
+};
+
+inline std::ostream& operator<<(std::ostream& os, const AudioCodec& ac) {
+ os << "{id: " << ac.id;
+ os << ", name: " << ac.name;
+ os << ", clockrate: " << ac.clockrate;
+ os << ", bitrate: " << ac.bitrate;
+ os << ", channels: " << ac.channels;
+ os << ", params: {";
+ const char* sep = "";
+ for (const auto& kv : ac.params) {
+ os << sep << kv.first << ": " << kv.second;
+ sep = ", ";
+ }
+ os << "}, feedback_params: {";
+ sep = "";
+ for (const FeedbackParam& fp : ac.feedback_params.params()) {
+ os << sep << fp.id() << ": " << fp.param();
+ sep = ", ";
+ }
+ os << "}}";
+ return os;
+}
+
+struct VideoCodec : public Codec {
+ // Creates a codec with the given parameters.
+ VideoCodec(int id, const std::string& name);
+ // Creates a codec with the given name and empty id.
+ explicit VideoCodec(const std::string& name);
+ // Creates an empty codec.
+ VideoCodec();
+ VideoCodec(const VideoCodec& c);
+ explicit VideoCodec(const webrtc::SdpVideoFormat& c);
+ VideoCodec(VideoCodec&& c);
+ ~VideoCodec() override = default;
+
+ // Indicates if this video codec is the same as the other video codec, e.g. if
+ // they are both VP8 or VP9, or if they are both H264 with the same H264
+ // profile. H264 levels however are not compared.
+ bool Matches(const VideoCodec& codec) const;
+
+ std::string ToString() const;
+
+ webrtc::RtpCodecParameters ToCodecParameters() const override;
+
+ VideoCodec& operator=(const VideoCodec& c);
+ VideoCodec& operator=(VideoCodec&& c);
+
+ bool operator==(const VideoCodec& c) const;
+
+ bool operator!=(const VideoCodec& c) const {
+ return !(*this == c);
+ }
+
+ static VideoCodec CreateRtxCodec(int rtx_payload_type,
+ int associated_payload_type);
+
+ enum CodecType {
+ CODEC_VIDEO,
+ CODEC_RED,
+ CODEC_ULPFEC,
+ CODEC_FLEXFEC,
+ CODEC_RTX,
+ };
+
+ CodecType GetCodecType() const;
+ // Validates a VideoCodec's payload type, dimensions and bitrates etc. If they
+  // don't make sense (such as max < min bitrate), an error is logged and
+ // ValidateCodecFormat returns false.
+ bool ValidateCodecFormat() const;
+
+ private:
+ void SetDefaultParameters();
+};
+
+struct DataCodec : public Codec {
+ DataCodec(int id, const std::string& name);
+ DataCodec();
+ DataCodec(const DataCodec& c);
+ DataCodec(DataCodec&& c);
+ ~DataCodec() override = default;
+
+ DataCodec& operator=(const DataCodec& c);
+ DataCodec& operator=(DataCodec&& c);
+
+ std::string ToString() const;
+};
+
+// Get the codec setting associated with |payload_type|. If there
+// is no codec associated with that payload type it returns nullptr.
+template <class Codec>
+const Codec* FindCodecById(const std::vector<Codec>& codecs, int payload_type) {
+ for (const auto& codec : codecs) {
+ if (codec.id == payload_type)
+ return &codec;
+ }
+ return nullptr;
+}
+
+bool CodecNamesEq(const std::string& name1, const std::string& name2);
+bool CodecNamesEq(const char* name1, const char* name2);
+bool HasNack(const Codec& codec);
+bool HasRemb(const Codec& codec);
+bool HasTransportCc(const Codec& codec);
+// Returns the first codec in |supported_codecs| that matches |codec|, or
+// nullptr if no codec matches.
+const VideoCodec* FindMatchingCodec(
+ const std::vector<VideoCodec>& supported_codecs,
+ const VideoCodec& codec);
+bool IsSameCodec(const std::string& name1,
+ const CodecParameterMap& params1,
+ const std::string& name2,
+ const CodecParameterMap& params2);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_CODEC_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/codec_unittest.cc b/third_party/libwebrtc/webrtc/media/base/codec_unittest.cc
new file mode 100644
index 0000000000..03d8684c64
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/codec_unittest.cc
@@ -0,0 +1,326 @@
+/*
+ * Copyright (c) 2009 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/codec.h"
+#include "rtc_base/gunit.h"
+
+using cricket::AudioCodec;
+using cricket::Codec;
+using cricket::DataCodec;
+using cricket::FeedbackParam;
+using cricket::VideoCodec;
+using cricket::kCodecParamAssociatedPayloadType;
+using cricket::kCodecParamMaxBitrate;
+using cricket::kCodecParamMinBitrate;
+
+class TestCodec : public Codec {
+ public:
+ TestCodec(int id, const std::string& name, int clockrate)
+ : Codec(id, name, clockrate) {}
+ TestCodec() : Codec() {}
+ TestCodec(const TestCodec& c) : Codec(c) {}
+};
+
+TEST(CodecTest, TestCodecOperators) {
+ TestCodec c0(96, "D", 1000);
+ c0.SetParam("a", 1);
+
+ TestCodec c1 = c0;
+ EXPECT_TRUE(c1 == c0);
+
+ int param_value0;
+ int param_value1;
+ EXPECT_TRUE(c0.GetParam("a", &param_value0));
+ EXPECT_TRUE(c1.GetParam("a", &param_value1));
+ EXPECT_EQ(param_value0, param_value1);
+
+ c1.id = 86;
+ EXPECT_TRUE(c0 != c1);
+
+ c1 = c0;
+ c1.name = "x";
+ EXPECT_TRUE(c0 != c1);
+
+ c1 = c0;
+ c1.clockrate = 2000;
+ EXPECT_TRUE(c0 != c1);
+
+ c1 = c0;
+ c1.SetParam("a", 2);
+ EXPECT_TRUE(c0 != c1);
+
+ TestCodec c5;
+ TestCodec c6(0, "", 0);
+ EXPECT_TRUE(c5 == c6);
+}
+
+TEST(CodecTest, TestAudioCodecOperators) {
+ AudioCodec c0(96, "A", 44100, 20000, 2);
+ AudioCodec c1(95, "A", 44100, 20000, 2);
+ AudioCodec c2(96, "x", 44100, 20000, 2);
+ AudioCodec c3(96, "A", 48000, 20000, 2);
+ AudioCodec c4(96, "A", 44100, 10000, 2);
+ AudioCodec c5(96, "A", 44100, 20000, 1);
+ EXPECT_NE(c0, c1);
+ EXPECT_NE(c0, c2);
+ EXPECT_NE(c0, c3);
+ EXPECT_NE(c0, c4);
+ EXPECT_NE(c0, c5);
+
+ AudioCodec c7;
+ AudioCodec c8(0, "", 0, 0, 0);
+ AudioCodec c9 = c0;
+ EXPECT_EQ(c8, c7);
+ EXPECT_NE(c9, c7);
+ EXPECT_EQ(c9, c0);
+
+ AudioCodec c10(c0);
+ AudioCodec c11(c0);
+ AudioCodec c12(c0);
+ AudioCodec c13(c0);
+ c10.params["x"] = "abc";
+ c11.params["x"] = "def";
+ c12.params["y"] = "abc";
+ c13.params["x"] = "abc";
+ EXPECT_NE(c10, c0);
+ EXPECT_NE(c11, c0);
+ EXPECT_NE(c11, c10);
+ EXPECT_NE(c12, c0);
+ EXPECT_NE(c12, c10);
+ EXPECT_NE(c12, c11);
+ EXPECT_EQ(c13, c10);
+}
+
+TEST(CodecTest, TestAudioCodecMatches) {
+ // Test a codec with a static payload type.
+ AudioCodec c0(95, "A", 44100, 20000, 1);
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 1)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 20000, 0)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 44100, 0, 0)));
+ EXPECT_TRUE(c0.Matches(AudioCodec(95, "", 0, 0, 0)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(96, "", 44100, 20000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 20000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 30000, 1)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 44100, 20000, 2)));
+ EXPECT_FALSE(c0.Matches(AudioCodec(95, "", 55100, 30000, 2)));
+
+ // Test a codec with a dynamic payload type.
+ AudioCodec c1(96, "A", 44100, 20000, 1);
+ EXPECT_TRUE(c1.Matches(AudioCodec(96, "A", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(97, "A", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(96, "a", 0, 0, 0)));
+ EXPECT_TRUE(c1.Matches(AudioCodec(97, "a", 0, 0, 0)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(95, "A", 0, 0, 0)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(96, "", 44100, 20000, 2)));
+ EXPECT_FALSE(c1.Matches(AudioCodec(96, "A", 55100, 30000, 1)));
+
+ // Test a codec with a dynamic payload type, and auto bitrate.
+ AudioCodec c2(97, "A", 16000, 0, 1);
+ // Use default bitrate.
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 1)));
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 0, 0)));
+ // Use explicit bitrate.
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, 32000, 1)));
+ // Backward compatibility with clients that might send "-1" (for default).
+ EXPECT_TRUE(c2.Matches(AudioCodec(97, "A", 16000, -1, 1)));
+
+ // Stereo doesn't match channels = 0.
+ AudioCodec c3(96, "A", 44100, 20000, 2);
+ EXPECT_TRUE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 2)));
+ EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 1)));
+ EXPECT_FALSE(c3.Matches(AudioCodec(96, "A", 44100, 20000, 0)));
+}
+
+TEST(CodecTest, TestVideoCodecOperators) {
+ VideoCodec c0(96, "V");
+ VideoCodec c1(95, "V");
+ VideoCodec c2(96, "x");
+
+ EXPECT_TRUE(c0 != c1);
+ EXPECT_TRUE(c0 != c2);
+
+ VideoCodec c7;
+ VideoCodec c8(0, "");
+ VideoCodec c9 = c0;
+ EXPECT_TRUE(c8 == c7);
+ EXPECT_TRUE(c9 != c7);
+ EXPECT_TRUE(c9 == c0);
+
+ VideoCodec c10(c0);
+ VideoCodec c11(c0);
+ VideoCodec c12(c0);
+ VideoCodec c13(c0);
+ c10.params["x"] = "abc";
+ c11.params["x"] = "def";
+ c12.params["y"] = "abc";
+ c13.params["x"] = "abc";
+ EXPECT_TRUE(c10 != c0);
+ EXPECT_TRUE(c11 != c0);
+ EXPECT_TRUE(c11 != c10);
+ EXPECT_TRUE(c12 != c0);
+ EXPECT_TRUE(c12 != c10);
+ EXPECT_TRUE(c12 != c11);
+ EXPECT_TRUE(c13 == c10);
+}
+
+TEST(CodecTest, TestVideoCodecMatches) {
+ // Test a codec with a static payload type.
+ VideoCodec c0(95, "V");
+ EXPECT_TRUE(c0.Matches(VideoCodec(95, "")));
+ EXPECT_FALSE(c0.Matches(VideoCodec(96, "")));
+
+ // Test a codec with a dynamic payload type.
+ VideoCodec c1(96, "V");
+ EXPECT_TRUE(c1.Matches(VideoCodec(96, "V")));
+ EXPECT_TRUE(c1.Matches(VideoCodec(97, "V")));
+ EXPECT_TRUE(c1.Matches(VideoCodec(96, "v")));
+ EXPECT_TRUE(c1.Matches(VideoCodec(97, "v")));
+ EXPECT_FALSE(c1.Matches(VideoCodec(96, "")));
+ EXPECT_FALSE(c1.Matches(VideoCodec(95, "V")));
+}
+
+TEST(CodecTest, TestDataCodecMatches) {
+ // Test a codec with a static payload type.
+ DataCodec c0(95, "D");
+ EXPECT_TRUE(c0.Matches(DataCodec(95, "")));
+ EXPECT_FALSE(c0.Matches(DataCodec(96, "")));
+
+ // Test a codec with a dynamic payload type.
+ DataCodec c1(96, "D");
+ EXPECT_TRUE(c1.Matches(DataCodec(96, "D")));
+ EXPECT_TRUE(c1.Matches(DataCodec(97, "D")));
+ EXPECT_TRUE(c1.Matches(DataCodec(96, "d")));
+ EXPECT_TRUE(c1.Matches(DataCodec(97, "d")));
+ EXPECT_FALSE(c1.Matches(DataCodec(96, "")));
+ EXPECT_FALSE(c1.Matches(DataCodec(95, "D")));
+}
+
+TEST(CodecTest, TestSetParamGetParamAndRemoveParam) {
+ AudioCodec codec;
+ codec.SetParam("a", "1");
+ codec.SetParam("b", "x");
+
+ int int_value = 0;
+ EXPECT_TRUE(codec.GetParam("a", &int_value));
+ EXPECT_EQ(1, int_value);
+ EXPECT_FALSE(codec.GetParam("b", &int_value));
+ EXPECT_FALSE(codec.GetParam("c", &int_value));
+
+ std::string str_value;
+ EXPECT_TRUE(codec.GetParam("a", &str_value));
+ EXPECT_EQ("1", str_value);
+ EXPECT_TRUE(codec.GetParam("b", &str_value));
+ EXPECT_EQ("x", str_value);
+ EXPECT_FALSE(codec.GetParam("c", &str_value));
+ EXPECT_TRUE(codec.RemoveParam("a"));
+ EXPECT_FALSE(codec.RemoveParam("c"));
+}
+
+TEST(CodecTest, TestIntersectFeedbackParams) {
+ const FeedbackParam a1("a", "1");
+ const FeedbackParam b2("b", "2");
+ const FeedbackParam b3("b", "3");
+ const FeedbackParam c3("c", "3");
+ TestCodec c1;
+ c1.AddFeedbackParam(a1); // Only match with c2.
+ c1.AddFeedbackParam(b2); // Same param different values.
+ c1.AddFeedbackParam(c3); // Not in c2.
+ TestCodec c2;
+ c2.AddFeedbackParam(a1);
+ c2.AddFeedbackParam(b3);
+
+ c1.IntersectFeedbackParams(c2);
+ EXPECT_TRUE(c1.HasFeedbackParam(a1));
+ EXPECT_FALSE(c1.HasFeedbackParam(b2));
+ EXPECT_FALSE(c1.HasFeedbackParam(c3));
+}
+
+TEST(CodecTest, TestGetCodecType) {
+ // Codec type comparison should be case insensitive on names.
+ const VideoCodec codec(96, "V");
+ const VideoCodec rtx_codec(96, "rTx");
+ const VideoCodec ulpfec_codec(96, "ulpFeC");
+ const VideoCodec flexfec_codec(96, "FlExFeC-03");
+ const VideoCodec red_codec(96, "ReD");
+ EXPECT_EQ(VideoCodec::CODEC_VIDEO, codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_RTX, rtx_codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_ULPFEC, ulpfec_codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_FLEXFEC, flexfec_codec.GetCodecType());
+ EXPECT_EQ(VideoCodec::CODEC_RED, red_codec.GetCodecType());
+}
+
+TEST(CodecTest, TestCreateRtxCodec) {
+ VideoCodec rtx_codec = VideoCodec::CreateRtxCodec(96, 120);
+ EXPECT_EQ(96, rtx_codec.id);
+ EXPECT_EQ(VideoCodec::CODEC_RTX, rtx_codec.GetCodecType());
+ int associated_payload_type;
+ ASSERT_TRUE(rtx_codec.GetParam(kCodecParamAssociatedPayloadType,
+ &associated_payload_type));
+ EXPECT_EQ(120, associated_payload_type);
+}
+
+TEST(CodecTest, TestValidateCodecFormat) {
+ const VideoCodec codec(96, "V");
+ ASSERT_TRUE(codec.ValidateCodecFormat());
+
+ // Accept 0-127 as payload types.
+ VideoCodec low_payload_type = codec;
+ low_payload_type.id = 0;
+ VideoCodec high_payload_type = codec;
+ high_payload_type.id = 127;
+ ASSERT_TRUE(low_payload_type.ValidateCodecFormat());
+ EXPECT_TRUE(high_payload_type.ValidateCodecFormat());
+
+ // Reject negative payloads.
+ VideoCodec negative_payload_type = codec;
+ negative_payload_type.id = -1;
+ EXPECT_FALSE(negative_payload_type.ValidateCodecFormat());
+
+ // Reject too-high payloads.
+ VideoCodec too_high_payload_type = codec;
+ too_high_payload_type.id = 128;
+ EXPECT_FALSE(too_high_payload_type.ValidateCodecFormat());
+
+ // Reject codecs with min bitrate > max bitrate.
+ VideoCodec incorrect_bitrates = codec;
+ incorrect_bitrates.params[kCodecParamMinBitrate] = "100";
+ incorrect_bitrates.params[kCodecParamMaxBitrate] = "80";
+ EXPECT_FALSE(incorrect_bitrates.ValidateCodecFormat());
+
+ // Accept min bitrate == max bitrate.
+ VideoCodec equal_bitrates = codec;
+ equal_bitrates.params[kCodecParamMinBitrate] = "100";
+ equal_bitrates.params[kCodecParamMaxBitrate] = "100";
+ EXPECT_TRUE(equal_bitrates.ValidateCodecFormat());
+
+ // Accept min bitrate < max bitrate.
+ VideoCodec different_bitrates = codec;
+ different_bitrates.params[kCodecParamMinBitrate] = "99";
+ different_bitrates.params[kCodecParamMaxBitrate] = "100";
+ EXPECT_TRUE(different_bitrates.ValidateCodecFormat());
+}
+
+TEST(CodecTest, TestToCodecParameters) {
+ const VideoCodec v(96, "V");
+ webrtc::RtpCodecParameters codec_params_1 = v.ToCodecParameters();
+ EXPECT_EQ(96, codec_params_1.payload_type);
+ EXPECT_EQ(cricket::MEDIA_TYPE_VIDEO, codec_params_1.kind);
+ EXPECT_EQ("V", codec_params_1.name);
+ EXPECT_EQ(cricket::kVideoCodecClockrate, codec_params_1.clock_rate);
+ EXPECT_EQ(rtc::nullopt, codec_params_1.num_channels);
+
+ const AudioCodec a(97, "A", 44100, 20000, 2);
+ webrtc::RtpCodecParameters codec_params_2 = a.ToCodecParameters();
+ EXPECT_EQ(97, codec_params_2.payload_type);
+ EXPECT_EQ(cricket::MEDIA_TYPE_AUDIO, codec_params_2.kind);
+ EXPECT_EQ("A", codec_params_2.name);
+ EXPECT_EQ(44100, codec_params_2.clock_rate);
+ EXPECT_EQ(2, codec_params_2.num_channels);
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/cryptoparams.h b/third_party/libwebrtc/webrtc/media/base/cryptoparams.h
new file mode 100644
index 0000000000..9ba17eebba
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/cryptoparams.h
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(bugs.webrtc.org/7504): Remove.
+#ifndef MEDIA_BASE_CRYPTOPARAMS_H_
+#define MEDIA_BASE_CRYPTOPARAMS_H_
+
+#include "api/cryptoparams.h"
+
+#endif // MEDIA_BASE_CRYPTOPARAMS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/device.h b/third_party/libwebrtc/webrtc/media/base/device.h
new file mode 100644
index 0000000000..f47293823f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/device.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_DEVICE_H_
+#define MEDIA_BASE_DEVICE_H_
+
+#include <string>
+
+#include "rtc_base/stringencode.h"
+
+namespace cricket {
+
+// Used to represent an audio or video capture or render device.
+struct Device {
+ Device() {}
+ Device(const std::string& name, int id)
+ : name(name),
+ id(rtc::ToString(id)) {
+ }
+ Device(const std::string& name, const std::string& id)
+ : name(name), id(id) {}
+
+ std::string name;
+ std::string id;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_DEVICE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakemediaengine.h b/third_party/libwebrtc/webrtc/media/base/fakemediaengine.h
new file mode 100644
index 0000000000..38458f2e08
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakemediaengine.h
@@ -0,0 +1,978 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKEMEDIAENGINE_H_
+#define MEDIA_BASE_FAKEMEDIAENGINE_H_
+
+#include <list>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "api/call/audio_sink.h"
+#include "media/base/audiosource.h"
+#include "media/base/mediaengine.h"
+#include "media/base/rtputils.h"
+#include "media/base/streamparams.h"
+#include "media/engine/webrtcvideoengine.h"
+#include "modules/audio_processing/include/audio_processing.h"
+#include "p2p/base/sessiondescription.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/networkroute.h"
+#include "rtc_base/ptr_util.h"
+#include "rtc_base/stringutils.h"
+
+using webrtc::RtpExtension;
+
+namespace cricket {
+
+class FakeMediaEngine;
+class FakeVideoEngine;
+class FakeVoiceEngine;
+
+// A common helper class that handles sending and receiving RTP/RTCP packets.
+template <class Base> class RtpHelper : public Base {
+ public:
+ RtpHelper()
+ : sending_(false),
+ playout_(false),
+ fail_set_send_codecs_(false),
+ fail_set_recv_codecs_(false),
+ send_ssrc_(0),
+ ready_to_send_(false),
+ transport_overhead_per_packet_(0),
+ num_network_route_changes_(0) {}
+ virtual ~RtpHelper() = default;
+ const std::vector<RtpExtension>& recv_extensions() {
+ return recv_extensions_;
+ }
+ const std::vector<RtpExtension>& send_extensions() {
+ return send_extensions_;
+ }
+ bool sending() const { return sending_; }
+ bool playout() const { return playout_; }
+ const std::list<std::string>& rtp_packets() const { return rtp_packets_; }
+ const std::list<std::string>& rtcp_packets() const { return rtcp_packets_; }
+
+ bool SendRtp(const void* data,
+ size_t len,
+ const rtc::PacketOptions& options) {
+ if (!sending_) {
+ return false;
+ }
+ rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len,
+ kMaxRtpPacketLen);
+ return Base::SendPacket(&packet, options);
+ }
+ bool SendRtcp(const void* data, size_t len) {
+ rtc::CopyOnWriteBuffer packet(reinterpret_cast<const uint8_t*>(data), len,
+ kMaxRtpPacketLen);
+ return Base::SendRtcp(&packet, rtc::PacketOptions());
+ }
+
+ bool CheckRtp(const void* data, size_t len) {
+ bool success = !rtp_packets_.empty();
+ if (success) {
+ std::string packet = rtp_packets_.front();
+ rtp_packets_.pop_front();
+ success = (packet == std::string(static_cast<const char*>(data), len));
+ }
+ return success;
+ }
+ bool CheckRtcp(const void* data, size_t len) {
+ bool success = !rtcp_packets_.empty();
+ if (success) {
+ std::string packet = rtcp_packets_.front();
+ rtcp_packets_.pop_front();
+ success = (packet == std::string(static_cast<const char*>(data), len));
+ }
+ return success;
+ }
+ bool CheckNoRtp() { return rtp_packets_.empty(); }
+ bool CheckNoRtcp() { return rtcp_packets_.empty(); }
+ void set_fail_set_send_codecs(bool fail) { fail_set_send_codecs_ = fail; }
+ void set_fail_set_recv_codecs(bool fail) { fail_set_recv_codecs_ = fail; }
+ virtual bool AddSendStream(const StreamParams& sp) {
+ if (std::find(send_streams_.begin(), send_streams_.end(), sp) !=
+ send_streams_.end()) {
+ return false;
+ }
+ send_streams_.push_back(sp);
+ rtp_send_parameters_[sp.first_ssrc()] =
+ CreateRtpParametersWithOneEncoding();
+ return true;
+ }
+ virtual bool RemoveSendStream(uint32_t ssrc) {
+ auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_send_parameters_.end()) {
+ rtp_send_parameters_.erase(parameters_iterator);
+ }
+ return RemoveStreamBySsrc(&send_streams_, ssrc);
+ }
+ virtual bool AddRecvStream(const StreamParams& sp) {
+ if (std::find(receive_streams_.begin(), receive_streams_.end(), sp) !=
+ receive_streams_.end()) {
+ return false;
+ }
+ receive_streams_.push_back(sp);
+ rtp_receive_parameters_[sp.first_ssrc()] =
+ CreateRtpParametersWithOneEncoding();
+ return true;
+ }
+ virtual bool RemoveRecvStream(uint32_t ssrc) {
+ auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_receive_parameters_.end()) {
+ rtp_receive_parameters_.erase(parameters_iterator);
+ }
+ return RemoveStreamBySsrc(&receive_streams_, ssrc);
+ }
+
+ virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const {
+ auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_send_parameters_.end()) {
+ return parameters_iterator->second;
+ }
+ return webrtc::RtpParameters();
+ }
+ virtual bool SetRtpSendParameters(uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ auto parameters_iterator = rtp_send_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_send_parameters_.end()) {
+ parameters_iterator->second = parameters;
+ return true;
+ }
+ // Replicate the behavior of the real media channel: return false
+ // when setting parameters for unknown SSRCs.
+ return false;
+ }
+
+ virtual webrtc::RtpParameters GetRtpReceiveParameters(uint32_t ssrc) const {
+ auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_receive_parameters_.end()) {
+ return parameters_iterator->second;
+ }
+ return webrtc::RtpParameters();
+ }
+ virtual bool SetRtpReceiveParameters(
+ uint32_t ssrc,
+ const webrtc::RtpParameters& parameters) {
+ auto parameters_iterator = rtp_receive_parameters_.find(ssrc);
+ if (parameters_iterator != rtp_receive_parameters_.end()) {
+ parameters_iterator->second = parameters;
+ return true;
+ }
+ // Replicate the behavior of the real media channel: return false
+ // when setting parameters for unknown SSRCs.
+ return false;
+ }
+
+ bool IsStreamMuted(uint32_t ssrc) const {
+ bool ret = muted_streams_.find(ssrc) != muted_streams_.end();
+ // If |ssrc| == 0, check whether the first send stream is muted.
+ if (!ret && ssrc == 0 && !send_streams_.empty()) {
+ return muted_streams_.find(send_streams_[0].first_ssrc()) !=
+ muted_streams_.end();
+ }
+ return ret;
+ }
+ const std::vector<StreamParams>& send_streams() const {
+ return send_streams_;
+ }
+ const std::vector<StreamParams>& recv_streams() const {
+ return receive_streams_;
+ }
+ bool HasRecvStream(uint32_t ssrc) const {
+ return GetStreamBySsrc(receive_streams_, ssrc) != nullptr;
+ }
+ bool HasSendStream(uint32_t ssrc) const {
+ return GetStreamBySsrc(send_streams_, ssrc) != nullptr;
+ }
+ // TODO(perkj): This is to support legacy unit tests that only check one
+ // sending stream.
+ uint32_t send_ssrc() const {
+ if (send_streams_.empty())
+ return 0;
+ return send_streams_[0].first_ssrc();
+ }
+
+ // TODO(perkj): This is to support legacy unit tests that only check one
+ // sending stream.
+ const std::string rtcp_cname() {
+ if (send_streams_.empty())
+ return "";
+ return send_streams_[0].cname;
+ }
+ const RtcpParameters& send_rtcp_parameters() { return send_rtcp_parameters_; }
+ const RtcpParameters& recv_rtcp_parameters() { return recv_rtcp_parameters_; }
+
+ bool ready_to_send() const {
+ return ready_to_send_;
+ }
+
+ int transport_overhead_per_packet() const {
+ return transport_overhead_per_packet_;
+ }
+
+ rtc::NetworkRoute last_network_route() const { return last_network_route_; }
+ int num_network_route_changes() const { return num_network_route_changes_; }
+ void set_num_network_route_changes(int changes) {
+ num_network_route_changes_ = changes;
+ }
+
+ protected:
+ bool MuteStream(uint32_t ssrc, bool mute) {
+ if (!HasSendStream(ssrc) && ssrc != 0) {
+ return false;
+ }
+ if (mute) {
+ muted_streams_.insert(ssrc);
+ } else {
+ muted_streams_.erase(ssrc);
+ }
+ return true;
+ }
+ bool set_sending(bool send) {
+ sending_ = send;
+ return true;
+ }
+ void set_playout(bool playout) { playout_ = playout; }
+ bool SetRecvRtpHeaderExtensions(const std::vector<RtpExtension>& extensions) {
+ recv_extensions_ = extensions;
+ return true;
+ }
+ bool SetSendRtpHeaderExtensions(const std::vector<RtpExtension>& extensions) {
+ send_extensions_ = extensions;
+ return true;
+ }
+ void set_send_rtcp_parameters(const RtcpParameters& params) {
+ send_rtcp_parameters_ = params;
+ }
+ void set_recv_rtcp_parameters(const RtcpParameters& params) {
+ recv_rtcp_parameters_ = params;
+ }
+ virtual void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketTime& packet_time) {
+ rtp_packets_.push_back(std::string(packet->data<char>(), packet->size()));
+ }
+ virtual void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketTime& packet_time) {
+ rtcp_packets_.push_back(std::string(packet->data<char>(), packet->size()));
+ }
+ virtual void OnReadyToSend(bool ready) {
+ ready_to_send_ = ready;
+ }
+
+ virtual void OnNetworkRouteChanged(const std::string& transport_name,
+ const rtc::NetworkRoute& network_route) {
+ last_network_route_ = network_route;
+ ++num_network_route_changes_;
+ transport_overhead_per_packet_ = network_route.packet_overhead;
+ }
+ bool fail_set_send_codecs() const { return fail_set_send_codecs_; }
+ bool fail_set_recv_codecs() const { return fail_set_recv_codecs_; }
+
+ private:
+ bool sending_;
+ bool playout_;
+ std::vector<RtpExtension> recv_extensions_;
+ std::vector<RtpExtension> send_extensions_;
+ std::list<std::string> rtp_packets_;
+ std::list<std::string> rtcp_packets_;
+ std::vector<StreamParams> send_streams_;
+ std::vector<StreamParams> receive_streams_;
+ RtcpParameters send_rtcp_parameters_;
+ RtcpParameters recv_rtcp_parameters_;
+ std::set<uint32_t> muted_streams_;
+ std::map<uint32_t, webrtc::RtpParameters> rtp_send_parameters_;
+ std::map<uint32_t, webrtc::RtpParameters> rtp_receive_parameters_;
+ bool fail_set_send_codecs_;
+ bool fail_set_recv_codecs_;
+ uint32_t send_ssrc_;
+ std::string rtcp_cname_;
+ bool ready_to_send_;
+ int transport_overhead_per_packet_;
+ rtc::NetworkRoute last_network_route_;
+ int num_network_route_changes_;
+};
+
+class FakeVoiceMediaChannel : public RtpHelper<VoiceMediaChannel> {
+ public:
+ struct DtmfInfo {
+ DtmfInfo(uint32_t ssrc, int event_code, int duration)
+ : ssrc(ssrc),
+ event_code(event_code),
+ duration(duration) {}
+ uint32_t ssrc;
+ int event_code;
+ int duration;
+ };
+ explicit FakeVoiceMediaChannel(FakeVoiceEngine* engine,
+ const AudioOptions& options)
+ : engine_(engine), max_bps_(-1) {
+ output_scalings_[0] = 1.0; // For default channel.
+ SetOptions(options);
+ }
+ ~FakeVoiceMediaChannel();
+ const std::vector<AudioCodec>& recv_codecs() const { return recv_codecs_; }
+ const std::vector<AudioCodec>& send_codecs() const { return send_codecs_; }
+ const std::vector<AudioCodec>& codecs() const { return send_codecs(); }
+ const std::vector<DtmfInfo>& dtmf_info_queue() const {
+ return dtmf_info_queue_;
+ }
+ const AudioOptions& options() const { return options_; }
+ int max_bps() const { return max_bps_; }
+ virtual bool SetSendParameters(const AudioSendParameters& params) {
+ set_send_rtcp_parameters(params.rtcp);
+ return (SetSendCodecs(params.codecs) &&
+ SetSendRtpHeaderExtensions(params.extensions) &&
+ SetMaxSendBandwidth(params.max_bandwidth_bps) &&
+ SetOptions(params.options));
+ }
+
+ virtual bool SetRecvParameters(const AudioRecvParameters& params) {
+ set_recv_rtcp_parameters(params.rtcp);
+ return (SetRecvCodecs(params.codecs) &&
+ SetRecvRtpHeaderExtensions(params.extensions));
+ }
+
+ virtual void SetPlayout(bool playout) { set_playout(playout); }
+ virtual void SetSend(bool send) { set_sending(send); }
+ virtual bool SetAudioSend(uint32_t ssrc,
+ bool enable,
+ const AudioOptions* options,
+ AudioSource* source) {
+ if (!SetLocalSource(ssrc, source)) {
+ return false;
+ }
+ if (!RtpHelper<VoiceMediaChannel>::MuteStream(ssrc, !enable)) {
+ return false;
+ }
+ if (enable && options) {
+ return SetOptions(*options);
+ }
+ return true;
+ }
+
+ bool HasSource(uint32_t ssrc) const {
+ return local_sinks_.find(ssrc) != local_sinks_.end();
+ }
+
+ virtual bool AddRecvStream(const StreamParams& sp) {
+ if (!RtpHelper<VoiceMediaChannel>::AddRecvStream(sp))
+ return false;
+ output_scalings_[sp.first_ssrc()] = 1.0;
+ return true;
+ }
+ virtual bool RemoveRecvStream(uint32_t ssrc) {
+ if (!RtpHelper<VoiceMediaChannel>::RemoveRecvStream(ssrc))
+ return false;
+ output_scalings_.erase(ssrc);
+ return true;
+ }
+
+ virtual bool GetActiveStreams(StreamList* streams) { return true; }
+ virtual int GetOutputLevel() { return 0; }
+
+ virtual bool CanInsertDtmf() {
+ for (std::vector<AudioCodec>::const_iterator it = send_codecs_.begin();
+ it != send_codecs_.end(); ++it) {
+ // Find the DTMF telephone event "codec".
+ if (_stricmp(it->name.c_str(), "telephone-event") == 0) {
+ return true;
+ }
+ }
+ return false;
+ }
+ virtual bool InsertDtmf(uint32_t ssrc,
+ int event_code,
+ int duration) {
+ dtmf_info_queue_.push_back(DtmfInfo(ssrc, event_code, duration));
+ return true;
+ }
+
+ virtual bool SetOutputVolume(uint32_t ssrc, double volume) {
+ if (0 == ssrc) {
+ std::map<uint32_t, double>::iterator it;
+ for (it = output_scalings_.begin(); it != output_scalings_.end(); ++it) {
+ it->second = volume;
+ }
+ return true;
+ } else if (output_scalings_.find(ssrc) != output_scalings_.end()) {
+ output_scalings_[ssrc] = volume;
+ return true;
+ }
+ return false;
+ }
+ bool GetOutputVolume(uint32_t ssrc, double* volume) {
+ if (output_scalings_.find(ssrc) == output_scalings_.end())
+ return false;
+ *volume = output_scalings_[ssrc];
+ return true;
+ }
+
+ virtual bool GetStats(VoiceMediaInfo* info) { return false; }
+
+ virtual void SetRawAudioSink(
+ uint32_t ssrc,
+ std::unique_ptr<webrtc::AudioSinkInterface> sink) {
+ sink_ = std::move(sink);
+ }
+
+ virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const {
+ return std::vector<webrtc::RtpSource>();
+ }
+
+ private:
+ class VoiceChannelAudioSink : public AudioSource::Sink {
+ public:
+ explicit VoiceChannelAudioSink(AudioSource* source) : source_(source) {
+ source_->SetSink(this);
+ }
+ virtual ~VoiceChannelAudioSink() {
+ if (source_) {
+ source_->SetSink(nullptr);
+ }
+ }
+ void OnData(const void* audio_data,
+ int bits_per_sample,
+ int sample_rate,
+ size_t number_of_channels,
+ size_t number_of_frames) override {}
+ void OnClose() override { source_ = nullptr; }
+ AudioSource* source() const { return source_; }
+
+ private:
+ AudioSource* source_;
+ };
+
+ bool SetRecvCodecs(const std::vector<AudioCodec>& codecs) {
+ if (fail_set_recv_codecs()) {
+ // Fake the failure in SetRecvCodecs.
+ return false;
+ }
+ recv_codecs_ = codecs;
+ return true;
+ }
+ bool SetSendCodecs(const std::vector<AudioCodec>& codecs) {
+ if (fail_set_send_codecs()) {
+ // Fake the failure in SetSendCodecs.
+ return false;
+ }
+ send_codecs_ = codecs;
+ return true;
+ }
+ bool SetMaxSendBandwidth(int bps) {
+ max_bps_ = bps;
+ return true;
+ }
+ bool SetOptions(const AudioOptions& options) {
+ // Does a "merge" of current options and set options.
+ options_.SetAll(options);
+ return true;
+ }
+ bool SetLocalSource(uint32_t ssrc, AudioSource* source) {
+ auto it = local_sinks_.find(ssrc);
+ if (source) {
+ if (it != local_sinks_.end()) {
+ RTC_CHECK(it->second->source() == source);
+ } else {
+ local_sinks_.insert(std::make_pair(
+ ssrc, rtc::MakeUnique<VoiceChannelAudioSink>(source)));
+ }
+ } else {
+ if (it != local_sinks_.end()) {
+ local_sinks_.erase(it);
+ }
+ }
+ return true;
+ }
+
+ FakeVoiceEngine* engine_;
+ std::vector<AudioCodec> recv_codecs_;
+ std::vector<AudioCodec> send_codecs_;
+ std::map<uint32_t, double> output_scalings_;
+ std::vector<DtmfInfo> dtmf_info_queue_;
+ AudioOptions options_;
+ std::map<uint32_t, std::unique_ptr<VoiceChannelAudioSink>> local_sinks_;
+ std::unique_ptr<webrtc::AudioSinkInterface> sink_;
+ int max_bps_;
+};
+
+// A helper function to compare the FakeVoiceMediaChannel::DtmfInfo.
+inline bool CompareDtmfInfo(const FakeVoiceMediaChannel::DtmfInfo& info,
+ uint32_t ssrc,
+ int event_code,
+ int duration) {
+ return (info.duration == duration && info.event_code == event_code &&
+ info.ssrc == ssrc);
+}
+
+class FakeVideoMediaChannel : public RtpHelper<VideoMediaChannel> {
+ public:
+ FakeVideoMediaChannel(FakeVideoEngine* engine, const VideoOptions& options)
+ : engine_(engine), max_bps_(-1) {
+ SetOptions(options);
+ }
+
+ ~FakeVideoMediaChannel();
+
+ const std::vector<VideoCodec>& recv_codecs() const { return recv_codecs_; }
+ const std::vector<VideoCodec>& send_codecs() const { return send_codecs_; }
+ const std::vector<VideoCodec>& codecs() const { return send_codecs(); }
+ bool rendering() const { return playout(); }
+ const VideoOptions& options() const { return options_; }
+ const std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*>&
+ sinks() const {
+ return sinks_;
+ }
+ int max_bps() const { return max_bps_; }
+ bool SetSendParameters(const VideoSendParameters& params) override {
+ set_send_rtcp_parameters(params.rtcp);
+ return (SetSendCodecs(params.codecs) &&
+ SetSendRtpHeaderExtensions(params.extensions) &&
+ SetMaxSendBandwidth(params.max_bandwidth_bps));
+ }
+ bool SetRecvParameters(const VideoRecvParameters& params) override {
+ set_recv_rtcp_parameters(params.rtcp);
+ return (SetRecvCodecs(params.codecs) &&
+ SetRecvRtpHeaderExtensions(params.extensions));
+ }
+ bool AddSendStream(const StreamParams& sp) override {
+ return RtpHelper<VideoMediaChannel>::AddSendStream(sp);
+ }
+ bool RemoveSendStream(uint32_t ssrc) override {
+ return RtpHelper<VideoMediaChannel>::RemoveSendStream(ssrc);
+ }
+
+ bool GetSendCodec(VideoCodec* send_codec) override {
+ if (send_codecs_.empty()) {
+ return false;
+ }
+ *send_codec = send_codecs_[0];
+ return true;
+ }
+ bool SetSink(uint32_t ssrc,
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override {
+ if (ssrc != 0 && sinks_.find(ssrc) == sinks_.end()) {
+ return false;
+ }
+ if (ssrc != 0) {
+ sinks_[ssrc] = sink;
+ }
+ return true;
+ }
+ bool HasSink(uint32_t ssrc) const {
+ return sinks_.find(ssrc) != sinks_.end() && sinks_.at(ssrc) != nullptr;
+ }
+
+ bool SetSend(bool send) override { return set_sending(send); }
+ bool SetVideoSend(
+ uint32_t ssrc,
+ bool enable,
+ const VideoOptions* options,
+ rtc::VideoSourceInterface<webrtc::VideoFrame>* source) override {
+ if (!RtpHelper<VideoMediaChannel>::MuteStream(ssrc, !enable)) {
+ return false;
+ }
+ if (enable && options) {
+ if (!SetOptions(*options)) {
+ return false;
+ }
+ }
+ sources_[ssrc] = source;
+ return true;
+ }
+
+ // True if a non-null source was recorded for |ssrc| by SetVideoSend().
+ bool HasSource(uint32_t ssrc) const {
+ return sources_.find(ssrc) != sources_.end() &&
+ sources_.at(ssrc) != nullptr;
+ }
+ // Registers the stream and inserts a null sink placeholder keyed by the
+ // stream's first SSRC (later replaced via SetSink()).
+ bool AddRecvStream(const StreamParams& sp) override {
+ if (!RtpHelper<VideoMediaChannel>::AddRecvStream(sp))
+ return false;
+ sinks_[sp.first_ssrc()] = NULL;
+ return true;
+ }
+ bool RemoveRecvStream(uint32_t ssrc) override {
+ if (!RtpHelper<VideoMediaChannel>::RemoveRecvStream(ssrc))
+ return false;
+ sinks_.erase(ssrc);
+ return true;
+ }
+
+ // Stats are not simulated by this fake: bitrate info is left untouched
+ // and GetStats always reports failure.
+ void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) override {}
+ bool GetStats(VideoMediaInfo* info) override { return false; }
+
+ private:
+ // Stores |codecs| unless the test requested a faked failure.
+ bool SetRecvCodecs(const std::vector<VideoCodec>& codecs) {
+ if (fail_set_recv_codecs()) {
+ // Fake the failure in SetRecvCodecs.
+ return false;
+ }
+ recv_codecs_ = codecs;
+ return true;
+ }
+ // Stores |codecs| unless the test requested a faked failure.
+ bool SetSendCodecs(const std::vector<VideoCodec>& codecs) {
+ if (fail_set_send_codecs()) {
+ // Fake the failure in SetSendCodecs.
+ return false;
+ }
+ send_codecs_ = codecs;
+
+ return true;
+ }
+ // Options/bandwidth setters never fail; they just record the value.
+ bool SetOptions(const VideoOptions& options) {
+ options_ = options;
+ return true;
+ }
+ bool SetMaxSendBandwidth(int bps) {
+ max_bps_ = bps;
+ return true;
+ }
+
+ FakeVideoEngine* engine_; // Owning engine; unregistered from in dtor.
+ std::vector<VideoCodec> recv_codecs_;
+ std::vector<VideoCodec> send_codecs_;
+ std::map<uint32_t, rtc::VideoSinkInterface<webrtc::VideoFrame>*> sinks_;
+ std::map<uint32_t, rtc::VideoSourceInterface<webrtc::VideoFrame>*> sources_;
+ VideoOptions options_;
+ int max_bps_;
+};
+
+// Dummy option class, needed for the DataTraits abstraction in
+// channel_unittest.cc.
+class DataOptions {};
+
+// Fake data channel: records codecs and max bandwidth, and captures the
+// most recently sent payload so tests can inspect it. Sending can be
+// blocked on demand via set_send_blocked().
+class FakeDataMediaChannel : public RtpHelper<DataMediaChannel> {
+ public:
+ // The first parameter is unused; it exists to match the engine's
+ // CreateChannel call shape.
+ explicit FakeDataMediaChannel(void* unused, const DataOptions& options)
+ : send_blocked_(false), max_bps_(-1) {}
+ ~FakeDataMediaChannel() {}
+ const std::vector<DataCodec>& recv_codecs() const { return recv_codecs_; }
+ const std::vector<DataCodec>& send_codecs() const { return send_codecs_; }
+ const std::vector<DataCodec>& codecs() const { return send_codecs(); }
+ // -1 until SetSendParameters() records a value.
+ int max_bps() const { return max_bps_; }
+
+ // Records RTCP parameters, codecs and max bandwidth; false only on a
+ // faked codec failure.
+ virtual bool SetSendParameters(const DataSendParameters& params) {
+ set_send_rtcp_parameters(params.rtcp);
+ return (SetSendCodecs(params.codecs) &&
+ SetMaxSendBandwidth(params.max_bandwidth_bps));
+ }
+ virtual bool SetRecvParameters(const DataRecvParameters& params) {
+ set_recv_rtcp_parameters(params.rtcp);
+ return SetRecvCodecs(params.codecs);
+ }
+ virtual bool SetSend(bool send) { return set_sending(send); }
+ virtual bool SetReceive(bool receive) {
+ set_playout(receive);
+ return true;
+ }
+ // Stream management delegates to RtpHelper's bookkeeping.
+ virtual bool AddRecvStream(const StreamParams& sp) {
+ if (!RtpHelper<DataMediaChannel>::AddRecvStream(sp))
+ return false;
+ return true;
+ }
+ virtual bool RemoveRecvStream(uint32_t ssrc) {
+ if (!RtpHelper<DataMediaChannel>::RemoveRecvStream(ssrc))
+ return false;
+ return true;
+ }
+
+ // Captures |params| and |payload| for later inspection, or reports
+ // SDR_BLOCK when sending has been blocked by the test.
+ virtual bool SendData(const SendDataParams& params,
+ const rtc::CopyOnWriteBuffer& payload,
+ SendDataResult* result) {
+ if (send_blocked_) {
+ *result = SDR_BLOCK;
+ return false;
+ } else {
+ last_sent_data_params_ = params;
+ last_sent_data_ = std::string(payload.data<char>(), payload.size());
+ return true;
+ }
+ }
+
+ // Inspection of the last successful SendData() call.
+ SendDataParams last_sent_data_params() { return last_sent_data_params_; }
+ std::string last_sent_data() { return last_sent_data_; }
+ bool is_send_blocked() { return send_blocked_; }
+ void set_send_blocked(bool blocked) { send_blocked_ = blocked; }
+
+ private:
+ bool SetRecvCodecs(const std::vector<DataCodec>& codecs) {
+ if (fail_set_recv_codecs()) {
+ // Fake the failure in SetRecvCodecs.
+ return false;
+ }
+ recv_codecs_ = codecs;
+ return true;
+ }
+ bool SetSendCodecs(const std::vector<DataCodec>& codecs) {
+ if (fail_set_send_codecs()) {
+ // Fake the failure in SetSendCodecs.
+ return false;
+ }
+ send_codecs_ = codecs;
+ return true;
+ }
+ bool SetMaxSendBandwidth(int bps) {
+ max_bps_ = bps;
+ return true;
+ }
+
+ std::vector<DataCodec> recv_codecs_;
+ std::vector<DataCodec> send_codecs_;
+ SendDataParams last_sent_data_params_;
+ std::string last_sent_data_;
+ bool send_blocked_;
+ int max_bps_;
+};
+
+// A base class for all of the shared parts between FakeVoiceEngine
+// and FakeVideoEngine: channel-creation failure injection and RTP
+// capability (header extension) configuration.
+class FakeBaseEngine {
+ public:
+ FakeBaseEngine()
+ : options_changed_(false),
+ fail_create_channel_(false) {}
+ // When set, CreateChannel() in the derived engines returns nullptr.
+ void set_fail_create_channel(bool fail) { fail_create_channel_ = fail; }
+
+ RtpCapabilities GetCapabilities() const { return capabilities_; }
+ // Replaces the advertised header extensions wholesale.
+ void set_rtp_header_extensions(const std::vector<RtpExtension>& extensions) {
+ capabilities_.header_extensions = extensions;
+ }
+
+ // Legacy overload: converts cricket::RtpHeaderExtension entries and
+ // appends them to (rather than replaces) the advertised extensions.
+ void set_rtp_header_extensions(
+ const std::vector<cricket::RtpHeaderExtension>& extensions) {
+ for (const cricket::RtpHeaderExtension& ext : extensions) {
+ RtpExtension webrtc_ext;
+ webrtc_ext.uri = ext.uri;
+ webrtc_ext.id = ext.id;
+ capabilities_.header_extensions.push_back(webrtc_ext);
+ }
+ }
+
+ protected:
+ // Flag used by optionsmessagehandler_unittest for checking whether any
+ // relevant setting has been updated.
+ // TODO(thaloun): Replace with explicit checks of before & after values.
+ bool options_changed_;
+ bool fail_create_channel_;
+ RtpCapabilities capabilities_;
+};
+
+// Fake voice engine: tracks the channels it creates and exposes a single
+// configurable codec list used for both send and receive.
+class FakeVoiceEngine : public FakeBaseEngine {
+ public:
+ FakeVoiceEngine() {
+ // Add a fake audio codec. Note that the name must not be "" as there are
+ // sanity checks against that.
+ codecs_.push_back(AudioCodec(101, "fake_audio_codec", 0, 0, 1));
+ }
+ void Init() {}
+ // This fake has no audio state; always returns a null ref.
+ rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
+ return rtc::scoped_refptr<webrtc::AudioState>();
+ }
+
+ // Returns a new channel registered with this engine, or nullptr when
+ // failure injection is enabled. Callers own the returned channel; its
+ // destructor unregisters it (see ~FakeVoiceMediaChannel below).
+ VoiceMediaChannel* CreateChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const AudioOptions& options) {
+ if (fail_create_channel_) {
+ return nullptr;
+ }
+
+ FakeVoiceMediaChannel* ch = new FakeVoiceMediaChannel(this, options);
+ channels_.push_back(ch);
+ return ch;
+ }
+ FakeVoiceMediaChannel* GetChannel(size_t index) {
+ return (channels_.size() > index) ? channels_[index] : NULL;
+ }
+ // NOTE(review): unlike FakeVideoEngine::UnregisterChannel there is no
+ // check that |channel| is actually present; erasing std::find's end()
+ // result would be undefined behavior. Callers must only pass channels
+ // created by this engine.
+ void UnregisterChannel(VoiceMediaChannel* channel) {
+ channels_.erase(std::find(channels_.begin(), channels_.end(), channel));
+ }
+
+ // TODO(ossu): For proper testing, these should either be individually
+ // settable or the voice engine should reference mockable factories.
+ const std::vector<AudioCodec>& send_codecs() { return codecs_; }
+ const std::vector<AudioCodec>& recv_codecs() { return codecs_; }
+ void SetCodecs(const std::vector<AudioCodec>& codecs) { codecs_ = codecs; }
+
+ // Always reports a zero input level.
+ int GetInputLevel() { return 0; }
+
+ // AEC dump and RTC event log are not supported by this fake.
+ bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) {
+ return false;
+ }
+
+ void StopAecDump() {}
+
+ bool StartRtcEventLog(rtc::PlatformFile file, int64_t max_size_bytes) {
+ return false;
+ }
+
+ void StopRtcEventLog() {}
+
+ private:
+ std::vector<FakeVoiceMediaChannel*> channels_; // Not owned.
+ std::vector<AudioCodec> codecs_;
+
+ friend class FakeMediaEngine;
+};
+
+// Fake video engine: tracks the channels it creates, a configurable codec
+// list, and a capture flag for test inspection.
+class FakeVideoEngine : public FakeBaseEngine {
+ public:
+ FakeVideoEngine() : capture_(false) {
+ // Add a fake video codec. Note that the name must not be "" as there are
+ // sanity checks against that.
+ codecs_.push_back(VideoCodec(0, "fake_video_codec"));
+ }
+
+ // Records options and flags the change for tests (see options_changed()).
+ bool SetOptions(const VideoOptions& options) {
+ options_ = options;
+ options_changed_ = true;
+ return true;
+ }
+
+ // Returns a new channel registered with this engine, or nullptr when
+ // failure injection is enabled. Callers own the returned channel; its
+ // destructor unregisters it (see ~FakeVideoMediaChannel below).
+ VideoMediaChannel* CreateChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options) {
+ if (fail_create_channel_) {
+ return nullptr;
+ }
+
+ FakeVideoMediaChannel* ch = new FakeVideoMediaChannel(this, options);
+ channels_.emplace_back(ch);
+ return ch;
+ }
+
+ FakeVideoMediaChannel* GetChannel(size_t index) {
+ return (channels_.size() > index) ? channels_[index] : nullptr;
+ }
+
+ void UnregisterChannel(VideoMediaChannel* channel) {
+ auto it = std::find(channels_.begin(), channels_.end(), channel);
+ RTC_DCHECK(it != channels_.end());
+ channels_.erase(it);
+ }
+
+ const std::vector<VideoCodec>& codecs() const { return codecs_; }
+
+ // NOTE(review): |codecs| is taken by value (const std::vector, not a
+ // reference), which copies the vector on every call.
+ void SetCodecs(const std::vector<VideoCodec> codecs) { codecs_ = codecs; }
+
+ bool SetCapture(bool capture) {
+ capture_ = capture;
+ return true;
+ }
+
+ private:
+ std::vector<FakeVideoMediaChannel*> channels_; // Not owned.
+ std::vector<VideoCodec> codecs_;
+ bool capture_;
+ VideoOptions options_;
+
+ friend class FakeMediaEngine;
+};
+
+// Composite fake engine combining FakeVoiceEngine and FakeVideoEngine.
+// Forwards codec/extension configuration and channel lookup to the two
+// sub-engines (it is a friend of both, so it can reach private state).
+class FakeMediaEngine :
+ public CompositeMediaEngine<FakeVoiceEngine, FakeVideoEngine> {
+ public:
+ // Both sub-engines are default-constructed (empty argument tuples).
+ FakeMediaEngine()
+ : CompositeMediaEngine<FakeVoiceEngine, FakeVideoEngine>(std::tuple<>(),
+ std::tuple<>()) {
+ }
+
+ virtual ~FakeMediaEngine() {}
+
+ void SetAudioCodecs(const std::vector<AudioCodec>& codecs) {
+ voice().SetCodecs(codecs);
+ }
+ void SetVideoCodecs(const std::vector<VideoCodec>& codecs) {
+ video().SetCodecs(codecs);
+ }
+
+ void SetAudioRtpHeaderExtensions(
+ const std::vector<RtpExtension>& extensions) {
+ voice().set_rtp_header_extensions(extensions);
+ }
+ void SetVideoRtpHeaderExtensions(
+ const std::vector<RtpExtension>& extensions) {
+ video().set_rtp_header_extensions(extensions);
+ }
+
+ // Legacy cricket::RtpHeaderExtension overloads (appending semantics; see
+ // FakeBaseEngine).
+ void SetAudioRtpHeaderExtensions(
+ const std::vector<cricket::RtpHeaderExtension>& extensions) {
+ voice().set_rtp_header_extensions(extensions);
+ }
+ void SetVideoRtpHeaderExtensions(
+ const std::vector<cricket::RtpHeaderExtension>& extensions) {
+ video().set_rtp_header_extensions(extensions);
+ }
+
+ FakeVoiceMediaChannel* GetVoiceChannel(size_t index) {
+ return voice().GetChannel(index);
+ }
+ FakeVideoMediaChannel* GetVideoChannel(size_t index) {
+ return video().GetChannel(index);
+ }
+
+ // Test-inspection helpers reaching into the video sub-engine.
+ bool capture() const { return video().capture_; }
+ bool options_changed() const { return video().options_changed_; }
+ void clear_options_changed() { video().options_changed_ = false; }
+ // Enables channel-creation failure injection on both sub-engines.
+ void set_fail_create_channel(bool fail) {
+ voice().set_fail_create_channel(fail);
+ video().set_fail_create_channel(fail);
+ }
+};
+
+// Have to come afterwards due to declaration order: the destructors
+// unregister the channel from its engine, whose full definition only
+// appears above.
+inline FakeVoiceMediaChannel::~FakeVoiceMediaChannel() {
+ if (engine_) {
+ engine_->UnregisterChannel(this);
+ }
+}
+
+inline FakeVideoMediaChannel::~FakeVideoMediaChannel() {
+ if (engine_) {
+ engine_->UnregisterChannel(this);
+ }
+}
+
+// Fake data engine: creates FakeDataMediaChannels and holds a
+// configurable data codec list.
+class FakeDataEngine : public DataEngineInterface {
+ public:
+ // Returns a new channel tracked by this engine; callers own it. The
+ // engine pointer is passed as the channel's (unused) void* parameter.
+ virtual DataMediaChannel* CreateChannel(const MediaConfig& config) {
+ FakeDataMediaChannel* ch = new FakeDataMediaChannel(this, DataOptions());
+ channels_.push_back(ch);
+ return ch;
+ }
+
+ FakeDataMediaChannel* GetChannel(size_t index) {
+ return (channels_.size() > index) ? channels_[index] : NULL;
+ }
+
+ // NOTE(review): no check that |channel| is present; std::find may return
+ // end(), and erasing end() is undefined behavior. Only pass channels
+ // created by this engine.
+ void UnregisterChannel(DataMediaChannel* channel) {
+ channels_.erase(std::find(channels_.begin(), channels_.end(), channel));
+ }
+
+ virtual void SetDataCodecs(const std::vector<DataCodec>& data_codecs) {
+ data_codecs_ = data_codecs;
+ }
+
+ virtual const std::vector<DataCodec>& data_codecs() { return data_codecs_; }
+
+ private:
+ std::vector<FakeDataMediaChannel*> channels_; // Not owned.
+ std::vector<DataCodec> data_codecs_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKEMEDIAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h b/third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h
new file mode 100644
index 0000000000..3d98d1f917
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakenetworkinterface.h
@@ -0,0 +1,228 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKENETWORKINTERFACE_H_
+#define MEDIA_BASE_FAKENETWORKINTERFACE_H_
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/rtputils.h"
+#include "rtc_base/byteorder.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/dscp.h"
+#include "rtc_base/messagehandler.h"
+#include "rtc_base/messagequeue.h"
+#include "rtc_base/thread.h"
+
+namespace cricket {
+
+// Fake NetworkInterface that sends/receives RTP/RTCP packets. Outgoing
+// packets are recorded for inspection and delivered asynchronously to the
+// destination channel by posting messages to the thread that created this
+// object.
+class FakeNetworkInterface : public MediaChannel::NetworkInterface,
+ public rtc::MessageHandler {
+ public:
+ FakeNetworkInterface()
+ : thread_(rtc::Thread::Current()),
+ dest_(NULL),
+ conf_(false),
+ sendbuf_size_(-1),
+ recvbuf_size_(-1),
+ dscp_(rtc::DSCP_NO_CHANGE) {
+ }
+
+ // Sets the channel that receives posted packets. May be null (packets
+ // are then recorded but not delivered).
+ void SetDestination(MediaChannel* dest) { dest_ = dest; }
+
+ // Conference mode is a mode where instead of simply forwarding the packets,
+ // the transport will send multiple copies of the packet with the specified
+ // SSRCs. This allows us to simulate receiving media from multiple sources.
+ void SetConferenceMode(bool conf, const std::vector<uint32_t>& ssrcs) {
+ rtc::CritScope cs(&crit_);
+ conf_ = conf;
+ conf_sent_ssrcs_ = ssrcs;
+ }
+
+ // Total RTP payload bytes recorded so far, across all SSRCs.
+ int NumRtpBytes() {
+ rtc::CritScope cs(&crit_);
+ int bytes = 0;
+ for (size_t i = 0; i < rtp_packets_.size(); ++i) {
+ bytes += static_cast<int>(rtp_packets_[i].size());
+ }
+ return bytes;
+ }
+
+ int NumRtpBytes(uint32_t ssrc) {
+ rtc::CritScope cs(&crit_);
+ int bytes = 0;
+ GetNumRtpBytesAndPackets(ssrc, &bytes, NULL);
+ return bytes;
+ }
+
+ int NumRtpPackets() {
+ rtc::CritScope cs(&crit_);
+ return static_cast<int>(rtp_packets_.size());
+ }
+
+ int NumRtpPackets(uint32_t ssrc) {
+ rtc::CritScope cs(&crit_);
+ int packets = 0;
+ GetNumRtpBytesAndPackets(ssrc, NULL, &packets);
+ return packets;
+ }
+
+ // Number of distinct SSRCs observed in outgoing RTP packets.
+ int NumSentSsrcs() {
+ rtc::CritScope cs(&crit_);
+ return static_cast<int>(sent_ssrcs_.size());
+ }
+
+ // Note: callers are responsible for deleting the returned buffer.
+ // Returns NULL if |index| is out of range.
+ const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
+ rtc::CritScope cs(&crit_);
+ if (index >= NumRtpPackets()) {
+ return NULL;
+ }
+ return new rtc::CopyOnWriteBuffer(rtp_packets_[index]);
+ }
+
+ int NumRtcpPackets() {
+ rtc::CritScope cs(&crit_);
+ return static_cast<int>(rtcp_packets_.size());
+ }
+
+ // Note: callers are responsible for deleting the returned buffer.
+ const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) {
+ rtc::CritScope cs(&crit_);
+ if (index >= NumRtcpPackets()) {
+ return NULL;
+ }
+ return new rtc::CopyOnWriteBuffer(rtcp_packets_[index]);
+ }
+
+ // Last values recorded by SetOption(); -1 / DSCP_NO_CHANGE if never set.
+ int sendbuf_size() const { return sendbuf_size_; }
+ int recvbuf_size() const { return recvbuf_size_; }
+ rtc::DiffServCodePoint dscp() const { return dscp_; }
+
+ protected:
+ // Records the packet and its SSRC, then posts it (once normally, or once
+ // per configured SSRC with the SSRC rewritten in conference mode) for
+ // asynchronous delivery to |dest_|. Fails if the SSRC cannot be parsed.
+ virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options) {
+ rtc::CritScope cs(&crit_);
+
+ uint32_t cur_ssrc = 0;
+ if (!GetRtpSsrc(packet->data(), packet->size(), &cur_ssrc)) {
+ return false;
+ }
+ sent_ssrcs_[cur_ssrc]++;
+
+ rtp_packets_.push_back(*packet);
+ if (conf_) {
+ for (size_t i = 0; i < conf_sent_ssrcs_.size(); ++i) {
+ if (!SetRtpSsrc(packet->data(), packet->size(),
+ conf_sent_ssrcs_[i])) {
+ return false;
+ }
+ PostMessage(ST_RTP, *packet);
+ }
+ } else {
+ PostMessage(ST_RTP, *packet);
+ }
+ return true;
+ }
+
+ virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+ const rtc::PacketOptions& options) {
+ rtc::CritScope cs(&crit_);
+ rtcp_packets_.push_back(*packet);
+ if (!conf_) {
+ // don't worry about RTCP in conf mode for now
+ PostMessage(ST_RTCP, *packet);
+ }
+ return true;
+ }
+
+ // Records socket options for later inspection; always reports success.
+ virtual int SetOption(SocketType type, rtc::Socket::Option opt,
+ int option) {
+ if (opt == rtc::Socket::OPT_SNDBUF) {
+ sendbuf_size_ = option;
+ } else if (opt == rtc::Socket::OPT_RCVBUF) {
+ recvbuf_size_ = option;
+ } else if (opt == rtc::Socket::OPT_DSCP) {
+ dscp_ = static_cast<rtc::DiffServCodePoint>(option);
+ }
+ return 0;
+ }
+
+ void PostMessage(int id, const rtc::CopyOnWriteBuffer& packet) {
+ thread_->Post(RTC_FROM_HERE, this, id, rtc::WrapMessageData(packet));
+ }
+
+ // Delivers a previously posted packet to |dest_| (RTP or RTCP depending
+ // on the message id) and frees the message data.
+ virtual void OnMessage(rtc::Message* msg) {
+ rtc::TypedMessageData<rtc::CopyOnWriteBuffer>* msg_data =
+ static_cast<rtc::TypedMessageData<rtc::CopyOnWriteBuffer>*>(
+ msg->pdata);
+ if (dest_) {
+ if (msg->message_id == ST_RTP) {
+ dest_->OnPacketReceived(&msg_data->data(),
+ rtc::CreatePacketTime(0));
+ } else {
+ dest_->OnRtcpReceived(&msg_data->data(),
+ rtc::CreatePacketTime(0));
+ }
+ }
+ delete msg_data;
+ }
+
+ private:
+ // Sums size/count of recorded RTP packets whose SSRC matches |ssrc|.
+ // Either output pointer may be null. Stops early if a packet's SSRC
+ // cannot be parsed.
+ void GetNumRtpBytesAndPackets(uint32_t ssrc, int* bytes, int* packets) {
+ if (bytes) {
+ *bytes = 0;
+ }
+ if (packets) {
+ *packets = 0;
+ }
+ uint32_t cur_ssrc = 0;
+ for (size_t i = 0; i < rtp_packets_.size(); ++i) {
+ if (!GetRtpSsrc(rtp_packets_[i].data(), rtp_packets_[i].size(),
+ &cur_ssrc)) {
+ return;
+ }
+ if (ssrc == cur_ssrc) {
+ if (bytes) {
+ *bytes += static_cast<int>(rtp_packets_[i].size());
+ }
+ if (packets) {
+ ++(*packets);
+ }
+ }
+ }
+ }
+
+ rtc::Thread* thread_;
+ MediaChannel* dest_;
+ bool conf_;
+ // The ssrcs used in sending out packets in conference mode.
+ std::vector<uint32_t> conf_sent_ssrcs_;
+ // Map to track counts of packets that have been sent per ssrc.
+ // This includes packets that are dropped.
+ std::map<uint32_t, uint32_t> sent_ssrcs_;
+ // Map to track packet-number that needs to be dropped per ssrc.
+ // NOTE(review): not referenced by any method in this class as written —
+ // confirm whether it is still needed.
+ std::map<uint32_t, std::set<uint32_t> > drop_map_;
+ rtc::CriticalSection crit_;
+ std::vector<rtc::CopyOnWriteBuffer> rtp_packets_;
+ std::vector<rtc::CopyOnWriteBuffer> rtcp_packets_;
+ int sendbuf_size_;
+ int recvbuf_size_;
+ rtc::DiffServCodePoint dscp_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKENETWORKINTERFACE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakertp.cc b/third_party/libwebrtc/webrtc/media/base/fakertp.cc
new file mode 100644
index 0000000000..be1631b73f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakertp.cc
@@ -0,0 +1,66 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+
+#include "media/base/fakertp.h"
+#include "rtc_base/gunit.h"
+
+// Walks the RFC 5285 one-byte RTP header extensions of two packets (both
+// expected to be laid out like kPcmuFrameWithExtensions) and verifies each
+// extension element is byte-identical between them — except elements whose
+// id appears in |encrypted_headers|, which must differ unless
+// |expect_equal| is true.
+// NOTE(review): |encrypted_headers| is passed by value; a const reference
+// would avoid a copy (signature kept to match the declaration in
+// fakertp.h).
+void CompareHeaderExtensions(const char* packet1, size_t packet1_size,
+ const char* packet2, size_t packet2_size,
+ const std::vector<int> encrypted_headers, bool expect_equal) {
+ // Sanity check: packets must be large enough to contain the RTP header and
+ // extensions header.
+ RTC_CHECK_GE(packet1_size, 12 + 4);
+ RTC_CHECK_GE(packet2_size, 12 + 4);
+ // RTP extension headers are the same.
+ EXPECT_EQ(0, memcmp(packet1 + 12, packet2 + 12, 4));
+ // Check for one-byte header extensions (0xBEDE profile marker).
+ EXPECT_EQ('\xBE', packet1[12]);
+ EXPECT_EQ('\xDE', packet1[13]);
+ // Determine position and size of extension headers.
+ size_t extension_words = packet1[14] << 8 | packet1[15];
+ const char* extension_data1 = packet1 + 12 + 4;
+ const char* extension_end1 = extension_data1 + extension_words * 4;
+ const char* extension_data2 = packet2 + 12 + 4;
+ // Sanity check: packets must be large enough to contain the RTP header
+ // extensions.
+ RTC_CHECK_GE(packet1_size, 12 + 4 + extension_words * 4);
+ RTC_CHECK_GE(packet2_size, 12 + 4 + extension_words * 4);
+ while (extension_data1 < extension_end1) {
+ // One-byte header: upper nibble is the id, lower nibble is length - 1.
+ uint8_t id = (*extension_data1 & 0xf0) >> 4;
+ uint8_t len = (*extension_data1 & 0x0f) +1;
+ extension_data1++;
+ extension_data2++;
+ EXPECT_LE(extension_data1, extension_end1);
+ if (id == 15) {
+ // Finished parsing: id 15 is reserved and terminates the list.
+ break;
+ }
+
+ // The header extension doesn't get encrypted if the id is not in the
+ // list of header extensions to encrypt.
+ if (expect_equal ||
+ std::find(encrypted_headers.begin(), encrypted_headers.end(), id)
+ == encrypted_headers.end()) {
+ EXPECT_EQ(0, memcmp(extension_data1, extension_data2, len));
+ } else {
+ EXPECT_NE(0, memcmp(extension_data1, extension_data2, len));
+ }
+
+ extension_data1 += len;
+ extension_data2 += len;
+ // Skip padding (zero bytes between extension elements).
+ while (extension_data1 < extension_end1 && *extension_data1 == 0) {
+ extension_data1++;
+ extension_data2++;
+ }
+ }
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/fakertp.h b/third_party/libwebrtc/webrtc/media/base/fakertp.h
new file mode 100644
index 0000000000..6786bab6d9
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakertp.h
@@ -0,0 +1,140 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Fake RTP and RTCP packets to use in unit tests.
+
+#ifndef MEDIA_BASE_FAKERTP_H_
+#define MEDIA_BASE_FAKERTP_H_
+
+#include <vector>
+
+// A typical PCMU RTP packet.
+// PT=0, SN=1, TS=0, SSRC=1
+// all data FF
+static const unsigned char kPcmuFrame[] = {
+ 0x80, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+};
+
+static const int kHeaderExtensionIDs[] = {1, 4};
+
+// A typical PCMU RTP packet with header extensions.
+// PT=0, SN=1, TS=0, SSRC=1
+// all data FF
+static const unsigned char kPcmuFrameWithExtensions[] = {
+ 0x90, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
+ // RFC 5285, section 4.2. One-Byte Header.
+ 0xBE, 0xDE,
+ // Header extension length 6 * 32 bits.
+ 0x00, 0x06,
+ // 8 bytes header id 1.
+ 0x17, 0x41, 0x42, 0x73, 0xA4, 0x75, 0x26, 0x27, 0x48,
+ // 3 bytes header id 2.
+ 0x22, 0x00, 0x00, 0xC8,
+ // 1 byte header id 3.
+ 0x30, 0x8E,
+ // 7 bytes header id 4.
+ 0x46, 0x55, 0x99, 0x63, 0x86, 0xB3, 0x95, 0xFB,
+ // 1 byte header padding.
+ 0x00,
+ // Payload data.
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+};
+
+// A typical Receiver Report RTCP packet.
+// PT=RR, LN=1, SSRC=1
+// send SSRC=2, all other fields 0
+static const unsigned char kRtcpReport[] = {
+ 0x80, 0xc9, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01,
+ 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+};
+
+// PT = 97, TS = 0, Seq = 1, SSRC = 2
+// H264 - NRI = 1, Type = 1, bit stream = FF
+
+static const unsigned char kH264Packet[] = {
+ 0x80, 0x61, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02,
+ 0x21, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+ 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF,
+};
+
+// PT= 101, SN=2, TS=3, SSRC = 4
+static const unsigned char kDataPacket[] = {
+ 0x80, 0x65, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x04,
+ 0x00, 0x00, 0x00, 0x00,
+ 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08,
+};
+
+// This expects both packets to be based on kPcmuFrameWithExtensions.
+// Header extensions with an id in "encrypted_headers" are expected to be
+// different in the packets unless "expect_equal" is set to "true".
+void CompareHeaderExtensions(const char* packet1, size_t packet1_size,
+ const char* packet2, size_t packet2_size,
+ const std::vector<int> encrypted_headers, bool expect_equal);
+
+#endif // MEDIA_BASE_FAKERTP_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h b/third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h
new file mode 100644
index 0000000000..536fe16356
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakevideocapturer.h
@@ -0,0 +1,152 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKEVIDEOCAPTURER_H_
+#define MEDIA_BASE_FAKEVIDEOCAPTURER_H_
+
+#include <string.h>
+
+#include <memory>
+#include <vector>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "media/base/videocapturer.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/timeutils.h"
+
+namespace cricket {
+
+// Fake video capturer that allows the test to manually pump in frames.
+class FakeVideoCapturer : public cricket::VideoCapturer {
+ public:
+ explicit FakeVideoCapturer(bool is_screencast)
+ : running_(false),
+ initial_timestamp_(rtc::TimeNanos()),
+ next_timestamp_(rtc::kNumNanosecsPerMillisec),
+ is_screencast_(is_screencast),
+ rotation_(webrtc::kVideoRotation_0) {
+ // Default supported formats. Use ResetSupportedFormats to overwrite.
+ std::vector<cricket::VideoFormat> formats;
+ formats.push_back(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(640, 480,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(320, 240,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(160, 120,
+ cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+ formats.push_back(cricket::VideoFormat(1280, 720,
+ cricket::VideoFormat::FpsToInterval(60), cricket::FOURCC_I420));
+ ResetSupportedFormats(formats);
+ }
+ // Convenience: a non-screencast capturer.
+ FakeVideoCapturer() : FakeVideoCapturer(false) {}
+
+ ~FakeVideoCapturer() {
+ SignalDestroyed(this);
+ }
+
+ void ResetSupportedFormats(const std::vector<cricket::VideoFormat>& formats) {
+ SetSupportedFormats(formats);
+ }
+ // Pumps one frame using the current capture format; false if Start()
+ // has not established a format yet.
+ bool CaptureFrame() {
+ if (!GetCaptureFormat()) {
+ return false;
+ }
+ return CaptureCustomFrame(GetCaptureFormat()->width,
+ GetCaptureFormat()->height,
+ GetCaptureFormat()->interval,
+ GetCaptureFormat()->fourcc);
+ }
+ bool CaptureCustomFrame(int width, int height, uint32_t fourcc) {
+ // Default to 30fps.
+ return CaptureCustomFrame(width, height, rtc::kNumNanosecsPerSec / 30,
+ fourcc);
+ }
+ // Pumps one zero-initialized I420 frame of the given dimensions and
+ // advances the fake timestamp by |timestamp_interval| nanoseconds.
+ // Returns false when the capturer is not running; only FOURCC_I420 is
+ // supported.
+ bool CaptureCustomFrame(int width,
+ int height,
+ int64_t timestamp_interval,
+ uint32_t fourcc) {
+ if (!running_) {
+ return false;
+ }
+ RTC_CHECK(fourcc == FOURCC_I420);
+ RTC_CHECK(width > 0);
+ RTC_CHECK(height > 0);
+
+ int adapted_width;
+ int adapted_height;
+ int crop_width;
+ int crop_height;
+ int crop_x;
+ int crop_y;
+
+ // TODO(nisse): It's a bit silly to have this logic in a fake
+ // class. Child classes of VideoCapturer are expected to call
+ // AdaptFrame, and the test case
+ // VideoCapturerTest.SinkWantsMaxPixelAndMaxPixelCountStepUp
+ // depends on this.
+ if (AdaptFrame(width, height,
+ next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
+ next_timestamp_ / rtc::kNumNanosecsPerMicrosec,
+ &adapted_width, &adapted_height, &crop_width, &crop_height,
+ &crop_x, &crop_y, nullptr)) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+ webrtc::I420Buffer::Create(adapted_width, adapted_height));
+ buffer->InitializeData();
+
+ OnFrame(webrtc::VideoFrame(
+ buffer, rotation_,
+ next_timestamp_ / rtc::kNumNanosecsPerMicrosec),
+ width, height);
+ }
+ next_timestamp_ += timestamp_interval;
+
+ return true;
+ }
+
+ // Fired from the destructor so tests can track capturer lifetime.
+ sigslot::signal1<FakeVideoCapturer*> SignalDestroyed;
+
+ cricket::CaptureState Start(const cricket::VideoFormat& format) override {
+ SetCaptureFormat(&format);
+ running_ = true;
+ SetCaptureState(cricket::CS_RUNNING);
+ return cricket::CS_RUNNING;
+ }
+ void Stop() override {
+ running_ = false;
+ SetCaptureFormat(NULL);
+ SetCaptureState(cricket::CS_STOPPED);
+ }
+ bool IsRunning() override { return running_; }
+ bool IsScreencast() const override { return is_screencast_; }
+ bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) override {
+ fourccs->push_back(cricket::FOURCC_I420);
+ fourccs->push_back(cricket::FOURCC_MJPG);
+ return true;
+ }
+
+ // Rotation applied to frames produced by CaptureCustomFrame().
+ void SetRotation(webrtc::VideoRotation rotation) {
+ rotation_ = rotation;
+ }
+
+ webrtc::VideoRotation GetRotation() { return rotation_; }
+
+ private:
+ bool running_;
+ int64_t initial_timestamp_;
+ int64_t next_timestamp_; // Fake capture time of the next frame, in ns.
+ const bool is_screencast_;
+ webrtc::VideoRotation rotation_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKEVIDEOCAPTURER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h b/third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h
new file mode 100644
index 0000000000..630063057b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/fakevideorenderer.h
@@ -0,0 +1,142 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_FAKEVIDEORENDERER_H_
+#define MEDIA_BASE_FAKEVIDEORENDERER_H_
+
+#include "api/video/video_frame.h"
+#include "media/base/videosinkinterface.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/logging.h"
+
+namespace cricket {
+
+// Faked video renderer that has a callback for actions on rendering.
+class FakeVideoRenderer : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ FakeVideoRenderer()
+ : errors_(0),
+ width_(0),
+ height_(0),
+ rotation_(webrtc::kVideoRotation_0),
+ timestamp_us_(0),
+ num_rendered_frames_(0),
+ black_frame_(false) {}
+
+ virtual void OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::CritScope cs(&crit_);
+ // TODO(zhurunz) Check with VP8 team to see if we can remove this
+ // tolerance on Y values. Some unit tests produce Y values close
+ // to 16 rather than close to zero, for supposedly black frames.
+ // Largest value observed is 34, e.g., running
+ // PeerConnectionIntegrationTest.SendAndReceive16To9AspectRatio.
+ black_frame_ = CheckFrameColorYuv(0, 48, 128, 128, 128, 128, &frame);
+ // Treat unexpected frame size as error.
+ ++num_rendered_frames_;
+ width_ = frame.width();
+ height_ = frame.height();
+ rotation_ = frame.rotation();
+ timestamp_us_ = frame.timestamp_us();
+ }
+
+ int errors() const { return errors_; }
+ int width() const {
+ rtc::CritScope cs(&crit_);
+ return width_;
+ }
+ int height() const {
+ rtc::CritScope cs(&crit_);
+ return height_;
+ }
+ webrtc::VideoRotation rotation() const {
+ rtc::CritScope cs(&crit_);
+ return rotation_;
+ }
+
+ int64_t timestamp_us() const {
+ rtc::CritScope cs(&crit_);
+ return timestamp_us_;
+ }
+ int num_rendered_frames() const {
+ rtc::CritScope cs(&crit_);
+ return num_rendered_frames_;
+ }
+ bool black_frame() const {
+ rtc::CritScope cs(&crit_);
+ return black_frame_;
+ }
+
+ private:
+ static bool CheckFrameColorYuv(uint8_t y_min,
+ uint8_t y_max,
+ uint8_t u_min,
+ uint8_t u_max,
+ uint8_t v_min,
+ uint8_t v_max,
+ const webrtc::VideoFrame* frame) {
+ if (!frame || !frame->video_frame_buffer()) {
+ return false;
+ }
+ rtc::scoped_refptr<const webrtc::I420BufferInterface> i420_buffer =
+ frame->video_frame_buffer()->ToI420();
+ // Y
+ int y_width = frame->width();
+ int y_height = frame->height();
+ const uint8_t* y_plane = i420_buffer->DataY();
+ const uint8_t* y_pos = y_plane;
+ int32_t y_pitch = i420_buffer->StrideY();
+ for (int i = 0; i < y_height; ++i) {
+ for (int j = 0; j < y_width; ++j) {
+ uint8_t y_value = *(y_pos + j);
+ if (y_value < y_min || y_value > y_max) {
+ return false;
+ }
+ }
+ y_pos += y_pitch;
+ }
+ // U and V
+ int chroma_width = i420_buffer->ChromaWidth();
+ int chroma_height = i420_buffer->ChromaHeight();
+ const uint8_t* u_plane = i420_buffer->DataU();
+ const uint8_t* v_plane = i420_buffer->DataV();
+ const uint8_t* u_pos = u_plane;
+ const uint8_t* v_pos = v_plane;
+ int32_t u_pitch = i420_buffer->StrideU();
+ int32_t v_pitch = i420_buffer->StrideV();
+ for (int i = 0; i < chroma_height; ++i) {
+ for (int j = 0; j < chroma_width; ++j) {
+ uint8_t u_value = *(u_pos + j);
+ if (u_value < u_min || u_value > u_max) {
+ return false;
+ }
+ uint8_t v_value = *(v_pos + j);
+ if (v_value < v_min || v_value > v_max) {
+ return false;
+ }
+ }
+ u_pos += u_pitch;
+ v_pos += v_pitch;
+ }
+ return true;
+ }
+
+ int errors_;
+ int width_;
+ int height_;
+ webrtc::VideoRotation rotation_;
+ int64_t timestamp_us_;
+ int num_rendered_frames_;
+ bool black_frame_;
+ rtc::CriticalSection crit_;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_FAKEVIDEORENDERER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc
new file mode 100644
index 0000000000..4731c18f40
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.cc
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/h264_profile_level_id.h"
+
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+
+#include "rtc_base/arraysize.h"
+
+namespace webrtc {
+namespace H264 {
+
+namespace {
+
+const char kProfileLevelId[] = "profile-level-id";
+const char kLevelAsymmetryAllowed[] = "level-asymmetry-allowed";
+
+// For level_idc=11 and profile_idc=0x42, 0x4D, or 0x58, the constraint set3
+// flag specifies if level 1b or level 1.1 is used.
+const uint8_t kConstraintSet3Flag = 0x10;
+
+// Convert a string of 8 characters into a byte where the positions containing
+// character c will have their bit set. For example, c = 'x', str = "x1xx0000"
+// will return 0b10110000. constexpr is used so that the pattern table in
+// kProfilePatterns is statically initialized.
// Builds a byte whose bit i (counting from the MSB) is set iff str[i] == c.
// For example, c = 'x' with str = "x1xx0000" yields 0b10110000. constexpr so
// that the pattern table in kProfilePatterns is statically initialized.
constexpr uint8_t ByteMaskString(char c, const char (&str)[9]) {
  return static_cast<uint8_t>(((str[0] == c) << 7) | ((str[1] == c) << 6) |
                              ((str[2] == c) << 5) | ((str[3] == c) << 4) |
                              ((str[4] == c) << 3) | ((str[5] == c) << 2) |
                              ((str[6] == c) << 1) | ((str[7] == c) << 0));
}
+
+// Class for matching bit patterns such as "x1xx0000" where 'x' is allowed to be
+// either 0 or 1.
class BitPattern {
 public:
  // |str| is an 8-character pattern of '0', '1' and 'x'. mask_ gets a 1 bit
  // at every position that is not 'x' (positions that must match), and
  // masked_value_ holds the required bit values at those positions.
  explicit constexpr BitPattern(const char (&str)[9])
      : mask_(~ByteMaskString('x', str)),
        masked_value_(ByteMaskString('1', str)) {}

  // True iff |value| agrees with the pattern at every non-'x' position.
  bool IsMatch(uint8_t value) const { return masked_value_ == (value & mask_); }

 private:
  const uint8_t mask_;
  const uint8_t masked_value_;
};
+
+// Table for converting between profile_idc/profile_iop to H264::Profile.
struct ProfilePattern {
  // profile_idc byte an entry applies to.
  const uint8_t profile_idc;
  // Pattern the constraint flags (profile_iop byte) must match.
  const BitPattern profile_iop;
  // H264::Profile reported when both of the above match.
  const Profile profile;
};

// This is from https://tools.ietf.org/html/rfc6184#section-8.1.
// Entries are tried in order; the first (profile_idc, profile_iop) match
// determines the profile.
constexpr ProfilePattern kProfilePatterns[] = {
    {0x42, BitPattern("x1xx0000"), kProfileConstrainedBaseline},
    {0x4D, BitPattern("1xxx0000"), kProfileConstrainedBaseline},
    {0x58, BitPattern("11xx0000"), kProfileConstrainedBaseline},
    {0x42, BitPattern("x0xx0000"), kProfileBaseline},
    {0x58, BitPattern("10xx0000"), kProfileBaseline},
    {0x4D, BitPattern("0x0x0000"), kProfileMain},
    {0x64, BitPattern("00000000"), kProfileHigh},
    {0x64, BitPattern("00001100"), kProfileConstrainedHigh}};
+
+// Compare H264 levels and handle the level 1b case.
+bool IsLess(Level a, Level b) {
+ if (a == kLevel1_b)
+ return b != kLevel1 && b != kLevel1_b;
+ if (b == kLevel1_b)
+ return a == kLevel1;
+ return a < b;
+}
+
+Level Min(Level a, Level b) {
+ return IsLess(a, b) ? a : b;
+}
+
+bool IsLevelAsymmetryAllowed(const CodecParameterMap& params) {
+ const auto it = params.find(kLevelAsymmetryAllowed);
+ return it != params.end() && strcmp(it->second.c_str(), "1") == 0;
+}
+
+struct LevelConstraint {
+ const int max_macroblocks_per_second;
+ const int max_macroblock_frame_size;
+ const webrtc::H264::Level level;
+};
+
+// This is from ITU-T H.264 (02/2016) Table A-1 – Level limits.
+static constexpr LevelConstraint kLevelConstraints[] = {
+ {1485, 99, webrtc::H264::kLevel1},
+ {1485, 99, webrtc::H264::kLevel1_b},
+ {3000, 396, webrtc::H264::kLevel1_1},
+ {6000, 396, webrtc::H264::kLevel1_2},
+ {11880, 396, webrtc::H264::kLevel1_3},
+ {11880, 396, webrtc::H264::kLevel2},
+ {19800, 792, webrtc::H264::kLevel2_1},
+ {20250, 1620, webrtc::H264::kLevel2_2},
+ {40500, 1620, webrtc::H264::kLevel3},
+ {108000, 3600, webrtc::H264::kLevel3_1},
+ {216000, 5120, webrtc::H264::kLevel3_2},
+ {245760, 8192, webrtc::H264::kLevel4},
+ {245760, 8192, webrtc::H264::kLevel4_1},
+ {522240, 8704, webrtc::H264::kLevel4_2},
+ {589824, 22080, webrtc::H264::kLevel5},
+ {983040, 3684, webrtc::H264::kLevel5_1},
+ {2073600, 3684, webrtc::H264::kLevel5_2},
+};
+
+} // anonymous namespace
+
// Parses a 6-hex-character profile-level-id string into its three bytes
// (profile_idc, profile_iop, level_idc) and maps them onto a ProfileLevelId.
// Returns nothing if the string is malformed or the profile/level
// combination is not one of the recognized patterns.
rtc::Optional<ProfileLevelId> ParseProfileLevelId(const char* str) {
  // The string should consist of 3 bytes in hexadecimal format.
  if (strlen(str) != 6u)
    return rtc::nullopt;
  // NOTE(review): strtol stops at the first non-hex character, so a 6-char
  // string with trailing garbage can still parse to a nonzero value and be
  // accepted — confirm this laxness is intended. A result of 0 (parse
  // failure or the all-zero id, which is invalid anyway) is rejected below.
  const uint32_t profile_level_id_numeric = strtol(str, nullptr, 16);
  if (profile_level_id_numeric == 0)
    return rtc::nullopt;

  // Separate into three bytes.
  const uint8_t level_idc =
      static_cast<uint8_t>(profile_level_id_numeric & 0xFF);
  const uint8_t profile_iop =
      static_cast<uint8_t>((profile_level_id_numeric >> 8) & 0xFF);
  const uint8_t profile_idc =
      static_cast<uint8_t>((profile_level_id_numeric >> 16) & 0xFF);

  // Parse level based on level_idc and constraint set 3 flag.
  Level level;
  switch (level_idc) {
    case kLevel1_1:
      // level_idc 11 is ambiguous: constraint set 3 distinguishes 1b from
      // 1.1 (see kConstraintSet3Flag above).
      level = (profile_iop & kConstraintSet3Flag) != 0 ? kLevel1_b : kLevel1_1;
      break;
    case kLevel1:
    case kLevel1_2:
    case kLevel1_3:
    case kLevel2:
    case kLevel2_1:
    case kLevel2_2:
    case kLevel3:
    case kLevel3_1:
    case kLevel3_2:
    case kLevel4:
    case kLevel4_1:
    case kLevel4_2:
    case kLevel5:
    case kLevel5_1:
    case kLevel5_2:
      // All other recognized levels use the raw numeric level_idc.
      level = static_cast<Level>(level_idc);
      break;
    default:
      // Unrecognized level_idc.
      return rtc::nullopt;
  }

  // Parse profile_idc/profile_iop into a Profile enum.
  for (const ProfilePattern& pattern : kProfilePatterns) {
    if (profile_idc == pattern.profile_idc &&
        pattern.profile_iop.IsMatch(profile_iop)) {
      return ProfileLevelId(pattern.profile, level);
    }
  }

  // Unrecognized profile_idc/profile_iop combination.
  return rtc::nullopt;
}
+
+rtc::Optional<Level> SupportedLevel(int max_frame_pixel_count, float max_fps) {
+ static const int kPixelsPerMacroblock = 16 * 16;
+
+ for (int i = arraysize(kLevelConstraints) - 1; i >= 0; --i) {
+ const LevelConstraint& level_constraint = kLevelConstraints[i];
+ if (level_constraint.max_macroblock_frame_size * kPixelsPerMacroblock <=
+ max_frame_pixel_count &&
+ level_constraint.max_macroblocks_per_second <=
+ max_fps * level_constraint.max_macroblock_frame_size) {
+ return level_constraint.level;
+ }
+ }
+
+ // No level supported.
+ return rtc::nullopt;
+}
+
+rtc::Optional<ProfileLevelId> ParseSdpProfileLevelId(
+ const CodecParameterMap& params) {
+ // TODO(magjed): The default should really be kProfileBaseline and kLevel1
+ // according to the spec: https://tools.ietf.org/html/rfc6184#section-8.1. In
+ // order to not break backwards compatibility with older versions of WebRTC
+ // where external codecs don't have any parameters, use
+ // kProfileConstrainedBaseline kLevel3_1 instead. This workaround will only be
+ // done in an interim period to allow external clients to update their code.
+ // http://crbug/webrtc/6337.
+ static const ProfileLevelId kDefaultProfileLevelId(
+ kProfileConstrainedBaseline, kLevel3_1);
+
+ const auto profile_level_id_it = params.find(kProfileLevelId);
+ return (profile_level_id_it == params.end())
+ ? kDefaultProfileLevelId
+ : ParseProfileLevelId(profile_level_id_it->second.c_str());
+}
+
// Renders |profile_level_id| as its canonical 6-hex-digit string, or returns
// nothing for combinations that have no representation (e.g. level 1b with a
// High profile).
rtc::Optional<std::string> ProfileLevelIdToString(
    const ProfileLevelId& profile_level_id) {
  // Handle special case level == 1b.
  // Level 1b has no level_idc of its own; it is encoded as level_idc 11
  // (0x0b) plus the constraint set 3 flag in profile_iop, hence the
  // hard-coded strings below.
  if (profile_level_id.level == kLevel1_b) {
    switch (profile_level_id.profile) {
      case kProfileConstrainedBaseline:
        return {"42f00b"};
      case kProfileBaseline:
        return {"42100b"};
      case kProfileMain:
        return {"4d100b"};
      // Level 1b is not allowed for other profiles.
      default:
        return rtc::nullopt;
    }
  }

  // First four hex digits: profile_idc and profile_iop for the profile.
  const char* profile_idc_iop_string;
  switch (profile_level_id.profile) {
    case kProfileConstrainedBaseline:
      profile_idc_iop_string = "42e0";
      break;
    case kProfileBaseline:
      profile_idc_iop_string = "4200";
      break;
    case kProfileMain:
      profile_idc_iop_string = "4d00";
      break;
    case kProfileConstrainedHigh:
      profile_idc_iop_string = "640c";
      break;
    case kProfileHigh:
      profile_idc_iop_string = "6400";
      break;
    // Unrecognized profile.
    default:
      return rtc::nullopt;
  }

  // Last two hex digits: the level. Level enum values equal the numeric
  // level_idc (ten times the level number); 1b was handled above.
  char str[7];
  snprintf(str, 7u, "%s%02x", profile_idc_iop_string, profile_level_id.level);
  return {str};
}
+
+// Set level according to https://tools.ietf.org/html/rfc6184#section-8.2.2.
// Writes the negotiated profile-level-id into |answer_params|. Precondition
// (enforced by the DCHECKs below, and documented in the header): both sides'
// parameters parse to the same H264 profile; only the level part is
// negotiated here.
void GenerateProfileLevelIdForAnswer(
    const CodecParameterMap& local_supported_params,
    const CodecParameterMap& remote_offered_params,
    CodecParameterMap* answer_params) {
  // If both local and remote haven't set profile-level-id, they are both using
  // the default profile. In this case, don't set profile-level-id in answer
  // either.
  if (!local_supported_params.count(kProfileLevelId) &&
      !remote_offered_params.count(kProfileLevelId)) {
    return;
  }

  // Parse profile-level-ids.
  const rtc::Optional<ProfileLevelId> local_profile_level_id =
      ParseSdpProfileLevelId(local_supported_params);
  const rtc::Optional<ProfileLevelId> remote_profile_level_id =
      ParseSdpProfileLevelId(remote_offered_params);
  // The local and remote codec must have valid and equal H264 Profiles.
  RTC_DCHECK(local_profile_level_id);
  RTC_DCHECK(remote_profile_level_id);
  RTC_DCHECK_EQ(local_profile_level_id->profile,
                remote_profile_level_id->profile);

  // Parse level information.
  // Asymmetry is only in effect when both sides allow it.
  const bool level_asymmetry_allowed =
      IsLevelAsymmetryAllowed(local_supported_params) &&
      IsLevelAsymmetryAllowed(remote_offered_params);
  const Level local_level = local_profile_level_id->level;
  const Level remote_level = remote_profile_level_id->level;
  const Level min_level = Min(local_level, remote_level);

  // Determine answer level. When level asymmetry is not allowed, level upgrade
  // is not allowed, i.e., the level in the answer must be equal to or lower
  // than the level in the offer.
  const Level answer_level = level_asymmetry_allowed ? local_level : min_level;

  // Set the resulting profile-level-id in the answer parameters.
  // ProfileLevelIdToString is dereferenced unchecked here; the profile was
  // validated above and the level comes from parsed input.
  (*answer_params)[kProfileLevelId] = *ProfileLevelIdToString(
      ProfileLevelId(local_profile_level_id->profile, answer_level));
}
+
+bool IsSameH264Profile(const CodecParameterMap& params1,
+ const CodecParameterMap& params2) {
+ const rtc::Optional<webrtc::H264::ProfileLevelId> profile_level_id =
+ webrtc::H264::ParseSdpProfileLevelId(params1);
+ const rtc::Optional<webrtc::H264::ProfileLevelId> other_profile_level_id =
+ webrtc::H264::ParseSdpProfileLevelId(params2);
+ // Compare H264 profiles, but not levels.
+ return profile_level_id && other_profile_level_id &&
+ profile_level_id->profile == other_profile_level_id->profile;
+}
+
+} // namespace H264
+} // namespace webrtc
diff --git a/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h
new file mode 100644
index 0000000000..28899e53d9
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/h264_profile_level_id.h
@@ -0,0 +1,107 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_
+#define MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_
+
+#include <map>
+#include <string>
+
+#include "api/optional.h"
+#include "common_types.h" // NOLINT(build/include)
+
+namespace webrtc {
+namespace H264 {
+
+// Map containing SDP codec parameters.
+typedef std::map<std::string, std::string> CodecParameterMap;
+
+// All values are equal to ten times the level number, except level 1b which is
+// special.
enum Level {
  // Level 1b has no numeric level_idc of its own (it is signaled through
  // level_idc 11 plus the constraint set 3 flag), so it gets the otherwise
  // unused value 0. Note this makes raw numeric comparison of Level values
  // mis-order 1b, which logically sits between level 1 and level 1.1.
  kLevel1_b = 0,
  kLevel1 = 10,
  kLevel1_1 = 11,
  kLevel1_2 = 12,
  kLevel1_3 = 13,
  kLevel2 = 20,
  kLevel2_1 = 21,
  kLevel2_2 = 22,
  kLevel3 = 30,
  kLevel3_1 = 31,
  kLevel3_2 = 32,
  kLevel4 = 40,
  kLevel4_1 = 41,
  kLevel4_2 = 42,
  kLevel5 = 50,
  kLevel5_1 = 51,
  kLevel5_2 = 52
};
+
// An H264 profile/level pair, e.g. as parsed from the SDP profile-level-id
// parameter.
struct ProfileLevelId {
  ProfileLevelId(Profile profile, Level level)
      : profile(profile), level(level) {}
  Profile profile;
  Level level;
};
+
+// Parse profile level id that is represented as a string of 3 hex bytes.
+// Nothing will be returned if the string is not a recognized H264
+// profile level id.
+rtc::Optional<ProfileLevelId> ParseProfileLevelId(const char* str);
+
+// Parse profile level id that is represented as a string of 3 hex bytes
+// contained in an SDP key-value map. A default profile level id will be
+// returned if the profile-level-id key is missing. Nothing will be returned if
+// the key is present but the string is invalid.
+rtc::Optional<ProfileLevelId> ParseSdpProfileLevelId(
+ const CodecParameterMap& params);
+
+// Given that a decoder supports up to a given frame size (in pixels) at up to a
+// given number of frames per second, return the highest H.264 level where it
+// can guarantee that it will be able to support all valid encoded streams that
+// are within that level.
+rtc::Optional<Level> SupportedLevel(int max_frame_pixel_count, float max_fps);
+
+// Returns canonical string representation as three hex bytes of the profile
+// level id, or returns nothing for invalid profile level ids.
+rtc::Optional<std::string> ProfileLevelIdToString(
+ const ProfileLevelId& profile_level_id);
+
+// Generate codec parameters that will be used as answer in an SDP negotiation
+// based on local supported parameters and remote offered parameters. Both
+// |local_supported_params|, |remote_offered_params|, and |answer_params|
+// represent sendrecv media descriptions, i.e they are a mix of both encode and
+// decode capabilities. In theory, when the profile in |local_supported_params|
+// represent a strict superset of the profile in |remote_offered_params|, we
+// could limit the profile in |answer_params| to the profile in
+// |remote_offered_params|. However, to simplify the code, each supported H264
+// profile should be listed explicitly in the list of local supported codecs,
+// even if they are redundant. Then each local codec in the list should be
+// tested one at a time against the remote codec, and only when the profiles are
+// equal should this function be called. Therefore, this function does not need
+// to handle profile intersection, and the profile of |local_supported_params|
+// and |remote_offered_params| must be equal before calling this function. The
+// parameters that are used when negotiating are the level part of
+// profile-level-id and level-asymmetry-allowed.
+void GenerateProfileLevelIdForAnswer(
+ const CodecParameterMap& local_supported_params,
+ const CodecParameterMap& remote_offered_params,
+ CodecParameterMap* answer_params);
+
+// Returns true if the parameters have the same H264 profile, i.e. the same
+// H264::Profile (Baseline, High, etc).
+bool IsSameH264Profile(const CodecParameterMap& params1,
+ const CodecParameterMap& params2);
+
+} // namespace H264
+} // namespace webrtc
+
+#endif // MEDIA_BASE_H264_PROFILE_LEVEL_ID_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/mediachannel.h b/third_party/libwebrtc/webrtc/media/base/mediachannel.h
new file mode 100644
index 0000000000..4826b2dc4a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediachannel.h
@@ -0,0 +1,1254 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIACHANNEL_H_
+#define MEDIA_BASE_MEDIACHANNEL_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "api/audio_codecs/audio_encoder.h"
+#include "api/optional.h"
+#include "api/rtpparameters.h"
+#include "api/rtpreceiverinterface.h"
+#include "api/video/video_timing.h"
+#include "call/video_config.h"
+#include "media/base/codec.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/streamparams.h"
+#include "media/base/videosinkinterface.h"
+#include "media/base/videosourceinterface.h"
+#include "modules/audio_processing/include/audio_processing_statistics.h"
+#include "rtc_base/asyncpacketsocket.h"
+#include "rtc_base/basictypes.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/dscp.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/networkroute.h"
+#include "rtc_base/sigslot.h"
+#include "rtc_base/socket.h"
+#include "rtc_base/window.h"
+
+
+namespace rtc {
+class RateLimiter;
+class Timing;
+}
+
+namespace webrtc {
+class AudioSinkInterface;
+class VideoFrame;
+}
+
+namespace cricket {
+
+class AudioSource;
+class VideoCapturer;
+struct RtpHeader;
+struct VideoFormat;
+
+const int kScreencastDefaultFps = 5;
+
+template <class T>
+static std::string ToStringIfSet(const char* key, const rtc::Optional<T>& val) {
+ std::string str;
+ if (val) {
+ str = key;
+ str += ": ";
+ str += val ? rtc::ToString(*val) : "";
+ str += ", ";
+ }
+ return str;
+}
+
// Renders |vals| as "[a, b, c]", using each element's ToString() method.
template <class T>
static std::string VectorToString(const std::vector<T>& vals) {
  std::ostringstream out;
  out << "[";
  const char* separator = "";
  for (const T& val : vals) {
    out << separator << val.ToString();
    separator = ", ";
  }
  out << "]";
  return out.str();
}
+
+// Construction-time settings, passed on when creating
+// MediaChannels.
struct MediaConfig {
  // Set DSCP value on packets. This flag comes from the
  // PeerConnection constraint 'googDscp'.
  bool enable_dscp = false;

  // Video-specific config.
  struct Video {
    // Enable WebRTC CPU Overuse Detection. This flag comes from the
    // PeerConnection constraint 'googCpuOveruseDetection'.
    bool enable_cpu_overuse_detection = true;

    // Enable WebRTC suspension of video. No video frames will be sent
    // when the bitrate is below the configured minimum bitrate. This
    // flag comes from the PeerConnection constraint
    // 'googSuspendBelowMinBitrate', and WebRtcVideoChannel copies it
    // to VideoSendStream::Config::suspend_below_min_bitrate.
    bool suspend_below_min_bitrate = false;

    // Set to true if the renderer has an algorithm of frame selection.
    // If the value is true, then WebRTC will hand over a frame as soon as
    // possible without delay, and rendering smoothness is completely the duty
    // of the renderer;
    // If the value is false, then WebRTC is responsible to delay frame release
    // in order to increase rendering smoothness.
    //
    // This flag comes from PeerConnection's RtcConfiguration, but is
    // currently only set by the command line flag
    // 'disable-rtc-smoothness-algorithm'.
    // WebRtcVideoChannel::AddRecvStream copies it to the created
    // WebRtcVideoReceiveStream, where it is returned by the
    // SmoothsRenderedFrames method. This method is used by the
    // VideoReceiveStream, where the value is passed on to the
    // IncomingVideoStream constructor.
    bool disable_prerenderer_smoothing = false;

    // Enables periodic bandwidth probing in application-limited region.
    bool periodic_alr_bandwidth_probing = false;
  } video;

  // Field-wise equality over enable_dscp and every Video field; keep in sync
  // when adding members.
  bool operator==(const MediaConfig& o) const {
    return enable_dscp == o.enable_dscp &&
           video.enable_cpu_overuse_detection ==
               o.video.enable_cpu_overuse_detection &&
           video.suspend_below_min_bitrate ==
               o.video.suspend_below_min_bitrate &&
           video.disable_prerenderer_smoothing ==
               o.video.disable_prerenderer_smoothing &&
           video.periodic_alr_bandwidth_probing ==
               o.video.periodic_alr_bandwidth_probing;
  }

  bool operator!=(const MediaConfig& o) const { return !(*this == o); }
};
+
+// Options that can be applied to a VoiceMediaChannel or a VoiceMediaEngine.
+// Used to be flags, but that makes it hard to selectively apply options.
+// We are moving all of the setting of options to structs like this,
+// but some things currently still use flags.
struct AudioOptions {
  // Overwrites this object's fields with those fields of |change| that are
  // set; fields unset in |change| are left untouched.
  void SetAll(const AudioOptions& change) {
    SetFrom(&echo_cancellation, change.echo_cancellation);
    SetFrom(&auto_gain_control, change.auto_gain_control);
    SetFrom(&noise_suppression, change.noise_suppression);
    SetFrom(&highpass_filter, change.highpass_filter);
    SetFrom(&stereo_swapping, change.stereo_swapping);
    SetFrom(&audio_jitter_buffer_max_packets,
            change.audio_jitter_buffer_max_packets);
    SetFrom(&audio_jitter_buffer_fast_accelerate,
            change.audio_jitter_buffer_fast_accelerate);
    SetFrom(&typing_detection, change.typing_detection);
    SetFrom(&aecm_generate_comfort_noise, change.aecm_generate_comfort_noise);
    SetFrom(&adjust_agc_delta, change.adjust_agc_delta);
    SetFrom(&experimental_agc, change.experimental_agc);
    SetFrom(&extended_filter_aec, change.extended_filter_aec);
    SetFrom(&delay_agnostic_aec, change.delay_agnostic_aec);
    SetFrom(&experimental_ns, change.experimental_ns);
    SetFrom(&intelligibility_enhancer, change.intelligibility_enhancer);
    SetFrom(&level_control, change.level_control);
    SetFrom(&residual_echo_detector, change.residual_echo_detector);
    SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov);
    SetFrom(&tx_agc_digital_compression_gain,
            change.tx_agc_digital_compression_gain);
    SetFrom(&tx_agc_limiter, change.tx_agc_limiter);
    SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe);
    SetFrom(&audio_network_adaptor, change.audio_network_adaptor);
    SetFrom(&audio_network_adaptor_config, change.audio_network_adaptor_config);
    SetFrom(&level_control_initial_peak_level_dbfs,
            change.level_control_initial_peak_level_dbfs);
  }

  // Field-wise equality; unset optionals compare equal to unset optionals.
  // Keep in sync with SetAll() and the member list when adding fields.
  bool operator==(const AudioOptions& o) const {
    return echo_cancellation == o.echo_cancellation &&
           auto_gain_control == o.auto_gain_control &&
           noise_suppression == o.noise_suppression &&
           highpass_filter == o.highpass_filter &&
           stereo_swapping == o.stereo_swapping &&
           audio_jitter_buffer_max_packets ==
               o.audio_jitter_buffer_max_packets &&
           audio_jitter_buffer_fast_accelerate ==
               o.audio_jitter_buffer_fast_accelerate &&
           typing_detection == o.typing_detection &&
           aecm_generate_comfort_noise == o.aecm_generate_comfort_noise &&
           experimental_agc == o.experimental_agc &&
           extended_filter_aec == o.extended_filter_aec &&
           delay_agnostic_aec == o.delay_agnostic_aec &&
           experimental_ns == o.experimental_ns &&
           intelligibility_enhancer == o.intelligibility_enhancer &&
           level_control == o.level_control &&
           residual_echo_detector == o.residual_echo_detector &&
           adjust_agc_delta == o.adjust_agc_delta &&
           tx_agc_target_dbov == o.tx_agc_target_dbov &&
           tx_agc_digital_compression_gain ==
               o.tx_agc_digital_compression_gain &&
           tx_agc_limiter == o.tx_agc_limiter &&
           combined_audio_video_bwe == o.combined_audio_video_bwe &&
           audio_network_adaptor == o.audio_network_adaptor &&
           audio_network_adaptor_config == o.audio_network_adaptor_config &&
           level_control_initial_peak_level_dbfs ==
               o.level_control_initial_peak_level_dbfs;
  }
  bool operator!=(const AudioOptions& o) const { return !(*this == o); }

  // Human-readable dump of all set options; unset options are omitted.
  std::string ToString() const {
    std::ostringstream ost;
    ost << "AudioOptions {";
    ost << ToStringIfSet("aec", echo_cancellation);
    ost << ToStringIfSet("agc", auto_gain_control);
    ost << ToStringIfSet("ns", noise_suppression);
    ost << ToStringIfSet("hf", highpass_filter);
    ost << ToStringIfSet("swap", stereo_swapping);
    ost << ToStringIfSet("audio_jitter_buffer_max_packets",
                         audio_jitter_buffer_max_packets);
    ost << ToStringIfSet("audio_jitter_buffer_fast_accelerate",
                         audio_jitter_buffer_fast_accelerate);
    ost << ToStringIfSet("typing", typing_detection);
    ost << ToStringIfSet("comfort_noise", aecm_generate_comfort_noise);
    ost << ToStringIfSet("agc_delta", adjust_agc_delta);
    ost << ToStringIfSet("experimental_agc", experimental_agc);
    ost << ToStringIfSet("extended_filter_aec", extended_filter_aec);
    ost << ToStringIfSet("delay_agnostic_aec", delay_agnostic_aec);
    ost << ToStringIfSet("experimental_ns", experimental_ns);
    ost << ToStringIfSet("intelligibility_enhancer", intelligibility_enhancer);
    ost << ToStringIfSet("level_control", level_control);
    ost << ToStringIfSet("level_control_initial_peak_level_dbfs",
                         level_control_initial_peak_level_dbfs);
    ost << ToStringIfSet("residual_echo_detector", residual_echo_detector);
    ost << ToStringIfSet("tx_agc_target_dbov", tx_agc_target_dbov);
    ost << ToStringIfSet("tx_agc_digital_compression_gain",
                         tx_agc_digital_compression_gain);
    ost << ToStringIfSet("tx_agc_limiter", tx_agc_limiter);
    ost << ToStringIfSet("combined_audio_video_bwe", combined_audio_video_bwe);
    ost << ToStringIfSet("audio_network_adaptor", audio_network_adaptor);
    // The adaptor config is a serialized proto buffer and therefore not human
    // readable. So we comment out the following line.
    // ost << ToStringIfSet("audio_network_adaptor_config",
    // audio_network_adaptor_config);
    ost << "}";
    return ost.str();
  }

  // Audio processing that attempts to filter away the output signal from
  // later inbound pickup.
  rtc::Optional<bool> echo_cancellation;
  // Audio processing to adjust the sensitivity of the local mic dynamically.
  rtc::Optional<bool> auto_gain_control;
  // Audio processing to filter out background noise.
  rtc::Optional<bool> noise_suppression;
  // Audio processing to remove background noise of lower frequencies.
  rtc::Optional<bool> highpass_filter;
  // Audio processing to swap the left and right channels.
  rtc::Optional<bool> stereo_swapping;
  // Audio receiver jitter buffer (NetEq) max capacity in number of packets.
  rtc::Optional<int> audio_jitter_buffer_max_packets;
  // Audio receiver jitter buffer (NetEq) fast accelerate mode.
  rtc::Optional<bool> audio_jitter_buffer_fast_accelerate;
  // Audio processing to detect typing.
  rtc::Optional<bool> typing_detection;
  rtc::Optional<bool> aecm_generate_comfort_noise;
  rtc::Optional<int> adjust_agc_delta;
  rtc::Optional<bool> experimental_agc;
  rtc::Optional<bool> extended_filter_aec;
  rtc::Optional<bool> delay_agnostic_aec;
  rtc::Optional<bool> experimental_ns;
  rtc::Optional<bool> intelligibility_enhancer;
  rtc::Optional<bool> level_control;
  // Specifies an optional initialization value for the level controller.
  rtc::Optional<float> level_control_initial_peak_level_dbfs;
  // Note that tx_agc_* only applies to non-experimental AGC.
  rtc::Optional<bool> residual_echo_detector;
  rtc::Optional<uint16_t> tx_agc_target_dbov;
  rtc::Optional<uint16_t> tx_agc_digital_compression_gain;
  rtc::Optional<bool> tx_agc_limiter;
  // Enable combined audio+bandwidth BWE.
  // TODO(pthatcher): This flag is set from the
  // "googCombinedAudioVideoBwe", but not used anywhere. So delete it,
  // and check if any other AudioOptions members are unused.
  rtc::Optional<bool> combined_audio_video_bwe;
  // Enable audio network adaptor.
  rtc::Optional<bool> audio_network_adaptor;
  // Config string for audio network adaptor.
  rtc::Optional<std::string> audio_network_adaptor_config;

 private:
  // Copies |o| into |s| only when |o| carries a value.
  template <typename T>
  static void SetFrom(rtc::Optional<T>* s, const rtc::Optional<T>& o) {
    if (o) {
      *s = o;
    }
  }
};
+
+// Options that can be applied to a VideoMediaChannel or a VideoMediaEngine.
+// Used to be flags, but that makes it hard to selectively apply options.
+// We are moving all of the setting of options to structs like this,
+// but some things currently still use flags.
struct VideoOptions {
  // Overwrites this object's fields with those fields of |change| that are
  // set; fields unset in |change| are left untouched.
  void SetAll(const VideoOptions& change) {
    SetFrom(&video_noise_reduction, change.video_noise_reduction);
    SetFrom(&screencast_min_bitrate_kbps, change.screencast_min_bitrate_kbps);
    SetFrom(&is_screencast, change.is_screencast);
  }

  // Field-wise equality; unset optionals compare equal to unset optionals.
  bool operator==(const VideoOptions& o) const {
    return video_noise_reduction == o.video_noise_reduction &&
           screencast_min_bitrate_kbps == o.screencast_min_bitrate_kbps &&
           is_screencast == o.is_screencast;
  }
  bool operator!=(const VideoOptions& o) const { return !(*this == o); }

  // Human-readable dump of all set options; unset options are omitted.
  std::string ToString() const {
    std::ostringstream ost;
    ost << "VideoOptions {";
    ost << ToStringIfSet("noise reduction", video_noise_reduction);
    ost << ToStringIfSet("screencast min bitrate kbps",
                         screencast_min_bitrate_kbps);
    ost << ToStringIfSet("is_screencast ", is_screencast);
    ost << "}";
    return ost.str();
  }

  // Enable denoising? This flag comes from the getUserMedia
  // constraint 'googNoiseReduction', and WebRtcVideoEngine passes it
  // on to the codec options. Disabled by default.
  rtc::Optional<bool> video_noise_reduction;
  // Force screencast to use a minimum bitrate. This flag comes from
  // the PeerConnection constraint 'googScreencastMinBitrate'. It is
  // copied to the encoder config by WebRtcVideoChannel.
  rtc::Optional<int> screencast_min_bitrate_kbps;
  // Set by screencast sources. Implies selection of encoding settings
  // suitable for screencast. Most likely not the right way to do
  // things, e.g., screencast of a text document and screencast of a
  // youtube video have different needs.
  rtc::Optional<bool> is_screencast;

 private:
  // Copies |o| into |s| only when |o| carries a value.
  template <typename T>
  static void SetFrom(rtc::Optional<T>* s, const rtc::Optional<T>& o) {
    if (o) {
      *s = o;
    }
  }
};
+
+// TODO(isheriff): Remove this once client usage is fixed to use RtpExtension.
+// Legacy pairing of an RTP header extension URI with its numeric id.
+struct RtpHeaderExtension {
+  RtpHeaderExtension() : id(0) {}
+  RtpHeaderExtension(const std::string& uri, int id) : uri(uri), id(id) {}
+
+  // Debug representation, e.g. "{uri: <uri>, id: <id>}".
+  std::string ToString() const {
+    std::ostringstream ost;
+    ost << "{";
+    ost << "uri: " << uri;
+    ost << ", id: " << id;
+    ost << "}";
+    return ost.str();
+  }
+
+  std::string uri;
+  int id;
+};
+
+// Base class for voice/video/data media channels. Owns a (non-owning) pointer
+// to the NetworkInterface used to send RTP/RTCP packets, guarded by a critical
+// section, and applies DSCP socket marking when enabled via MediaConfig.
+class MediaChannel : public sigslot::has_slots<> {
+ public:
+  // Abstract transport used by the channel to send packets and set socket
+  // options; implemented by the owner of the channel.
+  class NetworkInterface {
+   public:
+    enum SocketType { ST_RTP, ST_RTCP };
+    virtual bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+                            const rtc::PacketOptions& options) = 0;
+    virtual bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                          const rtc::PacketOptions& options) = 0;
+    virtual int SetOption(SocketType type, rtc::Socket::Option opt,
+                          int option) = 0;
+    virtual ~NetworkInterface() {}
+  };
+
+  explicit MediaChannel(const MediaConfig& config)
+      : enable_dscp_(config.enable_dscp), network_interface_(NULL) {}
+  MediaChannel() : enable_dscp_(false), network_interface_(NULL) {}
+  virtual ~MediaChannel() {}
+
+  // Sets the abstract interface class for sending RTP/RTCP data.
+  // Also (re)applies the DSCP setting to the new interface.
+  virtual void SetInterface(NetworkInterface *iface) {
+    rtc::CritScope cs(&network_interface_crit_);
+    network_interface_ = iface;
+    SetDscp(enable_dscp_ ? PreferredDscp() : rtc::DSCP_DEFAULT);
+  }
+  // Subclasses may override to request a non-default DSCP code point.
+  virtual rtc::DiffServCodePoint PreferredDscp() const {
+    return rtc::DSCP_DEFAULT;
+  }
+  // Called when a RTP packet is received.
+  virtual void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
+                                const rtc::PacketTime& packet_time) = 0;
+  // Called when a RTCP packet is received.
+  virtual void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
+                              const rtc::PacketTime& packet_time) = 0;
+  // Called when the socket's ability to send has changed.
+  virtual void OnReadyToSend(bool ready) = 0;
+  // Called when the network route used for sending packets changed.
+  virtual void OnNetworkRouteChanged(
+      const std::string& transport_name,
+      const rtc::NetworkRoute& network_route) = 0;
+  // Creates a new outgoing media stream with SSRCs and CNAME as described
+  // by sp.
+  virtual bool AddSendStream(const StreamParams& sp) = 0;
+  // Removes an outgoing media stream.
+  // ssrc must be the first SSRC of the media stream if the stream uses
+  // multiple SSRCs.
+  virtual bool RemoveSendStream(uint32_t ssrc) = 0;
+  // Creates a new incoming media stream with SSRCs and CNAME as described
+  // by sp.
+  virtual bool AddRecvStream(const StreamParams& sp) = 0;
+  // Removes an incoming media stream.
+  // ssrc must be the first SSRC of the media stream if the stream uses
+  // multiple SSRCs.
+  virtual bool RemoveRecvStream(uint32_t ssrc) = 0;
+
+  // Returns the absolute sendtime extension id value from media channel;
+  // -1 in this base implementation (i.e. when not negotiated).
+  virtual int GetRtpSendTimeExtnId() const {
+    return -1;
+  }
+
+  // Base method to send packet using NetworkInterface.
+  bool SendPacket(rtc::CopyOnWriteBuffer* packet,
+                  const rtc::PacketOptions& options) {
+    return DoSendPacket(packet, false, options);
+  }
+
+  bool SendRtcp(rtc::CopyOnWriteBuffer* packet,
+                const rtc::PacketOptions& options) {
+    return DoSendPacket(packet, true, options);
+  }
+
+  // Forwards to NetworkInterface::SetOption; -1 if no interface is attached.
+  int SetOption(NetworkInterface::SocketType type,
+                rtc::Socket::Option opt,
+                int option) {
+    rtc::CritScope cs(&network_interface_crit_);
+    if (!network_interface_)
+      return -1;
+
+    return network_interface_->SetOption(type, opt, option);
+  }
+
+ private:
+  // This method sets DSCP |value| on both RTP and RTCP channels.
+  // The RTCP option is only applied if the RTP one succeeded (returned 0).
+  int SetDscp(rtc::DiffServCodePoint value) {
+    int ret;
+    ret = SetOption(NetworkInterface::ST_RTP,
+                    rtc::Socket::OPT_DSCP,
+                    value);
+    if (ret == 0) {
+      ret = SetOption(NetworkInterface::ST_RTCP,
+                      rtc::Socket::OPT_DSCP,
+                      value);
+    }
+    return ret;
+  }
+
+  // Routes to SendPacket or SendRtcp based on |rtcp|; returns false when no
+  // network interface is currently attached.
+  bool DoSendPacket(rtc::CopyOnWriteBuffer* packet,
+                    bool rtcp,
+                    const rtc::PacketOptions& options) {
+    rtc::CritScope cs(&network_interface_crit_);
+    if (!network_interface_)
+      return false;
+
+    return (!rtcp) ? network_interface_->SendPacket(packet, options)
+                   : network_interface_->SendRtcp(packet, options);
+  }
+
+  const bool enable_dscp_;
+  // |network_interface_| can be accessed from the worker_thread and
+  // from any MediaEngine threads. This critical section is to protect accessing
+  // of network_interface_ object.
+  rtc::CriticalSection network_interface_crit_;
+  NetworkInterface* network_interface_;
+};
+
+// The stats information is structured as follows:
+// Media are represented by either MediaSenderInfo or MediaReceiverInfo.
+// Media contains a vector of SSRC infos that are exclusively used by this
+// media. (SSRCs shared between media streams can't be represented.)
+
+// Information about an SSRC.
+// This data may be locally recorded, or received in an RTCP SR or RR.
+struct SsrcSenderInfo {
+  SsrcSenderInfo()
+      : ssrc(0),
+        timestamp(0) {
+  }
+  uint32_t ssrc;
+  double timestamp;  // NTP timestamp, represented as seconds since epoch.
+};
+
+// Receive-side counterpart of SsrcSenderInfo.
+struct SsrcReceiverInfo {
+  SsrcReceiverInfo()
+      : ssrc(0),
+        timestamp(0) {
+  }
+  uint32_t ssrc;
+  double timestamp;  // Presumably NTP seconds as above — TODO confirm.
+};
+
+// Sender statistics common to audio, video and data; specialized by
+// Voice/Video/DataSenderInfo below.
+struct MediaSenderInfo {
+  MediaSenderInfo()
+      : bytes_sent(0),
+        packets_sent(0),
+        packets_lost(0),
+        fraction_lost(0.0),
+        rtt_ms(0) {
+  }
+  void add_ssrc(const SsrcSenderInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcSenderInfo, this function should go away.
+  void add_ssrc(uint32_t ssrc) {
+    SsrcSenderInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  // Utility accessor for clients that are only interested in ssrc numbers.
+  std::vector<uint32_t> ssrcs() const {
+    std::vector<uint32_t> retval;
+    for (std::vector<SsrcSenderInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that make the assumption only one ssrc
+  // exists per media.
+  // This will eventually go away.
+  // Returns 0 when no local stats have been recorded.
+  uint32_t ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
+  int64_t bytes_sent;
+  int packets_sent;
+  int packets_lost;
+  float fraction_lost;
+  int64_t rtt_ms;
+  std::string codec_name;
+  rtc::Optional<int> codec_payload_type;
+  std::vector<SsrcSenderInfo> local_stats;
+  std::vector<SsrcReceiverInfo> remote_stats;
+};
+
+// Receiver statistics common to audio, video and data; specialized by
+// Voice/Video/DataReceiverInfo below.
+struct MediaReceiverInfo {
+  MediaReceiverInfo()
+      : bytes_rcvd(0),
+        packets_rcvd(0),
+        packets_lost(0),
+        fraction_lost(0.0) {
+  }
+  void add_ssrc(const SsrcReceiverInfo& stat) {
+    local_stats.push_back(stat);
+  }
+  // Temporary utility function for call sites that only provide SSRC.
+  // As more info is added into SsrcReceiverInfo, this function should go away.
+  void add_ssrc(uint32_t ssrc) {
+    SsrcReceiverInfo stat;
+    stat.ssrc = ssrc;
+    add_ssrc(stat);
+  }
+  // Utility accessor for clients that are only interested in ssrc numbers.
+  std::vector<uint32_t> ssrcs() const {
+    std::vector<uint32_t> retval;
+    for (std::vector<SsrcReceiverInfo>::const_iterator it = local_stats.begin();
+         it != local_stats.end(); ++it) {
+      retval.push_back(it->ssrc);
+    }
+    return retval;
+  }
+  // Utility accessor for clients that make the assumption only one ssrc
+  // exists per media.
+  // This will eventually go away.
+  // Returns 0 when no local stats have been recorded.
+  uint32_t ssrc() const {
+    if (local_stats.size() > 0) {
+      return local_stats[0].ssrc;
+    } else {
+      return 0;
+    }
+  }
+
+  int64_t bytes_rcvd;
+  int packets_rcvd;
+  int packets_lost;
+  float fraction_lost;
+  std::string codec_name;
+  rtc::Optional<int> codec_payload_type;
+  std::vector<SsrcReceiverInfo> local_stats;
+  std::vector<SsrcSenderInfo> remote_stats;
+};
+
+// Audio-specific sender statistics, extending MediaSenderInfo.
+struct VoiceSenderInfo : public MediaSenderInfo {
+  VoiceSenderInfo()
+      : ext_seqnum(0),
+        jitter_ms(0),
+        audio_level(0),
+        total_input_energy(0.0),
+        total_input_duration(0.0),
+        aec_quality_min(0.0),
+        echo_delay_median_ms(0),
+        echo_delay_std_ms(0),
+        echo_return_loss(0),
+        echo_return_loss_enhancement(0),
+        residual_echo_likelihood(0.0f),
+        residual_echo_likelihood_recent_max(0.0f),
+        typing_noise_detected(false) {}
+
+  int ext_seqnum;
+  int jitter_ms;
+  int audio_level;
+  // See description of "totalAudioEnergy" in the WebRTC stats spec:
+  // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats-totalaudioenergy
+  double total_input_energy;
+  double total_input_duration;
+  // TODO(bugs.webrtc.org/8572): Remove APM stats from this struct, since they
+  // are no longer needed now that we have apm_statistics.
+  float aec_quality_min;
+  int echo_delay_median_ms;
+  int echo_delay_std_ms;
+  int echo_return_loss;
+  int echo_return_loss_enhancement;
+  float residual_echo_likelihood;
+  float residual_echo_likelihood_recent_max;
+  bool typing_noise_detected;
+  webrtc::ANAStats ana_statistics;
+  webrtc::AudioProcessingStats apm_statistics;
+};
+
+// Audio-specific receiver statistics, extending MediaReceiverInfo.
+struct VoiceReceiverInfo : public MediaReceiverInfo {
+  VoiceReceiverInfo()
+      : ext_seqnum(0),
+        jitter_ms(0),
+        jitter_buffer_ms(0),
+        jitter_buffer_preferred_ms(0),
+        delay_estimate_ms(0),
+        audio_level(0),
+        total_output_energy(0.0),
+        total_samples_received(0),
+        total_output_duration(0.0),
+        concealed_samples(0),
+        concealment_events(0),
+        jitter_buffer_delay_seconds(0),
+        expand_rate(0),
+        speech_expand_rate(0),
+        secondary_decoded_rate(0),
+        secondary_discarded_rate(0),
+        accelerate_rate(0),
+        preemptive_expand_rate(0),
+        decoding_calls_to_silence_generator(0),
+        decoding_calls_to_neteq(0),
+        decoding_normal(0),
+        decoding_plc(0),
+        decoding_cng(0),
+        decoding_plc_cng(0),
+        decoding_muted_output(0),
+        capture_start_ntp_time_ms(-1) {}
+
+  int ext_seqnum;
+  int jitter_ms;
+  int jitter_buffer_ms;
+  int jitter_buffer_preferred_ms;
+  int delay_estimate_ms;
+  int audio_level;
+  // Stats below correspond to similarly-named fields in the WebRTC stats spec.
+  // https://w3c.github.io/webrtc-stats/#dom-rtcmediastreamtrackstats
+  double total_output_energy;
+  uint64_t total_samples_received;
+  double total_output_duration;
+  uint64_t concealed_samples;
+  uint64_t concealment_events;
+  double jitter_buffer_delay_seconds;
+  // Stats below DO NOT correspond directly to anything in the WebRTC stats
+  // spec.
+  // Fraction of synthesized audio inserted through expansion.
+  float expand_rate;
+  // fraction of synthesized speech inserted through expansion.
+  float speech_expand_rate;
+  // fraction of data out of secondary decoding, including FEC and RED.
+  float secondary_decoded_rate;
+  // Fraction of secondary data, including FEC and RED, that is discarded.
+  // Discarding of secondary data can be caused by the reception of the primary
+  // data, obsoleting the secondary data. It can also be caused by early
+  // or late arrival of secondary data. This metric is the percentage of
+  // discarded secondary data since last query of receiver info.
+  float secondary_discarded_rate;
+  // Fraction of data removed through time compression.
+  float accelerate_rate;
+  // Fraction of data inserted through time stretching.
+  float preemptive_expand_rate;
+  int decoding_calls_to_silence_generator;
+  int decoding_calls_to_neteq;
+  int decoding_normal;
+  int decoding_plc;
+  int decoding_cng;
+  int decoding_plc_cng;
+  int decoding_muted_output;
+  // Estimated capture start time in NTP time in ms.
+  int64_t capture_start_ntp_time_ms;
+};
+
+// Video-specific sender statistics, extending MediaSenderInfo.
+struct VideoSenderInfo : public MediaSenderInfo {
+  VideoSenderInfo()
+      : packets_cached(0),
+        firs_rcvd(0),
+        plis_rcvd(0),
+        nacks_rcvd(0),
+        send_frame_width(0),
+        send_frame_height(0),
+        framerate_input(0),
+        framerate_sent(0),
+        nominal_bitrate(0),
+        preferred_bitrate(0),
+        adapt_reason(0),
+        adapt_changes(0),
+        avg_encode_ms(0),
+        encode_usage_percent(0),
+        frames_encoded(0),
+        has_entered_low_resolution(false),
+        content_type(webrtc::VideoContentType::UNSPECIFIED) {}
+
+  std::vector<SsrcGroup> ssrc_groups;
+  // TODO(hbos): Move this to |VideoMediaInfo::send_codecs|?
+  std::string encoder_implementation_name;
+  int packets_cached;
+  int firs_rcvd;
+  int plis_rcvd;
+  int nacks_rcvd;
+  int send_frame_width;
+  int send_frame_height;
+  int framerate_input;
+  int framerate_sent;
+  int nominal_bitrate;
+  int preferred_bitrate;
+  int adapt_reason;
+  int adapt_changes;
+  int avg_encode_ms;
+  int encode_usage_percent;
+  uint32_t frames_encoded;
+  bool has_entered_low_resolution;
+  rtc::Optional<uint64_t> qp_sum;
+  webrtc::VideoContentType content_type;
+};
+
+// Video-specific receiver statistics, extending MediaReceiverInfo.
+struct VideoReceiverInfo : public MediaReceiverInfo {
+  VideoReceiverInfo()
+      : packets_concealed(0),
+        firs_sent(0),
+        plis_sent(0),
+        nacks_sent(0),
+        frame_width(0),
+        frame_height(0),
+        framerate_rcvd(0),
+        framerate_decoded(0),
+        framerate_output(0),
+        framerate_render_input(0),
+        framerate_render_output(0),
+        frames_received(0),
+        frames_decoded(0),
+        frames_rendered(0),
+        interframe_delay_max_ms(-1),
+        content_type(webrtc::VideoContentType::UNSPECIFIED),
+        decode_ms(0),
+        max_decode_ms(0),
+        jitter_buffer_ms(0),
+        min_playout_delay_ms(0),
+        render_delay_ms(0),
+        target_delay_ms(0),
+        current_delay_ms(0),
+        capture_start_ntp_time_ms(-1) {}
+
+  std::vector<SsrcGroup> ssrc_groups;
+  // TODO(hbos): Move this to |VideoMediaInfo::receive_codecs|?
+  std::string decoder_implementation_name;
+  int packets_concealed;
+  int firs_sent;
+  int plis_sent;
+  int nacks_sent;
+  int frame_width;
+  int frame_height;
+  int framerate_rcvd;
+  int framerate_decoded;
+  int framerate_output;
+  // Framerate as sent to the renderer.
+  int framerate_render_input;
+  // Framerate that the renderer reports.
+  int framerate_render_output;
+  uint32_t frames_received;
+  uint32_t frames_decoded;
+  uint32_t frames_rendered;
+  rtc::Optional<uint64_t> qp_sum;
+  // Initialized to -1, meaning "not yet observed".
+  int64_t interframe_delay_max_ms;
+
+  webrtc::VideoContentType content_type;
+
+  // All stats below are gathered per-VideoReceiver, but some will be correlated
+  // across MediaStreamTracks. NOTE(hta): when sinking stats into per-SSRC
+  // structures, reflect this in the new layout.
+
+  // Current frame decode latency.
+  int decode_ms;
+  // Maximum observed frame decode latency.
+  int max_decode_ms;
+  // Jitter (network-related) latency.
+  int jitter_buffer_ms;
+  // Requested minimum playout latency.
+  int min_playout_delay_ms;
+  // Requested latency to account for rendering delay.
+  int render_delay_ms;
+  // Target overall delay: network+decode+render, accounting for
+  // min_playout_delay_ms.
+  int target_delay_ms;
+  // Current overall delay, possibly ramping towards target_delay_ms.
+  int current_delay_ms;
+
+  // Estimated capture start time in NTP time in ms.
+  int64_t capture_start_ntp_time_ms;
+
+  // Timing frame info: all important timestamps for a full lifetime of a
+  // single 'timing frame'.
+  rtc::Optional<webrtc::TimingFrameInfo> timing_frame_info;
+};
+
+// Data-channel sender statistics; only adds the stream SSRC.
+struct DataSenderInfo : public MediaSenderInfo {
+  DataSenderInfo()
+      : ssrc(0) {
+  }
+
+  uint32_t ssrc;
+};
+
+// Data-channel receiver statistics; only adds the stream SSRC.
+struct DataReceiverInfo : public MediaReceiverInfo {
+  DataReceiverInfo()
+      : ssrc(0) {
+  }
+
+  uint32_t ssrc;
+};
+
+// Snapshot of bandwidth-estimation-related bitrates; filled in by
+// VideoMediaChannel::FillBitrateInfo.
+struct BandwidthEstimationInfo {
+  BandwidthEstimationInfo()
+      : available_send_bandwidth(0),
+        available_recv_bandwidth(0),
+        target_enc_bitrate(0),
+        actual_enc_bitrate(0),
+        retransmit_bitrate(0),
+        transmit_bitrate(0),
+        bucket_delay(0) {
+  }
+
+  int available_send_bandwidth;
+  int available_recv_bandwidth;
+  int target_enc_bitrate;
+  int actual_enc_bitrate;
+  int retransmit_bitrate;
+  int transmit_bitrate;
+  int64_t bucket_delay;
+};
+
+// Maps from payload type to |RtpCodecParameters|.
+typedef std::map<int, webrtc::RtpCodecParameters> RtpCodecParametersMap;
+
+// Aggregated stats for all senders/receivers of a voice channel.
+struct VoiceMediaInfo {
+  void Clear() {
+    senders.clear();
+    receivers.clear();
+    send_codecs.clear();
+    receive_codecs.clear();
+  }
+  std::vector<VoiceSenderInfo> senders;
+  std::vector<VoiceReceiverInfo> receivers;
+  RtpCodecParametersMap send_codecs;
+  RtpCodecParametersMap receive_codecs;
+};
+
+// Aggregated stats for all senders/receivers of a video channel.
+struct VideoMediaInfo {
+  void Clear() {
+    senders.clear();
+    receivers.clear();
+    bw_estimations.clear();
+    send_codecs.clear();
+    receive_codecs.clear();
+  }
+  std::vector<VideoSenderInfo> senders;
+  std::vector<VideoReceiverInfo> receivers;
+  // Deprecated.
+  // TODO(holmer): Remove once upstream projects no longer use this.
+  std::vector<BandwidthEstimationInfo> bw_estimations;
+  RtpCodecParametersMap send_codecs;
+  RtpCodecParametersMap receive_codecs;
+};
+
+// Aggregated stats for all senders/receivers of a data channel.
+struct DataMediaInfo {
+  void Clear() {
+    senders.clear();
+    receivers.clear();
+  }
+  std::vector<DataSenderInfo> senders;
+  std::vector<DataReceiverInfo> receivers;
+};
+
+struct RtcpParameters {
+  // Reduced-size RTCP mode flag (presumably per RFC 5506) — set during
+  // negotiation elsewhere; TODO confirm against callers.
+  bool reduced_size = false;
+};
+
+// Common receive-direction parameters (codecs, header extensions, RTCP mode)
+// shared by audio/video/data; send direction extends this below.
+template <class Codec>
+struct RtpParameters {
+  virtual std::string ToString() const {
+    std::ostringstream ost;
+    ost << "{";
+    ost << "codecs: " << VectorToString(codecs) << ", ";
+    ost << "extensions: " << VectorToString(extensions);
+    ost << "}";
+    return ost.str();
+  }
+
+  std::vector<Codec> codecs;
+  std::vector<webrtc::RtpExtension> extensions;
+  // TODO(pthatcher): Add streams.
+  RtcpParameters rtcp;
+  virtual ~RtpParameters() = default;
+};
+
+// TODO(deadbeef): Rename to RtpSenderParameters, since they're intended to
+// encapsulate all the parameters needed for an RtpSender.
+template <class Codec>
+struct RtpSendParameters : RtpParameters<Codec> {
+  // NOTE(review): the output ends with a trailing ", " before "}"; kept as-is
+  // since it is only a debug string.
+  std::string ToString() const override {
+    std::ostringstream ost;
+    ost << "{";
+    ost << "codecs: " << VectorToString(this->codecs) << ", ";
+    ost << "extensions: " << VectorToString(this->extensions) << ", ";
+    ost << "max_bandwidth_bps: " << max_bandwidth_bps << ", ";
+    ost << "}";
+    return ost.str();
+  }
+
+  // -1 means "no limit".  Presumably set from SDP b= lines — TODO confirm.
+  int max_bandwidth_bps = -1;
+};
+
+// Audio send parameters: adds channel-level AudioOptions to the generic
+// RTP send parameters.
+struct AudioSendParameters : RtpSendParameters<AudioCodec> {
+  std::string ToString() const override {
+    std::ostringstream ost;
+    ost << "{";
+    ost << "codecs: " << VectorToString(this->codecs) << ", ";
+    ost << "extensions: " << VectorToString(this->extensions) << ", ";
+    ost << "max_bandwidth_bps: " << max_bandwidth_bps << ", ";
+    ost << "options: " << options.ToString();
+    ost << "}";
+    return ost.str();
+  }
+
+  AudioOptions options;
+};
+
+// Audio receive parameters; no additions over the generic RtpParameters.
+struct AudioRecvParameters : RtpParameters<AudioCodec> {
+};
+
+// Abstract interface of an audio media channel: configuration, playout/send
+// control, DTMF, per-stream sinks/volume, and stats retrieval.
+class VoiceMediaChannel : public MediaChannel {
+ public:
+  enum Error {
+    ERROR_NONE = 0,                       // No error.
+    ERROR_OTHER,                          // Other errors.
+    ERROR_REC_DEVICE_OPEN_FAILED = 100,   // Could not open mic.
+    ERROR_REC_DEVICE_MUTED,               // Mic was muted by OS.
+    ERROR_REC_DEVICE_SILENT,              // No background noise picked up.
+    ERROR_REC_DEVICE_SATURATION,          // Mic input is clipping.
+    ERROR_REC_DEVICE_REMOVED,             // Mic was removed while active.
+    ERROR_REC_RUNTIME_ERROR,              // Processing is encountering errors.
+    ERROR_REC_SRTP_ERROR,                 // Generic SRTP failure.
+    ERROR_REC_SRTP_AUTH_FAILED,           // Failed to authenticate packets.
+    ERROR_REC_TYPING_NOISE_DETECTED,      // Typing noise is detected.
+    ERROR_PLAY_DEVICE_OPEN_FAILED = 200,  // Could not open playout.
+    ERROR_PLAY_DEVICE_MUTED,              // Playout muted by OS.
+    ERROR_PLAY_DEVICE_REMOVED,            // Playout removed while active.
+    ERROR_PLAY_RUNTIME_ERROR,             // Errors in voice processing.
+    ERROR_PLAY_SRTP_ERROR,                // Generic SRTP failure.
+    ERROR_PLAY_SRTP_AUTH_FAILED,          // Failed to authenticate packets.
+    ERROR_PLAY_SRTP_REPLAY,               // Packet replay detected.
+  };
+
+  VoiceMediaChannel() {}
+  explicit VoiceMediaChannel(const MediaConfig& config)
+      : MediaChannel(config) {}
+  virtual ~VoiceMediaChannel() {}
+  virtual bool SetSendParameters(const AudioSendParameters& params) = 0;
+  virtual bool SetRecvParameters(const AudioRecvParameters& params) = 0;
+  virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
+  virtual bool SetRtpSendParameters(
+      uint32_t ssrc,
+      const webrtc::RtpParameters& parameters) = 0;
+  // Get the receive parameters for the incoming stream identified by |ssrc|.
+  // If |ssrc| is 0, retrieve the receive parameters for the default receive
+  // stream, which is used when SSRCs are not signaled. Note that calling with
+  // an |ssrc| of 0 will return encoding parameters with an unset |ssrc|
+  // member.
+  virtual webrtc::RtpParameters GetRtpReceiveParameters(
+      uint32_t ssrc) const = 0;
+  virtual bool SetRtpReceiveParameters(
+      uint32_t ssrc,
+      const webrtc::RtpParameters& parameters) = 0;
+  // Starts or stops playout of received audio.
+  virtual void SetPlayout(bool playout) = 0;
+  // Starts or stops sending (and potentially capture) of local audio.
+  virtual void SetSend(bool send) = 0;
+  // Configure stream for sending.
+  virtual bool SetAudioSend(uint32_t ssrc,
+                            bool enable,
+                            const AudioOptions* options,
+                            AudioSource* source) = 0;
+  // Gets current energy levels for all incoming streams.
+  typedef std::vector<std::pair<uint32_t, int>> StreamList;
+  virtual bool GetActiveStreams(StreamList* actives) = 0;
+  // Get the current energy level of the stream sent to the speaker.
+  virtual int GetOutputLevel() = 0;
+  // Set speaker output volume of the specified ssrc.
+  virtual bool SetOutputVolume(uint32_t ssrc, double volume) = 0;
+  // Returns if the telephone-event has been negotiated.
+  virtual bool CanInsertDtmf() = 0;
+  // Send a DTMF |event|. The DTMF out-of-band signal will be used.
+  // The |ssrc| should be either 0 or a valid send stream ssrc.
+  // Valid values for |event| are 0 to 15, corresponding to
+  // DTMF events 0-9, *, #, A-D.
+  virtual bool InsertDtmf(uint32_t ssrc, int event, int duration) = 0;
+  // Gets quality stats for the channel.
+  virtual bool GetStats(VoiceMediaInfo* info) = 0;
+
+  virtual void SetRawAudioSink(
+      uint32_t ssrc,
+      std::unique_ptr<webrtc::AudioSinkInterface> sink) = 0;
+
+  virtual std::vector<webrtc::RtpSource> GetSources(uint32_t ssrc) const = 0;
+};
+
+// TODO(deadbeef): Rename to VideoSenderParameters, since they're intended to
+// encapsulate all the parameters needed for a video RtpSender.
+struct VideoSendParameters : RtpSendParameters<VideoCodec> {
+  // Use conference mode? This flag comes from the remote
+  // description's SDP line 'a=x-google-flag:conference', copied over
+  // by VideoChannel::SetRemoteContent_w, and ultimately used by
+  // conference mode screencast logic in
+  // WebRtcVideoChannel::WebRtcVideoSendStream::CreateVideoEncoderConfig.
+  // The special screencast behaviour is disabled by default.
+  bool conference_mode = false;
+};
+
+// TODO(deadbeef): Rename to VideoReceiverParameters, since they're intended to
+// encapsulate all the parameters needed for a video RtpReceiver.
+// No additions over the generic RtpParameters.
+struct VideoRecvParameters : RtpParameters<VideoCodec> {
+};
+
+// Abstract interface of a video media channel: configuration, send control,
+// per-stream sources/sinks, and stats retrieval.
+class VideoMediaChannel : public MediaChannel {
+ public:
+  enum Error {
+    ERROR_NONE = 0,                     // No error.
+    ERROR_OTHER,                        // Other errors.
+    ERROR_REC_DEVICE_OPEN_FAILED = 100, // Could not open camera.
+    ERROR_REC_DEVICE_NO_DEVICE,         // No camera.
+    ERROR_REC_DEVICE_IN_USE,            // Device is already in use.
+    ERROR_REC_DEVICE_REMOVED,           // Device is removed.
+    ERROR_REC_SRTP_ERROR,               // Generic sender SRTP failure.
+    ERROR_REC_SRTP_AUTH_FAILED,         // Failed to authenticate packets.
+    ERROR_REC_CPU_MAX_CANT_DOWNGRADE,   // Can't downgrade capture anymore.
+    ERROR_PLAY_SRTP_ERROR = 200,        // Generic receiver SRTP failure.
+    ERROR_PLAY_SRTP_AUTH_FAILED,        // Failed to authenticate packets.
+    ERROR_PLAY_SRTP_REPLAY,             // Packet replay detected.
+  };
+
+  VideoMediaChannel() {}
+  explicit VideoMediaChannel(const MediaConfig& config)
+      : MediaChannel(config) {}
+  virtual ~VideoMediaChannel() {}
+
+  virtual bool SetSendParameters(const VideoSendParameters& params) = 0;
+  virtual bool SetRecvParameters(const VideoRecvParameters& params) = 0;
+  virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0;
+  virtual bool SetRtpSendParameters(
+      uint32_t ssrc,
+      const webrtc::RtpParameters& parameters) = 0;
+  // Get the receive parameters for the incoming stream identified by |ssrc|.
+  // If |ssrc| is 0, retrieve the receive parameters for the default receive
+  // stream, which is used when SSRCs are not signaled. Note that calling with
+  // an |ssrc| of 0 will return encoding parameters with an unset |ssrc|
+  // member.
+  virtual webrtc::RtpParameters GetRtpReceiveParameters(
+      uint32_t ssrc) const = 0;
+  virtual bool SetRtpReceiveParameters(
+      uint32_t ssrc,
+      const webrtc::RtpParameters& parameters) = 0;
+  // Gets the currently set codecs/payload types to be used for outgoing media.
+  virtual bool GetSendCodec(VideoCodec* send_codec) = 0;
+  // Starts or stops transmission (and potentially capture) of local video.
+  virtual bool SetSend(bool send) = 0;
+  // Configure stream for sending and register a source.
+  // The |ssrc| must correspond to a registered send stream.
+  virtual bool SetVideoSend(
+      uint32_t ssrc,
+      bool enable,
+      const VideoOptions* options,
+      rtc::VideoSourceInterface<webrtc::VideoFrame>* source) = 0;
+  // Sets the sink object to be used for the specified stream.
+  // If SSRC is 0, the sink is used for the 'default' stream.
+  virtual bool SetSink(uint32_t ssrc,
+                       rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) = 0;
+  // This fills the "bitrate parts" (rtx, video bitrate) of the
+  // BandwidthEstimationInfo, since that part that isn't possible to get
+  // through webrtc::Call::GetStats, as they are statistics of the send
+  // streams.
+  // TODO(holmer): We should change this so that either BWE graphs doesn't
+  // need access to bitrates of the streams, or change the (RTC)StatsCollector
+  // so that it's getting the send stream stats separately by calling
+  // GetStats(), and merges with BandwidthEstimationInfo by itself.
+  virtual void FillBitrateInfo(BandwidthEstimationInfo* bwe_info) = 0;
+  // Gets quality stats for the channel.
+  virtual bool GetStats(VideoMediaInfo* info) = 0;
+};
+
+enum DataMessageType {
+  // Chrome-Internal use only. See SctpDataMediaChannel for the actual PPID
+  // values.
+  DMT_NONE = 0,
+  DMT_CONTROL = 1,
+  DMT_BINARY = 2,
+  DMT_TEXT = 3,
+};
+
+// Info about data received in DataMediaChannel. For use in
+// DataMediaChannel::SignalDataReceived and in all of the signals that
+// signal fires, on up the chain.
+struct ReceiveDataParams {
+  // The in-packet stream identifier.
+  // RTP data channels use SSRCs, SCTP data channels use SIDs.
+  // The two aliases share storage; which one is meaningful depends on the
+  // transport in use.
+  union {
+    uint32_t ssrc;
+    int sid;
+  };
+  // The type of message (binary, text, or control).
+  DataMessageType type;
+  // A per-stream value incremented per packet in the stream.
+  int seq_num;
+  // A per-stream value monotonically increasing with time.
+  int timestamp;
+
+  ReceiveDataParams() : sid(0), type(DMT_TEXT), seq_num(0), timestamp(0) {}
+};
+
+// Parameters accompanying an outgoing data message; see also
+// DataMediaChannel::SendData.
+struct SendDataParams {
+  // The in-packet stream identifier.
+  // RTP data channels use SSRCs, SCTP data channels use SIDs.
+  // The two aliases share storage; which one is meaningful depends on the
+  // transport in use.
+  union {
+    uint32_t ssrc;
+    int sid;
+  };
+  // The type of message (binary, text, or control).
+  DataMessageType type;
+
+  // For SCTP, whether to send messages flagged as ordered or not.
+  // If false, messages can be received out of order.
+  bool ordered;
+  // For SCTP, whether the messages are sent reliably or not.
+  // If false, messages may be lost.
+  bool reliable;
+  // For SCTP, if reliable == false, provide partial reliability by
+  // resending up to this many times. Either count or millis
+  // is supported, not both at the same time.
+  int max_rtx_count;
+  // For SCTP, if reliable == false, provide partial reliability by
+  // resending for up to this many milliseconds. Either count or millis
+  // is supported, not both at the same time.
+  int max_rtx_ms;
+
+  SendDataParams()
+      : sid(0),
+        type(DMT_TEXT),
+        // TODO(pthatcher): Make these true by default?
+        ordered(false),
+        reliable(false),
+        max_rtx_count(0),
+        max_rtx_ms(0) {}
+};
+
+// Result of a SendData call: sent, failed, or flow-controlled (try later).
+enum SendDataResult { SDR_SUCCESS, SDR_ERROR, SDR_BLOCK };
+
+// Parameters for the send direction of an RTP data channel.
+struct DataSendParameters : RtpSendParameters<DataCodec> {
+  // Marked 'override': this overrides the virtual
+  // RtpSendParameters<Codec>::ToString(), matching the style of
+  // AudioSendParameters and avoiding -Winconsistent-missing-override.
+  std::string ToString() const override {
+    std::ostringstream ost;
+    // Options and extensions aren't used.
+    ost << "{";
+    ost << "codecs: " << VectorToString(codecs) << ", ";
+    ost << "max_bandwidth_bps: " << max_bandwidth_bps;
+    ost << "}";
+    return ost.str();
+  }
+};
+
+// Parameters for the receive direction of an RTP data channel; no additions
+// over the generic RtpParameters.
+struct DataRecvParameters : RtpParameters<DataCodec> {
+};
+
+// Abstract interface of a data media channel: send/receive control plus
+// signals for incoming data and writability changes.
+class DataMediaChannel : public MediaChannel {
+ public:
+  enum Error {
+    ERROR_NONE = 0,              // No error.
+    ERROR_OTHER,                 // Other errors.
+    ERROR_SEND_SRTP_ERROR = 200, // Generic SRTP failure.
+    ERROR_SEND_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+    ERROR_RECV_SRTP_ERROR,       // Generic SRTP failure.
+    ERROR_RECV_SRTP_AUTH_FAILED, // Failed to authenticate packets.
+    ERROR_RECV_SRTP_REPLAY,      // Packet replay detected.
+  };
+
+  DataMediaChannel() {}
+  explicit DataMediaChannel(const MediaConfig& config) : MediaChannel(config) {}
+  virtual ~DataMediaChannel() {}
+
+  virtual bool SetSendParameters(const DataSendParameters& params) = 0;
+  virtual bool SetRecvParameters(const DataRecvParameters& params) = 0;
+
+  // TODO(pthatcher): Implement this.
+  // Default implementation reports success without filling in |info|.
+  virtual bool GetStats(DataMediaInfo* info) { return true; }
+
+  virtual bool SetSend(bool send) = 0;
+  virtual bool SetReceive(bool receive) = 0;
+
+  // Default is a no-op; subclasses may react to route changes.
+  virtual void OnNetworkRouteChanged(const std::string& transport_name,
+                                     const rtc::NetworkRoute& network_route) {}
+
+  virtual bool SendData(
+      const SendDataParams& params,
+      const rtc::CopyOnWriteBuffer& payload,
+      SendDataResult* result = NULL) = 0;
+  // Signals when data is received (params, data, len)
+  sigslot::signal3<const ReceiveDataParams&,
+                   const char*,
+                   size_t> SignalDataReceived;
+  // Signal when the media channel is ready to send the stream. Arguments are:
+  // writable(bool)
+  sigslot::signal1<bool> SignalReadyToSend;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_MEDIACHANNEL_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc b/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc
new file mode 100644
index 0000000000..06c172c07c
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaconstants.cc
@@ -0,0 +1,113 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/mediaconstants.h"
+
+#include <string>
+
namespace cricket {

// RTP clock rates in Hz (90 kHz is the standard video/data RTP clock).
const int kVideoCodecClockrate = 90000;
const int kDataCodecClockrate = 90000;
// Default send-rate cap for RTP data channels.
const int kDataMaxBandwidth = 30720;  // bps

// CPU adaptation thresholds, as fractions of available CPU.
const float kHighSystemCpuThreshold = 0.85f;
const float kLowSystemCpuThreshold = 0.65f;
const float kProcessCpuThreshold = 0.10f;

// Retransmission / forward-error-correction codec names as used in SDP.
const char kRtxCodecName[] = "rtx";
const char kRedCodecName[] = "red";
const char kUlpfecCodecName[] = "ulpfec";

// TODO(brandtr): Change this to 'flexfec' when we are confident that the
// header format is not changing anymore.
const char kFlexfecCodecName[] = "flexfec-03";

// draft-ietf-payload-flexible-fec-scheme-02.txt
const char kFlexfecFmtpRepairWindow[] = "repair-window";

// RFC 4588: fmtp parameter linking an RTX payload type to the payload type
// it retransmits.
const char kCodecParamAssociatedPayloadType[] = "apt";

// Audio codec names as they appear in SDP.
const char kOpusCodecName[] = "opus";
const char kIsacCodecName[] = "ISAC";
const char kL16CodecName[] = "L16";
const char kG722CodecName[] = "G722";
const char kIlbcCodecName[] = "ILBC";
const char kPcmuCodecName[] = "PCMU";
const char kPcmaCodecName[] = "PCMA";
const char kCnCodecName[] = "CN";
const char kDtmfCodecName[] = "telephone-event";

// draft-spittka-payload-rtp-opus-03.txt
const char kCodecParamPTime[] = "ptime";
const char kCodecParamMaxPTime[] = "maxptime";
const char kCodecParamMinPTime[] = "minptime";
const char kCodecParamSPropStereo[] = "sprop-stereo";
const char kCodecParamStereo[] = "stereo";
const char kCodecParamUseInbandFec[] = "useinbandfec";
const char kCodecParamUseDtx[] = "usedtx";
const char kCodecParamMaxAverageBitrate[] = "maxaveragebitrate";
const char kCodecParamMaxPlaybackRate[] = "maxplaybackrate";

const char kCodecParamSctpProtocol[] = "protocol";
const char kCodecParamSctpStreams[] = "streams";

const char kParamValueTrue[] = "1";
const char kParamValueEmpty[] = "";

// Opus defaults per draft-spittka-payload-rtp-opus-03.
const int kOpusDefaultMaxPTime = 120;
const int kOpusDefaultPTime = 20;
const int kOpusDefaultMinPTime = 3;
const int kOpusDefaultSPropStereo = 0;
const int kOpusDefaultStereo = 0;
const int kOpusDefaultUseInbandFec = 0;
const int kOpusDefaultUseDtx = 0;
const int kOpusDefaultMaxPlaybackRate = 48000;

// Values preferred by this code base (see mediaconstants.h for rationale).
const int kPreferredMaxPTime = 120;
const int kPreferredMinPTime = 10;
const int kPreferredSPropStereo = 0;
const int kPreferredStereo = 0;
const int kPreferredUseInbandFec = 0;

// rtcp-fb feedback message names (RFC 4585, RFC 5104 and related drafts).
const char kRtcpFbParamNack[] = "nack";
const char kRtcpFbNackParamPli[] = "pli";
const char kRtcpFbParamRemb[] = "goog-remb";
const char kRtcpFbParamTransportCc[] = "transport-cc";

const char kRtcpFbParamCcm[] = "ccm";
const char kRtcpFbCcmParamFir[] = "fir";
// Google-specific fmtp parameters.
const char kCodecParamMaxBitrate[] = "x-google-max-bitrate";
const char kCodecParamMinBitrate[] = "x-google-min-bitrate";
const char kCodecParamStartBitrate[] = "x-google-start-bitrate";
const char kCodecParamMaxQuantization[] = "x-google-max-quantization";
const char kCodecParamPort[] = "x-google-port";

// Payload type and name used for RTP data channels.
const int kGoogleRtpDataCodecPlType = 109;
const char kGoogleRtpDataCodecName[] = "google-data";

// Payload type and name used for SCTP data channels (id never hits the wire).
const int kGoogleSctpDataCodecPlType = 108;
const char kGoogleSctpDataCodecName[] = "google-sctp-data";

const char kComfortNoiseCodecName[] = "CN";

// Video codec names as they appear in SDP.
const char kVp8CodecName[] = "VP8";
const char kVp9CodecName[] = "VP9";
const char kH264CodecName[] = "H264";

// RFC 6184 RTP Payload Format for H.264 video
const char kH264FmtpProfileLevelId[] = "profile-level-id";
const char kH264FmtpLevelAsymmetryAllowed[] = "level-asymmetry-allowed";
const char kH264FmtpPacketizationMode[] = "packetization-mode";
const char kH264FmtpSpropParameterSets[] = "sprop-parameter-sets";
const char kH264ProfileLevelConstrainedBaseline[] = "42e01f";

const int kDefaultVideoMaxFramerate = 60;
}  // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaconstants.h b/third_party/libwebrtc/webrtc/media/base/mediaconstants.h
new file mode 100644
index 0000000000..106fad0cf4
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaconstants.h
@@ -0,0 +1,137 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIACONSTANTS_H_
+#define MEDIA_BASE_MEDIACONSTANTS_H_
+
+#include <string>
+
+// This file contains constants related to media.
+
namespace cricket {

extern const int kVideoCodecClockrate;
extern const int kDataCodecClockrate;
extern const int kDataMaxBandwidth;  // bps

// Default CPU thresholds.
extern const float kHighSystemCpuThreshold;
extern const float kLowSystemCpuThreshold;
extern const float kProcessCpuThreshold;

extern const char kRtxCodecName[];
extern const char kRedCodecName[];
extern const char kUlpfecCodecName[];
extern const char kFlexfecCodecName[];

extern const char kFlexfecFmtpRepairWindow[];

// Codec parameters
extern const char kCodecParamAssociatedPayloadType[];

extern const char kOpusCodecName[];
extern const char kIsacCodecName[];
extern const char kL16CodecName[];
extern const char kG722CodecName[];
extern const char kIlbcCodecName[];
extern const char kPcmuCodecName[];
extern const char kPcmaCodecName[];
extern const char kCnCodecName[];
extern const char kDtmfCodecName[];

// Attribute parameters
extern const char kCodecParamPTime[];
extern const char kCodecParamMaxPTime[];
// fmtp parameters
extern const char kCodecParamMinPTime[];
extern const char kCodecParamSPropStereo[];
extern const char kCodecParamStereo[];
extern const char kCodecParamUseInbandFec[];
extern const char kCodecParamUseDtx[];
extern const char kCodecParamMaxAverageBitrate[];
extern const char kCodecParamMaxPlaybackRate[];
extern const char kCodecParamSctpProtocol[];
extern const char kCodecParamSctpStreams[];

extern const char kParamValueTrue[];
// Parameters are stored as parameter/value pairs. For parameters which do not
// have a value, |kParamValueEmpty| should be used as value.
extern const char kParamValueEmpty[];

// opus parameters.
// Default value for maxptime according to
// http://tools.ietf.org/html/draft-spittka-payload-rtp-opus-03
extern const int kOpusDefaultMaxPTime;
extern const int kOpusDefaultPTime;
extern const int kOpusDefaultMinPTime;
extern const int kOpusDefaultSPropStereo;
extern const int kOpusDefaultStereo;
extern const int kOpusDefaultUseInbandFec;
extern const int kOpusDefaultUseDtx;
extern const int kOpusDefaultMaxPlaybackRate;

// Preferred values in this code base. Note that they may differ from the
// default values in http://tools.ietf.org/html/draft-spittka-payload-rtp-opus-03
// Only frames larger or equal to 10 ms are currently supported in this code
// base.
extern const int kPreferredMaxPTime;
extern const int kPreferredMinPTime;
extern const int kPreferredSPropStereo;
extern const int kPreferredStereo;
extern const int kPreferredUseInbandFec;

// rtcp-fb messages according to RFC 4585
extern const char kRtcpFbParamNack[];
extern const char kRtcpFbNackParamPli[];
// rtcp-fb messages according to
// http://tools.ietf.org/html/draft-alvestrand-rmcat-remb-00
extern const char kRtcpFbParamRemb[];
// rtcp-fb messages according to
// https://tools.ietf.org/html/draft-holmer-rmcat-transport-wide-cc-extensions-01
extern const char kRtcpFbParamTransportCc[];
// ccm submessages according to RFC 5104
extern const char kRtcpFbParamCcm[];
extern const char kRtcpFbCcmParamFir[];
// Google specific parameters
extern const char kCodecParamMaxBitrate[];
extern const char kCodecParamMinBitrate[];
extern const char kCodecParamStartBitrate[];
extern const char kCodecParamMaxQuantization[];
extern const char kCodecParamPort[];

// We put the data codec names here so callers of DataEngine::CreateChannel
// don't have to import rtpdataengine.h to get the codec names they want to
// pass in.
extern const int kGoogleRtpDataCodecPlType;
extern const char kGoogleRtpDataCodecName[];

// TODO(pthatcher): Find an id that won't conflict with anything. On
// the other hand, it really shouldn't matter since the id won't be
// used on the wire.
extern const int kGoogleSctpDataCodecPlType;
extern const char kGoogleSctpDataCodecName[];

extern const char kComfortNoiseCodecName[];

extern const char kVp8CodecName[];
extern const char kVp9CodecName[];
extern const char kH264CodecName[];

// RFC 6184 RTP Payload Format for H.264 video
extern const char kH264FmtpProfileLevelId[];
extern const char kH264FmtpLevelAsymmetryAllowed[];
extern const char kH264FmtpPacketizationMode[];
extern const char kH264FmtpSpropParameterSets[];
extern const char kH264ProfileLevelConstrainedBaseline[];

extern const int kDefaultVideoMaxFramerate;
}  // namespace cricket
+
+#endif // MEDIA_BASE_MEDIACONSTANTS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaengine.cc b/third_party/libwebrtc/webrtc/media/base/mediaengine.cc
new file mode 100644
index 0000000000..281ddbb76f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaengine.cc
@@ -0,0 +1,40 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/mediaengine.h"
+
+#if !defined(DISABLE_MEDIA_ENGINE_FACTORY)
+
+namespace cricket {
+
+MediaEngineFactory::MediaEngineCreateFunction
+ MediaEngineFactory::create_function_ = NULL;
+
+MediaEngineFactory::MediaEngineCreateFunction
+ MediaEngineFactory::SetCreateFunction(MediaEngineCreateFunction function) {
+ MediaEngineCreateFunction old_function = create_function_;
+ create_function_ = function;
+ return old_function;
+}
+
+}; // namespace cricket
+
+#endif // DISABLE_MEDIA_ENGINE_FACTORY
+
+namespace cricket {
+
+webrtc::RtpParameters CreateRtpParametersWithOneEncoding() {
+ webrtc::RtpParameters parameters;
+ webrtc::RtpEncodingParameters encoding;
+ parameters.encodings.push_back(encoding);
+ return parameters;
+}
+
+}; // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/mediaengine.h b/third_party/libwebrtc/webrtc/media/base/mediaengine.h
new file mode 100644
index 0000000000..483a96b7e8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/mediaengine.h
@@ -0,0 +1,188 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_MEDIAENGINE_H_
+#define MEDIA_BASE_MEDIAENGINE_H_
+
+#if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS)
+#include <CoreAudio/CoreAudio.h>
+#endif
+
+#include <string>
+#include <tuple>
+#include <utility>
+#include <vector>
+
+#include "api/audio_codecs/audio_decoder_factory.h"
+#include "api/audio_codecs/audio_encoder_factory.h"
+#include "api/rtpparameters.h"
+#include "call/audio_state.h"
+#include "media/base/codec.h"
+#include "media/base/mediachannel.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/platform_file.h"
+
+#if defined(GOOGLE_CHROME_BUILD) || defined(CHROMIUM_BUILD)
+#define DISABLE_MEDIA_ENGINE_FACTORY
+#endif
+
+namespace webrtc {
+class AudioDeviceModule;
+class AudioMixer;
+class AudioProcessing;
+class Call;
+}
+
+namespace cricket {
+
// RTP header extensions supported by an engine, reported during
// capability negotiation.
struct RtpCapabilities {
  std::vector<webrtc::RtpExtension> header_extensions;
};
+
// MediaEngineInterface is an abstraction of a media engine which can be
// subclassed to support different media componentry backends.
// It supports voice and video operations in the same class to facilitate
// proper synchronization between both media types.
class MediaEngineInterface {
 public:
  virtual ~MediaEngineInterface() {}

  // Initialization
  // Starts the engine.
  virtual bool Init() = 0;
  // TODO(solenberg): Remove once VoE API refactoring is done.
  virtual rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const = 0;

  // MediaChannel creation
  // Creates a voice media channel. Returns NULL on failure.
  // NOTE(review): channels appear to be heap-allocated with ownership
  // passing to the caller - confirm against concrete implementations.
  virtual VoiceMediaChannel* CreateChannel(webrtc::Call* call,
                                           const MediaConfig& config,
                                           const AudioOptions& options) = 0;
  // Creates a video media channel, paired with the specified voice channel.
  // Returns NULL on failure.
  virtual VideoMediaChannel* CreateVideoChannel(
      webrtc::Call* call,
      const MediaConfig& config,
      const VideoOptions& options) = 0;

  // Gets the current microphone level, as a value between 0 and 10.
  virtual int GetInputLevel() = 0;

  // Supported codecs and RTP capabilities, used during negotiation.
  virtual const std::vector<AudioCodec>& audio_send_codecs() = 0;
  virtual const std::vector<AudioCodec>& audio_recv_codecs() = 0;
  virtual RtpCapabilities GetAudioCapabilities() = 0;
  virtual std::vector<VideoCodec> video_codecs() = 0;
  virtual RtpCapabilities GetVideoCapabilities() = 0;

  // Starts AEC dump using existing file, a maximum file size in bytes can be
  // specified. Logging is stopped just before the size limit is exceeded.
  // If max_size_bytes is set to a value <= 0, no limit will be used.
  virtual bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) = 0;

  // Stops recording AEC dump.
  virtual void StopAecDump() = 0;
};
+
+
+#if !defined(DISABLE_MEDIA_ENGINE_FACTORY)
// Global factory hook that lets embedders override which MediaEngine
// implementation Create() returns (compiled out for Chromium builds).
class MediaEngineFactory {
 public:
  typedef cricket::MediaEngineInterface* (*MediaEngineCreateFunction)();
  // Creates a media engine, using either the compiled system default or the
  // creation function specified in SetCreateFunction, if specified.
  static MediaEngineInterface* Create();
  // Sets the function used when calling Create. If unset, the compiled system
  // default will be used. Returns the old create function, or NULL if one
  // wasn't set. Likewise, NULL can be used as the |function| parameter to
  // reset to the default behavior.
  static MediaEngineCreateFunction SetCreateFunction(
      MediaEngineCreateFunction function);
 private:
  // Current override; NULL means "use the compiled-in default".
  static MediaEngineCreateFunction create_function_;
};
+#endif
+
+// CompositeMediaEngine constructs a MediaEngine from separate
+// voice and video engine classes.
+template <class VOICE, class VIDEO>
+class CompositeMediaEngine : public MediaEngineInterface {
+ public:
+ template <class... Args1, class... Args2>
+ CompositeMediaEngine(std::tuple<Args1...> first_args,
+ std::tuple<Args2...> second_args)
+ : engines_(std::piecewise_construct,
+ std::move(first_args),
+ std::move(second_args)) {}
+
+ virtual ~CompositeMediaEngine() {}
+ virtual bool Init() {
+ voice().Init();
+ return true;
+ }
+
+ virtual rtc::scoped_refptr<webrtc::AudioState> GetAudioState() const {
+ return voice().GetAudioState();
+ }
+ virtual VoiceMediaChannel* CreateChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const AudioOptions& options) {
+ return voice().CreateChannel(call, config, options);
+ }
+ virtual VideoMediaChannel* CreateVideoChannel(webrtc::Call* call,
+ const MediaConfig& config,
+ const VideoOptions& options) {
+ return video().CreateChannel(call, config, options);
+ }
+
+ virtual int GetInputLevel() { return voice().GetInputLevel(); }
+ virtual const std::vector<AudioCodec>& audio_send_codecs() {
+ return voice().send_codecs();
+ }
+ virtual const std::vector<AudioCodec>& audio_recv_codecs() {
+ return voice().recv_codecs();
+ }
+ virtual RtpCapabilities GetAudioCapabilities() {
+ return voice().GetCapabilities();
+ }
+ virtual std::vector<VideoCodec> video_codecs() { return video().codecs(); }
+ virtual RtpCapabilities GetVideoCapabilities() {
+ return video().GetCapabilities();
+ }
+
+ virtual bool StartAecDump(rtc::PlatformFile file, int64_t max_size_bytes) {
+ return voice().StartAecDump(file, max_size_bytes);
+ }
+
+ virtual void StopAecDump() { voice().StopAecDump(); }
+
+ protected:
+ VOICE& voice() { return engines_.first; }
+ VIDEO& video() { return engines_.second; }
+ const VOICE& voice() const { return engines_.first; }
+ const VIDEO& video() const { return engines_.second; }
+
+ private:
+ std::pair<VOICE, VIDEO> engines_;
+};
+
// Which kind of data channel a session uses, if any.
enum DataChannelType { DCT_NONE = 0, DCT_RTP = 1, DCT_SCTP = 2 };

// Factory interface for data channels, mirroring MediaEngineInterface.
class DataEngineInterface {
 public:
  virtual ~DataEngineInterface() {}
  // Creates a data channel; the caller assumes ownership of the returned
  // heap-allocated object.
  virtual DataMediaChannel* CreateChannel(const MediaConfig& config) = 0;
  // Codecs supported by this engine, used during negotiation.
  virtual const std::vector<DataCodec>& data_codecs() = 0;
};

// Returns an RtpParameters with a single default-constructed encoding.
webrtc::RtpParameters CreateRtpParametersWithOneEncoding();
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_MEDIAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc
new file mode 100644
index 0000000000..7cb5fa8585
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.cc
@@ -0,0 +1,355 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/rtpdataengine.h"
+
+#include <map>
+
+#include "media/base/codec.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/rtputils.h"
+#include "media/base/streamparams.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/ratelimiter.h"
+#include "rtc_base/sanitizer.h"
+#include "rtc_base/stringutils.h"
+
+namespace cricket {
+
// We want to avoid IP fragmentation.
// (1200 bytes stays safely below a typical 1500-byte Ethernet MTU after
// IP/UDP and tunneling overhead.)
static const size_t kDataMaxRtpPacketLen = 1200U;
// We reserve space after the RTP header for future wiggle room.
static const unsigned char kReservedSpace[] = {
  0x00, 0x00, 0x00, 0x00
};

// Amount of overhead SRTP may take. We need to leave room in the
// buffer for it, otherwise SRTP will fail later. If SRTP ever uses
// more than this, we need to increase this number.
static const size_t kMaxSrtpHmacOverhead = 16;
+
+RtpDataEngine::RtpDataEngine() {
+ data_codecs_.push_back(
+ DataCodec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName));
+}
+
// Creates a new RTP data channel. The returned object is heap-allocated and
// owned by the caller.
DataMediaChannel* RtpDataEngine::CreateChannel(
    const MediaConfig& config) {
  return new RtpDataMediaChannel(config);
}
+
+static const DataCodec* FindCodecByName(const std::vector<DataCodec>& codecs,
+ const std::string& name) {
+ for (const DataCodec& codec : codecs) {
+ if (_stricmp(name.c_str(), codec.name.c_str()) == 0)
+ return &codec;
+ }
+ return nullptr;
+}
+
RtpDataMediaChannel::RtpDataMediaChannel(const MediaConfig& config)
    : DataMediaChannel(config) {
  Construct();
}

// Shared constructor body: start with both directions disabled and install
// the default rate limiter (kDataMaxBandwidth bits/s expressed as bytes per
// 1-second period).
void RtpDataMediaChannel::Construct() {
  sending_ = false;
  receiving_ = false;
  send_limiter_.reset(new rtc::RateLimiter(kDataMaxBandwidth / 8, 1.0));
}
+
+
+RtpDataMediaChannel::~RtpDataMediaChannel() {
+ std::map<uint32_t, RtpClock*>::const_iterator iter;
+ for (iter = rtp_clock_by_send_ssrc_.begin();
+ iter != rtp_clock_by_send_ssrc_.end();
+ ++iter) {
+ delete iter->second;
+ }
+}
+
// Produces the next RTP sequence number and the media timestamp for a packet
// sent at time |now| (in seconds, monotonically increasing).
void RTC_NO_SANITIZE("float-cast-overflow")  // bugs.webrtc.org/8204
RtpClock::Tick(double now, int* seq_num, uint32_t* timestamp) {
  *seq_num = ++last_seq_num_;
  // The double -> uint32_t cast can overflow for large |now|; the sanitizer
  // suppression above acknowledges that known issue instead of fixing it.
  *timestamp = timestamp_offset_ + static_cast<uint32_t>(now * clockrate_);
  // UBSan: 5.92374e+10 is outside the range of representable values of type
  // 'unsigned int'
}
+
+const DataCodec* FindUnknownCodec(const std::vector<DataCodec>& codecs) {
+ DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
+ std::vector<DataCodec>::const_iterator iter;
+ for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
+ if (!iter->Matches(data_codec)) {
+ return &(*iter);
+ }
+ }
+ return NULL;
+}
+
+const DataCodec* FindKnownCodec(const std::vector<DataCodec>& codecs) {
+ DataCodec data_codec(kGoogleRtpDataCodecPlType, kGoogleRtpDataCodecName);
+ std::vector<DataCodec>::const_iterator iter;
+ for (iter = codecs.begin(); iter != codecs.end(); ++iter) {
+ if (iter->Matches(data_codec)) {
+ return &(*iter);
+ }
+ }
+ return NULL;
+}
+
+bool RtpDataMediaChannel::SetRecvCodecs(const std::vector<DataCodec>& codecs) {
+ const DataCodec* unknown_codec = FindUnknownCodec(codecs);
+ if (unknown_codec) {
+ RTC_LOG(LS_WARNING) << "Failed to SetRecvCodecs because of unknown codec: "
+ << unknown_codec->ToString();
+ return false;
+ }
+
+ recv_codecs_ = codecs;
+ return true;
+}
+
+bool RtpDataMediaChannel::SetSendCodecs(const std::vector<DataCodec>& codecs) {
+ const DataCodec* known_codec = FindKnownCodec(codecs);
+ if (!known_codec) {
+ RTC_LOG(LS_WARNING)
+ << "Failed to SetSendCodecs because there is no known codec.";
+ return false;
+ }
+
+ send_codecs_ = codecs;
+ return true;
+}
+
+bool RtpDataMediaChannel::SetSendParameters(const DataSendParameters& params) {
+ return (SetSendCodecs(params.codecs) &&
+ SetMaxSendBandwidth(params.max_bandwidth_bps));
+}
+
// Applies negotiated receive parameters; only the codec list is relevant
// for RTP data.
bool RtpDataMediaChannel::SetRecvParameters(const DataRecvParameters& params) {
  return SetRecvCodecs(params.codecs);
}
+
// Registers |stream| for sending and creates its per-SSRC RtpClock with
// random initial sequence number and timestamp offset. Fails if the stream
// has no SSRCs or a stream with the same primary SSRC already exists.
bool RtpDataMediaChannel::AddSendStream(const StreamParams& stream) {
  if (!stream.has_ssrcs()) {
    return false;
  }

  if (GetStreamBySsrc(send_streams_, stream.first_ssrc())) {
    RTC_LOG(LS_WARNING) << "Not adding data send stream '" << stream.id
                        << "' with ssrc=" << stream.first_ssrc()
                        << " because stream already exists.";
    return false;
  }

  send_streams_.push_back(stream);
  // TODO(pthatcher): This should be per-stream, not per-ssrc.
  // And we should probably allow more than one per stream.
  rtp_clock_by_send_ssrc_[stream.first_ssrc()] = new RtpClock(
      kDataCodecClockrate,
      rtc::CreateRandomNonZeroId(), rtc::CreateRandomNonZeroId());

  RTC_LOG(LS_INFO) << "Added data send stream '" << stream.id
                   << "' with ssrc=" << stream.first_ssrc();
  return true;
}
+
+bool RtpDataMediaChannel::RemoveSendStream(uint32_t ssrc) {
+ if (!GetStreamBySsrc(send_streams_, ssrc)) {
+ return false;
+ }
+
+ RemoveStreamBySsrc(&send_streams_, ssrc);
+ delete rtp_clock_by_send_ssrc_[ssrc];
+ rtp_clock_by_send_ssrc_.erase(ssrc);
+ return true;
+}
+
+bool RtpDataMediaChannel::AddRecvStream(const StreamParams& stream) {
+ if (!stream.has_ssrcs()) {
+ return false;
+ }
+
+ if (GetStreamBySsrc(recv_streams_, stream.first_ssrc())) {
+ RTC_LOG(LS_WARNING) << "Not adding data recv stream '" << stream.id
+ << "' with ssrc=" << stream.first_ssrc()
+ << " because stream already exists.";
+ return false;
+ }
+
+ recv_streams_.push_back(stream);
+ RTC_LOG(LS_INFO) << "Added data recv stream '" << stream.id
+ << "' with ssrc=" << stream.first_ssrc();
+ return true;
+}
+
// Removes the receive stream with the given SSRC. Always reports success,
// even if no such stream existed.
bool RtpDataMediaChannel::RemoveRecvStream(uint32_t ssrc) {
  RemoveStreamBySsrc(&recv_streams_, ssrc);
  return true;
}
+
+void RtpDataMediaChannel::OnPacketReceived(
+ rtc::CopyOnWriteBuffer* packet, const rtc::PacketTime& packet_time) {
+ RtpHeader header;
+ if (!GetRtpHeader(packet->cdata(), packet->size(), &header)) {
+ // Don't want to log for every corrupt packet.
+ // RTC_LOG(LS_WARNING) << "Could not read rtp header from packet of length "
+ // << packet->length() << ".";
+ return;
+ }
+
+ size_t header_length;
+ if (!GetRtpHeaderLen(packet->cdata(), packet->size(), &header_length)) {
+ // Don't want to log for every corrupt packet.
+ // RTC_LOG(LS_WARNING) << "Could not read rtp header"
+ // << length from packet of length "
+ // << packet->length() << ".";
+ return;
+ }
+ const char* data =
+ packet->cdata<char>() + header_length + sizeof(kReservedSpace);
+ size_t data_len = packet->size() - header_length - sizeof(kReservedSpace);
+
+ if (!receiving_) {
+ RTC_LOG(LS_WARNING) << "Not receiving packet " << header.ssrc << ":"
+ << header.seq_num << " before SetReceive(true) called.";
+ return;
+ }
+
+ if (!FindCodecById(recv_codecs_, header.payload_type)) {
+ // For bundling, this will be logged for every message.
+ // So disable this logging.
+ // RTC_LOG(LS_WARNING) << "Not receiving packet "
+ // << header.ssrc << ":" << header.seq_num
+ // << " (" << data_len << ")"
+ // << " because unknown payload id: " << header.payload_type;
+ return;
+ }
+
+ if (!GetStreamBySsrc(recv_streams_, header.ssrc)) {
+ RTC_LOG(LS_WARNING) << "Received packet for unknown ssrc: " << header.ssrc;
+ return;
+ }
+
+ // Uncomment this for easy debugging.
+ // const auto* found_stream = GetStreamBySsrc(recv_streams_, header.ssrc);
+ // RTC_LOG(LS_INFO) << "Received packet"
+ // << " groupid=" << found_stream.groupid
+ // << ", ssrc=" << header.ssrc
+ // << ", seqnum=" << header.seq_num
+ // << ", timestamp=" << header.timestamp
+ // << ", len=" << data_len;
+
+ ReceiveDataParams params;
+ params.ssrc = header.ssrc;
+ params.seq_num = header.seq_num;
+ params.timestamp = header.timestamp;
+ SignalDataReceived(params, data, data_len);
+}
+
+bool RtpDataMediaChannel::SetMaxSendBandwidth(int bps) {
+ if (bps <= 0) {
+ bps = kDataMaxBandwidth;
+ }
+ send_limiter_.reset(new rtc::RateLimiter(bps / 8, 1.0));
+ RTC_LOG(LS_INFO) << "RtpDataMediaChannel::SetSendBandwidth to " << bps
+ << "bps.";
+ return true;
+}
+
// Sends one text message as a single RTP packet, subject to the configured
// rate limit. Returns false (leaving |result|, when provided, at SDR_ERROR)
// if the channel is not sending, the message is not DMT_TEXT, the SSRC or
// codec is unknown, the packet would exceed kDataMaxRtpPacketLen, or the
// rate limiter rejects it.
bool RtpDataMediaChannel::SendData(
    const SendDataParams& params,
    const rtc::CopyOnWriteBuffer& payload,
    SendDataResult* result) {
  if (result) {
    // If we return true, we'll set this to SDR_SUCCESS.
    *result = SDR_ERROR;
  }
  if (!sending_) {
    RTC_LOG(LS_WARNING) << "Not sending packet with ssrc=" << params.ssrc
                        << " len=" << payload.size()
                        << " before SetSend(true).";
    return false;
  }

  if (params.type != cricket::DMT_TEXT) {
    RTC_LOG(LS_WARNING)
        << "Not sending data because binary type is unsupported.";
    return false;
  }

  const StreamParams* found_stream =
      GetStreamBySsrc(send_streams_, params.ssrc);
  if (!found_stream) {
    RTC_LOG(LS_WARNING) << "Not sending data because ssrc is unknown: "
                        << params.ssrc;
    return false;
  }

  const DataCodec* found_codec =
      FindCodecByName(send_codecs_, kGoogleRtpDataCodecName);
  if (!found_codec) {
    RTC_LOG(LS_WARNING) << "Not sending data because codec is unknown: "
                        << kGoogleRtpDataCodecName;
    return false;
  }

  // Budget the full on-wire size: RTP header + reserved bytes + payload +
  // room for the SRTP auth tag.
  size_t packet_len = (kMinRtpPacketLen + sizeof(kReservedSpace) +
                       payload.size() + kMaxSrtpHmacOverhead);
  if (packet_len > kDataMaxRtpPacketLen) {
    return false;
  }

  double now =
      rtc::TimeMicros() / static_cast<double>(rtc::kNumMicrosecsPerSec);

  if (!send_limiter_->CanUse(packet_len, now)) {
    RTC_LOG(LS_VERBOSE) << "Dropped data packet of len=" << packet_len
                        << "; already sent " << send_limiter_->used_in_period()
                        << "/" << send_limiter_->max_per_period();
    return false;
  }

  // Stamp the packet with this stream's next sequence number and timestamp.
  // The clock entry was created in AddSendStream for this SSRC.
  RtpHeader header;
  header.payload_type = found_codec->id;
  header.ssrc = params.ssrc;
  rtp_clock_by_send_ssrc_[header.ssrc]->Tick(
      now, &header.seq_num, &header.timestamp);

  rtc::CopyOnWriteBuffer packet(kMinRtpPacketLen, packet_len);
  if (!SetRtpHeader(packet.data(), packet.size(), header)) {
    return false;
  }
  packet.AppendData(kReservedSpace);
  packet.AppendData(payload);

  RTC_LOG(LS_VERBOSE) << "Sent RTP data packet: "
                      << " stream=" << found_stream->id
                      << " ssrc=" << header.ssrc
                      << ", seqnum=" << header.seq_num
                      << ", timestamp=" << header.timestamp
                      << ", len=" << payload.size();

  MediaChannel::SendPacket(&packet, rtc::PacketOptions());
  send_limiter_->Use(packet_len, now);
  if (result) {
    *result = SDR_SUCCESS;
  }
  return true;
}
+
// RTP data packets are marked with DSCP AF41 (assured forwarding), the same
// DiffServ class commonly used for interactive video.
rtc::DiffServCodePoint RtpDataMediaChannel::PreferredDscp() const {
  return rtc::DSCP_AF41;
}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/rtpdataengine.h b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.h
new file mode 100644
index 0000000000..64e083b0fd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtpdataengine.h
@@ -0,0 +1,111 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_RTPDATAENGINE_H_
+#define MEDIA_BASE_RTPDATAENGINE_H_
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/mediaengine.h"
+
+namespace cricket {
+
+struct DataCodec;
+
// Data engine that transports application data as plain RTP packets.
// Advertises a single codec: "google-data".
class RtpDataEngine : public DataEngineInterface {
 public:
  RtpDataEngine();

  // Caller takes ownership of the returned channel.
  virtual DataMediaChannel* CreateChannel(const MediaConfig& config);

  virtual const std::vector<DataCodec>& data_codecs() {
    return data_codecs_;
  }

 private:
  std::vector<DataCodec> data_codecs_;
};
+
// Keep track of sequence number and timestamp of an RTP stream. The
// sequence number starts with a "random" value and increments. The
// timestamp starts with a "random" value and increases monotonically
// according to the clockrate.
class RtpClock {
 public:
  RtpClock(int clockrate, uint16_t first_seq_num, uint32_t timestamp_offset)
      : clockrate_(clockrate),
        last_seq_num_(first_seq_num),
        timestamp_offset_(timestamp_offset) {}

  // Given the current time (in number of seconds which must be
  // monotonically increasing), Return the next sequence number and
  // timestamp.
  void Tick(double now, int* seq_num, uint32_t* timestamp);

 private:
  int clockrate_;              // RTP timestamp units per second.
  uint16_t last_seq_num_;      // Wraps naturally at 16 bits.
  uint32_t timestamp_offset_;  // Random base added to now * clockrate_.
};
+
// DataMediaChannel implementation that sends each data message as one
// rate-limited RTP packet.
class RtpDataMediaChannel : public DataMediaChannel {
 public:
  explicit RtpDataMediaChannel(const MediaConfig& config);
  virtual ~RtpDataMediaChannel();

  virtual bool SetSendParameters(const DataSendParameters& params);
  virtual bool SetRecvParameters(const DataRecvParameters& params);
  virtual bool AddSendStream(const StreamParams& sp);
  virtual bool RemoveSendStream(uint32_t ssrc);
  virtual bool AddRecvStream(const StreamParams& sp);
  virtual bool RemoveRecvStream(uint32_t ssrc);
  virtual bool SetSend(bool send) {
    sending_ = send;
    return true;
  }
  virtual bool SetReceive(bool receive) {
    receiving_ = receive;
    return true;
  }
  virtual void OnPacketReceived(rtc::CopyOnWriteBuffer* packet,
                                const rtc::PacketTime& packet_time);
  // RTCP and readiness notifications are ignored by this implementation.
  virtual void OnRtcpReceived(rtc::CopyOnWriteBuffer* packet,
                              const rtc::PacketTime& packet_time) {}
  virtual void OnReadyToSend(bool ready) {}
  virtual bool SendData(
      const SendDataParams& params,
      const rtc::CopyOnWriteBuffer& payload,
      SendDataResult* result);
  virtual rtc::DiffServCodePoint PreferredDscp() const;

 private:
  void Construct();
  bool SetMaxSendBandwidth(int bps);
  bool SetSendCodecs(const std::vector<DataCodec>& codecs);
  bool SetRecvCodecs(const std::vector<DataCodec>& codecs);

  bool sending_;
  bool receiving_;
  std::vector<DataCodec> send_codecs_;
  std::vector<DataCodec> recv_codecs_;
  std::vector<StreamParams> send_streams_;
  std::vector<StreamParams> recv_streams_;
  // Owned raw pointers, deleted in the destructor.
  // NOTE(review): consider std::unique_ptr<RtpClock> values instead.
  std::map<uint32_t, RtpClock*> rtp_clock_by_send_ssrc_;
  std::unique_ptr<rtc::RateLimiter> send_limiter_;
};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_RTPDATAENGINE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc b/third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc
new file mode 100644
index 0000000000..a05c3de264
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtpdataengine_unittest.cc
@@ -0,0 +1,377 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory>
+#include <string>
+
+#include "media/base/fakenetworkinterface.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/rtpdataengine.h"
+#include "media/base/rtputils.h"
+#include "rtc_base/copyonwritebuffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/helpers.h"
+#include "rtc_base/ssladapter.h"
+
+class FakeDataReceiver : public sigslot::has_slots<> {
+ public:
+ FakeDataReceiver() : has_received_data_(false) {}
+
+ void OnDataReceived(
+ const cricket::ReceiveDataParams& params,
+ const char* data, size_t len) {
+ has_received_data_ = true;
+ last_received_data_ = std::string(data, len);
+ last_received_data_len_ = len;
+ last_received_data_params_ = params;
+ }
+
+ bool has_received_data() const { return has_received_data_; }
+ std::string last_received_data() const { return last_received_data_; }
+ size_t last_received_data_len() const { return last_received_data_len_; }
+ cricket::ReceiveDataParams last_received_data_params() const {
+ return last_received_data_params_;
+ }
+
+ private:
+ bool has_received_data_;
+ std::string last_received_data_;
+ size_t last_received_data_len_;
+ cricket::ReceiveDataParams last_received_data_params_;
+};
+
+class RtpDataMediaChannelTest : public testing::Test {
+ protected:
+ virtual void SetUp() {
+ // Seed needed for each test to satisfy expectations.
+ iface_.reset(new cricket::FakeNetworkInterface());
+ dme_.reset(CreateEngine());
+ receiver_.reset(new FakeDataReceiver());
+ }
+
+ void SetNow(double now) {
+ clock_.SetTimeNanos(now * rtc::kNumNanosecsPerSec);
+ }
+
+ cricket::RtpDataEngine* CreateEngine() {
+ cricket::RtpDataEngine* dme = new cricket::RtpDataEngine();
+ return dme;
+ }
+
+ cricket::RtpDataMediaChannel* CreateChannel() {
+ return CreateChannel(dme_.get());
+ }
+
+ cricket::RtpDataMediaChannel* CreateChannel(cricket::RtpDataEngine* dme) {
+ cricket::MediaConfig config;
+ cricket::RtpDataMediaChannel* channel =
+ static_cast<cricket::RtpDataMediaChannel*>(dme->CreateChannel(config));
+ channel->SetInterface(iface_.get());
+ channel->SignalDataReceived.connect(
+ receiver_.get(), &FakeDataReceiver::OnDataReceived);
+ return channel;
+ }
+
+ FakeDataReceiver* receiver() {
+ return receiver_.get();
+ }
+
+ bool HasReceivedData() {
+ return receiver_->has_received_data();
+ }
+
+ std::string GetReceivedData() {
+ return receiver_->last_received_data();
+ }
+
+ size_t GetReceivedDataLen() {
+ return receiver_->last_received_data_len();
+ }
+
+ cricket::ReceiveDataParams GetReceivedDataParams() {
+ return receiver_->last_received_data_params();
+ }
+
+ bool HasSentData(int count) {
+ return (iface_->NumRtpPackets() > count);
+ }
+
+ std::string GetSentData(int index) {
+ // Assume RTP header of length 12
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> packet(
+ iface_->GetRtpPacket(index));
+ if (packet->size() > 12) {
+ return std::string(packet->data<char>() + 12, packet->size() - 12);
+ } else {
+ return "";
+ }
+ }
+
+ cricket::RtpHeader GetSentDataHeader(int index) {
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> packet(
+ iface_->GetRtpPacket(index));
+ cricket::RtpHeader header;
+ GetRtpHeader(packet->data(), packet->size(), &header);
+ return header;
+ }
+
+ private:
+ std::unique_ptr<cricket::RtpDataEngine> dme_;
+ rtc::ScopedFakeClock clock_;
+ std::unique_ptr<cricket::FakeNetworkInterface> iface_;
+ std::unique_ptr<FakeDataReceiver> receiver_;
+};
+
+TEST_F(RtpDataMediaChannelTest, SetUnknownCodecs) {
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ cricket::DataCodec known_codec;
+ known_codec.id = 103;
+ known_codec.name = "google-data";
+ cricket::DataCodec unknown_codec;
+ unknown_codec.id = 104;
+ unknown_codec.name = "unknown-data";
+
+ cricket::DataSendParameters send_parameters_known;
+ send_parameters_known.codecs.push_back(known_codec);
+ cricket::DataRecvParameters recv_parameters_known;
+ recv_parameters_known.codecs.push_back(known_codec);
+
+ cricket::DataSendParameters send_parameters_unknown;
+ send_parameters_unknown.codecs.push_back(unknown_codec);
+ cricket::DataRecvParameters recv_parameters_unknown;
+ recv_parameters_unknown.codecs.push_back(unknown_codec);
+
+ cricket::DataSendParameters send_parameters_mixed;
+ send_parameters_mixed.codecs.push_back(known_codec);
+ send_parameters_mixed.codecs.push_back(unknown_codec);
+ cricket::DataRecvParameters recv_parameters_mixed;
+ recv_parameters_mixed.codecs.push_back(known_codec);
+ recv_parameters_mixed.codecs.push_back(unknown_codec);
+
+ EXPECT_TRUE(dmc->SetSendParameters(send_parameters_known));
+ EXPECT_FALSE(dmc->SetSendParameters(send_parameters_unknown));
+ EXPECT_TRUE(dmc->SetSendParameters(send_parameters_mixed));
+ EXPECT_TRUE(dmc->SetRecvParameters(recv_parameters_known));
+ EXPECT_FALSE(dmc->SetRecvParameters(recv_parameters_unknown));
+ EXPECT_FALSE(dmc->SetRecvParameters(recv_parameters_mixed));
+}
+
+TEST_F(RtpDataMediaChannelTest, AddRemoveSendStream) {
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ cricket::StreamParams stream1;
+ stream1.add_ssrc(41);
+ EXPECT_TRUE(dmc->AddSendStream(stream1));
+ cricket::StreamParams stream2;
+ stream2.add_ssrc(42);
+ EXPECT_TRUE(dmc->AddSendStream(stream2));
+
+ EXPECT_TRUE(dmc->RemoveSendStream(41));
+ EXPECT_TRUE(dmc->RemoveSendStream(42));
+ EXPECT_FALSE(dmc->RemoveSendStream(43));
+}
+
+TEST_F(RtpDataMediaChannelTest, AddRemoveRecvStream) {
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ cricket::StreamParams stream1;
+ stream1.add_ssrc(41);
+ EXPECT_TRUE(dmc->AddRecvStream(stream1));
+ cricket::StreamParams stream2;
+ stream2.add_ssrc(42);
+ EXPECT_TRUE(dmc->AddRecvStream(stream2));
+ EXPECT_FALSE(dmc->AddRecvStream(stream2));
+
+ EXPECT_TRUE(dmc->RemoveRecvStream(41));
+ EXPECT_TRUE(dmc->RemoveRecvStream(42));
+}
+
+TEST_F(RtpDataMediaChannelTest, SendData) {
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ cricket::SendDataParams params;
+ params.ssrc = 42;
+ unsigned char data[] = "food";
+ rtc::CopyOnWriteBuffer payload(data, 4);
+ unsigned char padded_data[] = {
+ 0x00, 0x00, 0x00, 0x00,
+ 'f', 'o', 'o', 'd',
+ };
+ cricket::SendDataResult result;
+
+ // Not sending
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+ EXPECT_EQ(cricket::SDR_ERROR, result);
+ EXPECT_FALSE(HasSentData(0));
+ ASSERT_TRUE(dmc->SetSend(true));
+
+ // Unknown stream name.
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+ EXPECT_EQ(cricket::SDR_ERROR, result);
+ EXPECT_FALSE(HasSentData(0));
+
+ cricket::StreamParams stream;
+ stream.add_ssrc(42);
+ ASSERT_TRUE(dmc->AddSendStream(stream));
+
+ // Unknown codec.
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+ EXPECT_EQ(cricket::SDR_ERROR, result);
+ EXPECT_FALSE(HasSentData(0));
+
+ cricket::DataCodec codec;
+ codec.id = 103;
+ codec.name = cricket::kGoogleRtpDataCodecName;
+ cricket::DataSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ ASSERT_TRUE(dmc->SetSendParameters(parameters));
+
+ // Length too large.
+ std::string x10000(10000, 'x');
+ EXPECT_FALSE(dmc->SendData(
+ params, rtc::CopyOnWriteBuffer(x10000.data(), x10000.length()), &result));
+ EXPECT_EQ(cricket::SDR_ERROR, result);
+ EXPECT_FALSE(HasSentData(0));
+
+ // Finally works!
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_EQ(cricket::SDR_SUCCESS, result);
+ ASSERT_TRUE(HasSentData(0));
+ EXPECT_EQ(sizeof(padded_data), GetSentData(0).length());
+ EXPECT_EQ(0, memcmp(
+ padded_data, GetSentData(0).data(), sizeof(padded_data)));
+ cricket::RtpHeader header0 = GetSentDataHeader(0);
+ EXPECT_NE(0, header0.seq_num);
+ EXPECT_NE(0U, header0.timestamp);
+ EXPECT_EQ(header0.ssrc, 42U);
+ EXPECT_EQ(header0.payload_type, 103);
+
+ // Should bump timestamp by 180000 because the clock rate is 90khz.
+ SetNow(2);
+
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ ASSERT_TRUE(HasSentData(1));
+ EXPECT_EQ(sizeof(padded_data), GetSentData(1).length());
+ EXPECT_EQ(0, memcmp(
+ padded_data, GetSentData(1).data(), sizeof(padded_data)));
+ cricket::RtpHeader header1 = GetSentDataHeader(1);
+ EXPECT_EQ(header1.ssrc, 42U);
+ EXPECT_EQ(header1.payload_type, 103);
+ EXPECT_EQ(static_cast<uint16_t>(header0.seq_num + 1),
+ static_cast<uint16_t>(header1.seq_num));
+ EXPECT_EQ(header0.timestamp + 180000, header1.timestamp);
+}
+
+TEST_F(RtpDataMediaChannelTest, SendDataRate) {
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ ASSERT_TRUE(dmc->SetSend(true));
+
+ cricket::DataCodec codec;
+ codec.id = 103;
+ codec.name = cricket::kGoogleRtpDataCodecName;
+ cricket::DataSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ ASSERT_TRUE(dmc->SetSendParameters(parameters));
+
+ cricket::StreamParams stream;
+ stream.add_ssrc(42);
+ ASSERT_TRUE(dmc->AddSendStream(stream));
+
+ cricket::SendDataParams params;
+ params.ssrc = 42;
+ unsigned char data[] = "food";
+ rtc::CopyOnWriteBuffer payload(data, 4);
+ cricket::SendDataResult result;
+
+ // With rtp overhead of 32 bytes, each one of our packets is 36
+ // bytes, or 288 bits. So, a limit of 872bps will allow 3 packets,
+ // but not four.
+ parameters.max_bandwidth_bps = 872;
+ ASSERT_TRUE(dmc->SetSendParameters(parameters));
+
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+
+ SetNow(0.9);
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+
+ SetNow(1.1);
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ SetNow(1.9);
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+
+ SetNow(2.2);
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_TRUE(dmc->SendData(params, payload, &result));
+ EXPECT_FALSE(dmc->SendData(params, payload, &result));
+}
+
+TEST_F(RtpDataMediaChannelTest, ReceiveData) {
+ // PT = 103, SN = 2, TS = 3, SSRC = 42, data = "abcde"
+ unsigned char data[] = {
+ 0x80, 0x67, 0x00, 0x02, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x2A,
+ 0x00, 0x00, 0x00, 0x00,
+ 'a', 'b', 'c', 'd', 'e'
+ };
+ rtc::CopyOnWriteBuffer packet(data, sizeof(data));
+
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ // SetReceived not called.
+ dmc->OnPacketReceived(&packet, rtc::PacketTime());
+ EXPECT_FALSE(HasReceivedData());
+
+ dmc->SetReceive(true);
+
+ // Unknown payload id
+ dmc->OnPacketReceived(&packet, rtc::PacketTime());
+ EXPECT_FALSE(HasReceivedData());
+
+ cricket::DataCodec codec;
+ codec.id = 103;
+ codec.name = cricket::kGoogleRtpDataCodecName;
+ cricket::DataRecvParameters parameters;
+ parameters.codecs.push_back(codec);
+ ASSERT_TRUE(dmc->SetRecvParameters(parameters));
+
+ // Unknown stream
+ dmc->OnPacketReceived(&packet, rtc::PacketTime());
+ EXPECT_FALSE(HasReceivedData());
+
+ cricket::StreamParams stream;
+ stream.add_ssrc(42);
+ ASSERT_TRUE(dmc->AddRecvStream(stream));
+
+ // Finally works!
+ dmc->OnPacketReceived(&packet, rtc::PacketTime());
+ EXPECT_TRUE(HasReceivedData());
+ EXPECT_EQ("abcde", GetReceivedData());
+ EXPECT_EQ(5U, GetReceivedDataLen());
+}
+
+TEST_F(RtpDataMediaChannelTest, InvalidRtpPackets) {
+ unsigned char data[] = {
+ 0x80, 0x65, 0x00, 0x02
+ };
+ rtc::CopyOnWriteBuffer packet(data, sizeof(data));
+
+ std::unique_ptr<cricket::RtpDataMediaChannel> dmc(CreateChannel());
+
+ // Too short
+ dmc->OnPacketReceived(&packet, rtc::PacketTime());
+ EXPECT_FALSE(HasReceivedData());
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/rtputils.cc b/third_party/libwebrtc/webrtc/media/base/rtputils.cc
new file mode 100644
index 0000000000..d0ba1cf72b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtputils.cc
@@ -0,0 +1,473 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/rtputils.h"
+
+// PacketTimeUpdateParams is defined in asyncpacketsocket.h.
+// TODO(sergeyu): Find more appropriate place for PacketTimeUpdateParams.
+#include "media/base/turnutils.h"
+#include "rtc_base/asyncpacketsocket.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/messagedigest.h"
+
+namespace cricket {
+
+static const uint8_t kRtpVersion = 2;
+static const size_t kRtpFlagsOffset = 0;
+static const size_t kRtpPayloadTypeOffset = 1;
+static const size_t kRtpSeqNumOffset = 2;
+static const size_t kRtpTimestampOffset = 4;
+static const size_t kRtpSsrcOffset = 8;
+static const size_t kRtcpPayloadTypeOffset = 1;
+static const size_t kRtpExtensionHeaderLen = 4;
+static const size_t kAbsSendTimeExtensionLen = 3;
+static const size_t kOneByteExtensionHeaderLen = 1;
+
+namespace {
+
+// Fake auth tag written by the sender when external authentication is enabled.
+// HMAC in packet will be compared against this value before updating packet
+// with actual HMAC value.
+static const uint8_t kFakeAuthTag[10] = {
+ 0xba, 0xdd, 0xba, 0xdd, 0xba, 0xdd, 0xba, 0xdd, 0xba, 0xdd
+};
+
+void UpdateAbsSendTimeExtensionValue(uint8_t* extension_data,
+ size_t length,
+ uint64_t time_us) {
+ // Absolute send time in RTP streams.
+ //
+ // The absolute send time is signaled to the receiver in-band using the
+ // general mechanism for RTP header extensions [RFC5285]. The payload
+ // of this extension (the transmitted value) is a 24-bit unsigned integer
+ // containing the sender's current time in seconds as a fixed point number
+ // with 18 bits fractional part.
+ //
+ // The form of the absolute send time extension block:
+ //
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | len=2 | absolute send time |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ if (length != kAbsSendTimeExtensionLen) {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ // Convert microseconds to a 6.18 fixed point value in seconds.
+ uint32_t send_time = ((time_us << 18) / 1000000) & 0x00FFFFFF;
+ extension_data[0] = static_cast<uint8_t>(send_time >> 16);
+ extension_data[1] = static_cast<uint8_t>(send_time >> 8);
+ extension_data[2] = static_cast<uint8_t>(send_time);
+}
+
+// Assumes |length| is actual packet length + tag length. Updates HMAC at end of
+// the RTP packet.
+void UpdateRtpAuthTag(uint8_t* rtp,
+ size_t length,
+ const rtc::PacketTimeUpdateParams& packet_time_params) {
+ // If there is no key, return.
+ if (packet_time_params.srtp_auth_key.empty()) {
+ return;
+ }
+
+ size_t tag_length = packet_time_params.srtp_auth_tag_len;
+
+ // ROC (rollover counter) is at the beginning of the auth tag.
+ const size_t kRocLength = 4;
+ if (tag_length < kRocLength || tag_length > length) {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ uint8_t* auth_tag = rtp + (length - tag_length);
+
+ // We should have a fake HMAC value @ auth_tag.
+ RTC_DCHECK_EQ(0, memcmp(auth_tag, kFakeAuthTag, tag_length));
+
+ // Copy ROC after end of rtp packet.
+ memcpy(auth_tag, &packet_time_params.srtp_packet_index, kRocLength);
+ // Authentication of a RTP packet will have RTP packet + ROC size.
+ size_t auth_required_length = length - tag_length + kRocLength;
+
+ uint8_t output[64];
+ size_t result = rtc::ComputeHmac(
+ rtc::DIGEST_SHA_1, &packet_time_params.srtp_auth_key[0],
+ packet_time_params.srtp_auth_key.size(), rtp,
+ auth_required_length, output, sizeof(output));
+
+ if (result < tag_length) {
+ RTC_NOTREACHED();
+ return;
+ }
+
+ // Copy HMAC from output to packet. This is required as auth tag length
+ // may not be equal to the actual HMAC length.
+ memcpy(auth_tag, output, tag_length);
+}
+
+} // namespace
+
+bool GetUint8(const void* data, size_t offset, int* value) {
+ if (!data || !value) {
+ return false;
+ }
+ *value = *(static_cast<const uint8_t*>(data) + offset);
+ return true;
+}
+
+bool GetUint16(const void* data, size_t offset, int* value) {
+ if (!data || !value) {
+ return false;
+ }
+ *value = static_cast<int>(
+ rtc::GetBE16(static_cast<const uint8_t*>(data) + offset));
+ return true;
+}
+
+bool GetUint32(const void* data, size_t offset, uint32_t* value) {
+ if (!data || !value) {
+ return false;
+ }
+ *value = rtc::GetBE32(static_cast<const uint8_t*>(data) + offset);
+ return true;
+}
+
+bool SetUint8(void* data, size_t offset, uint8_t value) {
+ if (!data) {
+ return false;
+ }
+ rtc::Set8(data, offset, value);
+ return true;
+}
+
+bool SetUint16(void* data, size_t offset, uint16_t value) {
+ if (!data) {
+ return false;
+ }
+ rtc::SetBE16(static_cast<uint8_t*>(data) + offset, value);
+ return true;
+}
+
+bool SetUint32(void* data, size_t offset, uint32_t value) {
+ if (!data) {
+ return false;
+ }
+ rtc::SetBE32(static_cast<uint8_t*>(data) + offset, value);
+ return true;
+}
+
+bool GetRtpFlags(const void* data, size_t len, int* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint8(data, kRtpFlagsOffset, value);
+}
+
+bool GetRtpPayloadType(const void* data, size_t len, int* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ if (!GetUint8(data, kRtpPayloadTypeOffset, value)) {
+ return false;
+ }
+ *value &= 0x7F;
+ return true;
+}
+
+bool GetRtpSeqNum(const void* data, size_t len, int* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint16(data, kRtpSeqNumOffset, value);
+}
+
+bool GetRtpTimestamp(const void* data, size_t len, uint32_t* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint32(data, kRtpTimestampOffset, value);
+}
+
+bool GetRtpSsrc(const void* data, size_t len, uint32_t* value) {
+ if (len < kMinRtpPacketLen) {
+ return false;
+ }
+ return GetUint32(data, kRtpSsrcOffset, value);
+}
+
+bool GetRtpHeaderLen(const void* data, size_t len, size_t* value) {
+ if (!data || len < kMinRtpPacketLen || !value) return false;
+ const uint8_t* header = static_cast<const uint8_t*>(data);
+ // Get base header size + length of CSRCs (not counting extension yet).
+ size_t header_size = kMinRtpPacketLen + (header[0] & 0xF) * sizeof(uint32_t);
+ if (len < header_size) return false;
+ // If there's an extension, read and add in the extension size.
+ if (header[0] & 0x10) {
+ if (len < header_size + sizeof(uint32_t))
+ return false;
+ header_size +=
+ ((rtc::GetBE16(header + header_size + 2) + 1) * sizeof(uint32_t));
+ if (len < header_size) return false;
+ }
+ *value = header_size;
+ return true;
+}
+
+bool GetRtpHeader(const void* data, size_t len, RtpHeader* header) {
+ return (GetRtpPayloadType(data, len, &(header->payload_type)) &&
+ GetRtpSeqNum(data, len, &(header->seq_num)) &&
+ GetRtpTimestamp(data, len, &(header->timestamp)) &&
+ GetRtpSsrc(data, len, &(header->ssrc)));
+}
+
+bool GetRtcpType(const void* data, size_t len, int* value) {
+ if (len < kMinRtcpPacketLen) {
+ return false;
+ }
+ return GetUint8(data, kRtcpPayloadTypeOffset, value);
+}
+
+// This method returns the first SSRC of the RTCP packet, except if the packet is SDES.
+// TODO(mallinath) - Fully implement RFC 5506. This standard doesn't restrict
+// to send non-compound packets only to feedback messages.
+bool GetRtcpSsrc(const void* data, size_t len, uint32_t* value) {
+ // Packet should be at least of 8 bytes, to get SSRC from a RTCP packet.
+ if (!data || len < kMinRtcpPacketLen + 4 || !value) return false;
+ int pl_type;
+ if (!GetRtcpType(data, len, &pl_type)) return false;
+ // SDES packet parsing is not supported.
+ if (pl_type == kRtcpTypeSDES) return false;
+ *value = rtc::GetBE32(static_cast<const uint8_t*>(data) + 4);
+ return true;
+}
+
+bool SetRtpSsrc(void* data, size_t len, uint32_t value) {
+ return SetUint32(data, kRtpSsrcOffset, value);
+}
+
+// Assumes version 2, no padding, no extensions, no csrcs.
+bool SetRtpHeader(void* data, size_t len, const RtpHeader& header) {
+ if (!IsValidRtpPayloadType(header.payload_type) ||
+ header.seq_num < 0 || header.seq_num > UINT16_MAX) {
+ return false;
+ }
+ return (SetUint8(data, kRtpFlagsOffset, kRtpVersion << 6) &&
+ SetUint8(data, kRtpPayloadTypeOffset, header.payload_type & 0x7F) &&
+ SetUint16(data, kRtpSeqNumOffset,
+ static_cast<uint16_t>(header.seq_num)) &&
+ SetUint32(data, kRtpTimestampOffset, header.timestamp) &&
+ SetRtpSsrc(data, len, header.ssrc));
+}
+
+bool IsRtpPacket(const void* data, size_t len) {
+ if (len < kMinRtpPacketLen)
+ return false;
+
+ return (static_cast<const uint8_t*>(data)[0] >> 6) == kRtpVersion;
+}
+
+bool IsValidRtpPayloadType(int payload_type) {
+ return payload_type >= 0 && payload_type <= 127;
+}
+
+bool IsValidRtpRtcpPacketSize(bool rtcp, size_t size) {
+ return (rtcp ? size >= kMinRtcpPacketLen : size >= kMinRtpPacketLen) &&
+ size <= kMaxRtpPacketLen;
+}
+
+const char* RtpRtcpStringLiteral(bool rtcp) {
+ return rtcp ? "RTCP" : "RTP";
+}
+
+bool ValidateRtpHeader(const uint8_t* rtp,
+ size_t length,
+ size_t* header_length) {
+ if (header_length) {
+ *header_length = 0;
+ }
+
+ if (length < kMinRtpPacketLen) {
+ return false;
+ }
+
+ size_t cc_count = rtp[0] & 0x0F;
+ size_t header_length_without_extension = kMinRtpPacketLen + 4 * cc_count;
+ if (header_length_without_extension > length) {
+ return false;
+ }
+
+ // If extension bit is not set, we are done with header processing, as input
+ // length is verified above.
+ if (!(rtp[0] & 0x10)) {
+ if (header_length)
+ *header_length = header_length_without_extension;
+
+ return true;
+ }
+
+ rtp += header_length_without_extension;
+
+ if (header_length_without_extension + kRtpExtensionHeaderLen > length) {
+ return false;
+ }
+
+ // Getting extension profile length.
+ // Length is in 32 bit words.
+ uint16_t extension_length_in_32bits = rtc::GetBE16(rtp + 2);
+ size_t extension_length = extension_length_in_32bits * 4;
+
+ size_t rtp_header_length = extension_length +
+ header_length_without_extension +
+ kRtpExtensionHeaderLen;
+
+ // Verify input length against total header size.
+ if (rtp_header_length > length) {
+ return false;
+ }
+
+ if (header_length) {
+ *header_length = rtp_header_length;
+ }
+ return true;
+}
+
+// ValidateRtpHeader() must be called before this method to make sure, we have
+// a sane rtp packet.
+bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
+ size_t length,
+ int extension_id,
+ uint64_t time_us) {
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // |V=2|P|X| CC |M| PT | sequence number |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | timestamp |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | synchronization source (SSRC) identifier |
+ // +=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
+ // | contributing source (CSRC) identifiers |
+ // | .... |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+ // Return if extension bit is not set.
+ if (!(rtp[0] & 0x10)) {
+ return true;
+ }
+
+ size_t cc_count = rtp[0] & 0x0F;
+ size_t header_length_without_extension = kMinRtpPacketLen + 4 * cc_count;
+
+ rtp += header_length_without_extension;
+
+ // Getting extension profile ID and length.
+ uint16_t profile_id = rtc::GetBE16(rtp);
+ // Length is in 32 bit words.
+ uint16_t extension_length_in_32bits = rtc::GetBE16(rtp + 2);
+ size_t extension_length = extension_length_in_32bits * 4;
+
+ rtp += kRtpExtensionHeaderLen; // Moving past extension header.
+
+ bool found = false;
+ // WebRTC is using one byte header extension.
+ // TODO(mallinath) - Handle two byte header extension.
+ if (profile_id == 0xBEDE) { // OneByte extension header
+ // 0
+ // 0 1 2 3 4 5 6 7
+ // +-+-+-+-+-+-+-+-+
+ // | ID |length |
+ // +-+-+-+-+-+-+-+-+
+
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | 0xBE | 0xDE | length=3 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | ID | L=0 | data | ID | L=1 | data...
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // ...data | 0 (pad) | 0 (pad) | ID | L=3 |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | data |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ const uint8_t* extension_start = rtp;
+ const uint8_t* extension_end = extension_start + extension_length;
+
+ while (rtp < extension_end) {
+ const int id = (*rtp & 0xF0) >> 4;
+ const size_t length = (*rtp & 0x0F) + 1;
+ if (rtp + kOneByteExtensionHeaderLen + length > extension_end) {
+ return false;
+ }
+ // The 4-bit length is the number minus one of data bytes of this header
+ // extension element following the one-byte header.
+ if (id == extension_id) {
+ UpdateAbsSendTimeExtensionValue(rtp + kOneByteExtensionHeaderLen,
+ length, time_us);
+ found = true;
+ break;
+ }
+ rtp += kOneByteExtensionHeaderLen + length;
+ // Counting padding bytes.
+ while ((rtp < extension_end) && (*rtp == 0)) {
+ ++rtp;
+ }
+ }
+ }
+ return found;
+}
+
+bool ApplyPacketOptions(uint8_t* data,
+ size_t length,
+ const rtc::PacketTimeUpdateParams& packet_time_params,
+ uint64_t time_us) {
+ RTC_DCHECK(data);
+ RTC_DCHECK(length);
+
+ // if there is no valid |rtp_sendtime_extension_id| and |srtp_auth_key| in
+ // PacketOptions, nothing to be updated in this packet.
+ if (packet_time_params.rtp_sendtime_extension_id == -1 &&
+ packet_time_params.srtp_auth_key.empty()) {
+ return true;
+ }
+
+ // If there is a srtp auth key present then the packet must be an RTP packet.
+ // RTP packet may have been wrapped in a TURN Channel Data or TURN send
+ // indication.
+ size_t rtp_start_pos;
+ size_t rtp_length;
+ if (!UnwrapTurnPacket(data, length, &rtp_start_pos, &rtp_length)) {
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ // Making sure we have a valid RTP packet at the end.
+ if (!IsRtpPacket(data + rtp_start_pos, rtp_length) ||
+ !ValidateRtpHeader(data + rtp_start_pos, rtp_length, nullptr)) {
+ RTC_NOTREACHED();
+ return false;
+ }
+
+ uint8_t* start = data + rtp_start_pos;
+ // If packet option has non default value (-1) for sendtime extension id,
+ // then we should parse the rtp packet to update the timestamp. Otherwise
+ // just calculate HMAC and update packet with it.
+ if (packet_time_params.rtp_sendtime_extension_id != -1) {
+ UpdateRtpAbsSendTimeExtension(start, rtp_length,
+ packet_time_params.rtp_sendtime_extension_id,
+ time_us);
+ }
+
+ UpdateRtpAuthTag(start, rtp_length, packet_time_params);
+ return true;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/rtputils.h b/third_party/libwebrtc/webrtc/media/base/rtputils.h
new file mode 100644
index 0000000000..0b7205cf8f
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtputils.h
@@ -0,0 +1,90 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_RTPUTILS_H_
+#define MEDIA_BASE_RTPUTILS_H_
+
+#include "rtc_base/byteorder.h"
+
+namespace rtc {
+struct PacketTimeUpdateParams;
+} // namespace rtc
+
+namespace cricket {
+
+const size_t kMinRtpPacketLen = 12;
+const size_t kMaxRtpPacketLen = 2048;
+const size_t kMinRtcpPacketLen = 4;
+
+struct RtpHeader {
+ int payload_type;
+ int seq_num;
+ uint32_t timestamp;
+ uint32_t ssrc;
+};
+
+enum RtcpTypes {
+ kRtcpTypeSR = 200, // Sender report payload type.
+ kRtcpTypeRR = 201, // Receiver report payload type.
+ kRtcpTypeSDES = 202, // SDES payload type.
+ kRtcpTypeBye = 203, // BYE payload type.
+ kRtcpTypeApp = 204, // APP payload type.
+ kRtcpTypeRTPFB = 205, // Transport layer Feedback message payload type.
+ kRtcpTypePSFB = 206, // Payload-specific Feedback message payload type.
+};
+
+bool GetRtpPayloadType(const void* data, size_t len, int* value);
+bool GetRtpSeqNum(const void* data, size_t len, int* value);
+bool GetRtpTimestamp(const void* data, size_t len, uint32_t* value);
+bool GetRtpSsrc(const void* data, size_t len, uint32_t* value);
+bool GetRtpHeaderLen(const void* data, size_t len, size_t* value);
+bool GetRtcpType(const void* data, size_t len, int* value);
+bool GetRtcpSsrc(const void* data, size_t len, uint32_t* value);
+bool GetRtpHeader(const void* data, size_t len, RtpHeader* header);
+
+bool SetRtpSsrc(void* data, size_t len, uint32_t value);
+// Assumes version 2, no padding, no extensions, no csrcs.
+bool SetRtpHeader(void* data, size_t len, const RtpHeader& header);
+
+bool IsRtpPacket(const void* data, size_t len);
+
+// True if |payload_type| is 0-127.
+bool IsValidRtpPayloadType(int payload_type);
+
+// True if |size| is appropriate for the indicated packet type.
+bool IsValidRtpRtcpPacketSize(bool rtcp, size_t size);
+
+// TODO(zstein): Consider using an enum instead of a bool to differentiate
+// between RTP and RTCP.
+// Returns "RTCP" or "RTP" according to |rtcp|.
+const char* RtpRtcpStringLiteral(bool rtcp);
+
+// Verifies that a packet has a valid RTP header.
+bool ValidateRtpHeader(const uint8_t* rtp,
+ size_t length,
+ size_t* header_length);
+
+// Helper method which updates the absolute send time extension if present.
+bool UpdateRtpAbsSendTimeExtension(uint8_t* rtp,
+ size_t length,
+ int extension_id,
+ uint64_t time_us);
+
+// Applies specified |options| to the packet. It updates the absolute send time
+// extension header if it is present, then updates the HMAC.
+bool ApplyPacketOptions(uint8_t* data,
+ size_t length,
+ const rtc::PacketTimeUpdateParams& packet_time_params,
+ uint64_t time_us);
+
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_RTPUTILS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc b/third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc
new file mode 100644
index 0000000000..a71eac7a07
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/rtputils_unittest.cc
@@ -0,0 +1,353 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "media/base/rtputils.h"
+#include "media/base/fakertp.h"
+#include "rtc_base/asyncpacketsocket.h"
+#include "rtc_base/gunit.h"
+
+namespace cricket {
+
// Minimal 12-byte RTP header: version 2, marker bit set, seq num 1, ssrc 1.
static const uint8_t kRtpPacketWithMarker[] = {
    0x80, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01
};
// 3 CSRCs (0x01020304, 0x12345678, 0xAABBCCDD)
// Extension (0xBEDE, 0x1122334455667788)
static const uint8_t kRtpPacketWithMarkerAndCsrcAndExtension[] = {
    0x93, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
    0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC, 0xDD,
    0xBE, 0xDE, 0x00, 0x02, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88
};
// Too short to hold even a fixed RTP header.
static const uint8_t kInvalidPacket[] = { 0x80, 0x00 };
// CC count is 3 but the last CSRC is truncated by one byte.
static const uint8_t kInvalidPacketWithCsrc[] = {
    0x83, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
    0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC
};
// Extension header is cut off before its length field.
static const uint8_t kInvalidPacketWithCsrcAndExtension1[] = {
    0x93, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
    0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC, 0xDD,
    0xBE, 0xDE, 0x00
};
// Extension declares 2 words of data but only 7 bytes follow.
static const uint8_t kInvalidPacketWithCsrcAndExtension2[] = {
    0x93, 0x80, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
    0x01, 0x02, 0x03, 0x04, 0x12, 0x34, 0x56, 0x78, 0xAA, 0xBB, 0xCC, 0xDD,
    0xBE, 0xDE, 0x00, 0x02, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77
};

// PT = 206, FMT = 1, Sender SSRC = 0x1111, Media SSRC = 0x1111
// No FCI information is needed for PLI.
static const uint8_t kNonCompoundRtcpPliFeedbackPacket[] = {
    0x81, 0xCE, 0x00, 0x0C, 0x00, 0x00, 0x11, 0x11, 0x00, 0x00, 0x11, 0x11
};

// Packet has only mandatory fixed RTCP header
// PT = 204, SSRC = 0x1111
static const uint8_t kNonCompoundRtcpAppPacket[] = {
    0x81, 0xCC, 0x00, 0x0C, 0x00, 0x00, 0x11, 0x11
};

// PT = 202, Source count = 0
static const uint8_t kNonCompoundRtcpSDESPacket[] = {
    0x80, 0xCA, 0x00, 0x00
};

// Placeholder auth tag appended to packets; tests check whether it changes.
static uint8_t kFakeTag[4] = { 0xba, 0xdd, 0xba, 0xdd };
// NOTE(review): sizeof(kTestKey) includes the trailing NUL, so the key used
// below is 21 bytes — the expected HMAC tags were computed with that key.
static uint8_t kTestKey[] = "12345678901234567890";
// Original AbsSendTime payload bytes in kRtpMsgWithAbsSendTimeExtension.
static uint8_t kTestAstValue[3] = { 0xaa, 0xbb, 0xcc };

// Valid rtp Message with 2 byte header extension.
static uint8_t kRtpMsgWith2ByteExtnHeader[] = {
    0x90, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0xAA, 0xBB, 0xCC, 0XDD,  // SSRC
    0x10, 0x00, 0x00, 0x01,  // 2 Byte header extension
    0x01, 0x00, 0x00, 0x00
};

// RTP packet with single byte extension header of length 4 bytes.
// Extension id = 3 and length = 3
static uint8_t kRtpMsgWithAbsSendTimeExtension[] = {
    0x90, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0xBE, 0xDE, 0x00, 0x02,
    0x22, 0x00, 0x02, 0x1c,
    0x32, 0xaa, 0xbb, 0xcc,
};

// Index of AbsSendTimeExtn data in message |kRtpMsgWithAbsSendTimeExtension|.
static const int kAstIndexInRtpMsg = 21;
+
+TEST(RtpUtilsTest, GetRtp) {
+ EXPECT_TRUE(IsRtpPacket(kPcmuFrame, sizeof(kPcmuFrame)));
+
+ int pt;
+ EXPECT_TRUE(GetRtpPayloadType(kPcmuFrame, sizeof(kPcmuFrame), &pt));
+ EXPECT_EQ(0, pt);
+ EXPECT_TRUE(GetRtpPayloadType(kRtpPacketWithMarker,
+ sizeof(kRtpPacketWithMarker), &pt));
+ EXPECT_EQ(0, pt);
+
+ int seq_num;
+ EXPECT_TRUE(GetRtpSeqNum(kPcmuFrame, sizeof(kPcmuFrame), &seq_num));
+ EXPECT_EQ(1, seq_num);
+
+ uint32_t ts;
+ EXPECT_TRUE(GetRtpTimestamp(kPcmuFrame, sizeof(kPcmuFrame), &ts));
+ EXPECT_EQ(0u, ts);
+
+ uint32_t ssrc;
+ EXPECT_TRUE(GetRtpSsrc(kPcmuFrame, sizeof(kPcmuFrame), &ssrc));
+ EXPECT_EQ(1u, ssrc);
+
+ RtpHeader header;
+ EXPECT_TRUE(GetRtpHeader(kPcmuFrame, sizeof(kPcmuFrame), &header));
+ EXPECT_EQ(0, header.payload_type);
+ EXPECT_EQ(1, header.seq_num);
+ EXPECT_EQ(0u, header.timestamp);
+ EXPECT_EQ(1u, header.ssrc);
+
+ EXPECT_FALSE(GetRtpPayloadType(kInvalidPacket, sizeof(kInvalidPacket), &pt));
+ EXPECT_FALSE(GetRtpSeqNum(kInvalidPacket, sizeof(kInvalidPacket), &seq_num));
+ EXPECT_FALSE(GetRtpTimestamp(kInvalidPacket, sizeof(kInvalidPacket), &ts));
+ EXPECT_FALSE(GetRtpSsrc(kInvalidPacket, sizeof(kInvalidPacket), &ssrc));
+}
+
+TEST(RtpUtilsTest, SetRtpHeader) {
+ uint8_t packet[] = {
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
+ };
+
+ RtpHeader header = { 9, 1111, 2222u, 3333u };
+ EXPECT_TRUE(SetRtpHeader(packet, sizeof(packet), header));
+
+ // Bits: 10 0 0 0000
+ EXPECT_EQ(128u, packet[0]);
+ size_t len;
+ EXPECT_TRUE(GetRtpHeaderLen(packet, sizeof(packet), &len));
+ EXPECT_EQ(12U, len);
+ EXPECT_TRUE(GetRtpHeader(packet, sizeof(packet), &header));
+ EXPECT_EQ(9, header.payload_type);
+ EXPECT_EQ(1111, header.seq_num);
+ EXPECT_EQ(2222u, header.timestamp);
+ EXPECT_EQ(3333u, header.ssrc);
+}
+
+TEST(RtpUtilsTest, GetRtpHeaderLen) {
+ size_t len;
+ EXPECT_TRUE(GetRtpHeaderLen(kPcmuFrame, sizeof(kPcmuFrame), &len));
+ EXPECT_EQ(12U, len);
+
+ EXPECT_TRUE(GetRtpHeaderLen(kRtpPacketWithMarkerAndCsrcAndExtension,
+ sizeof(kRtpPacketWithMarkerAndCsrcAndExtension),
+ &len));
+ EXPECT_EQ(sizeof(kRtpPacketWithMarkerAndCsrcAndExtension), len);
+
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacket, sizeof(kInvalidPacket), &len));
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacketWithCsrc,
+ sizeof(kInvalidPacketWithCsrc), &len));
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacketWithCsrcAndExtension1,
+ sizeof(kInvalidPacketWithCsrcAndExtension1),
+ &len));
+ EXPECT_FALSE(GetRtpHeaderLen(kInvalidPacketWithCsrcAndExtension2,
+ sizeof(kInvalidPacketWithCsrcAndExtension2),
+ &len));
+}
+
+TEST(RtpUtilsTest, GetRtcp) {
+ int pt;
+ EXPECT_TRUE(GetRtcpType(kRtcpReport, sizeof(kRtcpReport), &pt));
+ EXPECT_EQ(0xc9, pt);
+
+ EXPECT_FALSE(GetRtcpType(kInvalidPacket, sizeof(kInvalidPacket), &pt));
+
+ uint32_t ssrc;
+ EXPECT_TRUE(GetRtcpSsrc(kNonCompoundRtcpPliFeedbackPacket,
+ sizeof(kNonCompoundRtcpPliFeedbackPacket),
+ &ssrc));
+ EXPECT_TRUE(GetRtcpSsrc(kNonCompoundRtcpAppPacket,
+ sizeof(kNonCompoundRtcpAppPacket),
+ &ssrc));
+ EXPECT_FALSE(GetRtcpSsrc(kNonCompoundRtcpSDESPacket,
+ sizeof(kNonCompoundRtcpSDESPacket),
+ &ssrc));
+}
+
// Invalid RTP packets: ValidateRtpHeader must reject headers whose declared
// CSRC count or extension length exceeds the actual buffer size.
TEST(RtpUtilsTest, InvalidRtpHeader) {
  // Rtp message with invalid length.
  const uint8_t kRtpMsgWithInvalidLength[] = {
      0x94, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
      0xAA, 0xBB, 0xCC, 0XDD,  // SSRC
      0xDD, 0xCC, 0xBB, 0xAA,  // Only 1 CSRC, but CC count is 4.
  };
  EXPECT_FALSE(ValidateRtpHeader(kRtpMsgWithInvalidLength,
                                 sizeof(kRtpMsgWithInvalidLength), nullptr));

  // Rtp message with single byte header extension, invalid extension length.
  const uint8_t kRtpMsgWithInvalidExtnLength[] = {
      0x90, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0xBE, 0xDE, 0x0A, 0x00,  // Extn length - 0x0A00
  };
  EXPECT_FALSE(ValidateRtpHeader(kRtpMsgWithInvalidExtnLength,
                                 sizeof(kRtpMsgWithInvalidExtnLength),
                                 nullptr));
}
+
// Valid RTP packet with a 2byte header extension.
TEST(RtpUtilsTest, Valid2ByteExtnHdrRtpMessage) {
  EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWith2ByteExtnHeader,
                                sizeof(kRtpMsgWith2ByteExtnHeader), nullptr));
}

// Valid RTP packet which has 1 byte header AbsSendTime extension in it.
TEST(RtpUtilsTest, ValidRtpPacketWithAbsSendTimeExtension) {
  EXPECT_TRUE(ValidateRtpHeader(kRtpMsgWithAbsSendTimeExtension,
                                sizeof(kRtpMsgWithAbsSendTimeExtension),
                                nullptr));
}
+
+// Verify handling of a 2 byte extension header RTP messsage. Currently these
+// messages are not supported.
+TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionIn2ByteHeaderExtn) {
+ std::vector<uint8_t> data(
+ kRtpMsgWith2ByteExtnHeader,
+ kRtpMsgWith2ByteExtnHeader + sizeof(kRtpMsgWith2ByteExtnHeader));
+ EXPECT_FALSE(UpdateRtpAbsSendTimeExtension(&data[0], data.size(), 3, 0));
+}
+
// Verify finding an extension ID in the TURN send indication message, i.e.
// UpdateRtpAbsSendTimeExtension must also work when the RTP packet is
// wrapped inside a STUN data attribute.
TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInTurnSendIndication) {
  // A valid STUN indication message with a valid RTP header in data attribute
  // payload field and no extension bit set.
  uint8_t message_without_extension[] = {
      0x00, 0x16, 0x00, 0x18,  // length of
      0x21, 0x12, 0xA4, 0x42,  // magic cookie
      '0',  '1',  '2',  '3',   // transaction id
      '4',  '5',  '6',  '7',
      '8',  '9',  'a',  'b',
      0x00, 0x20, 0x00, 0x04,  // Mapped address.
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x13, 0x00, 0x0C,  // Data attribute.
      0x80, 0x00, 0x00, 0x00,  // RTP packet.
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
  };
  // No extension present: the call is a no-op that still reports success.
  EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(
      message_without_extension, sizeof(message_without_extension), 3, 0));

  // A valid STUN indication message with a valid RTP header and a extension
  // header.
  uint8_t message[] = {
      0x00, 0x16, 0x00, 0x24,  // length of
      0x21, 0x12, 0xA4, 0x42,  // magic cookie
      '0',  '1',  '2',  '3',   // transaction id
      '4',  '5',  '6',  '7',
      '8',  '9',  'a',  'b',
      0x00, 0x20, 0x00, 0x04,  // Mapped address.
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x13, 0x00, 0x18,  // Data attribute.
      0x90, 0x00, 0x00, 0x00,  // RTP packet.
      0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xBE, 0xDE,
      0x00, 0x02, 0x22, 0xaa, 0xbb, 0xcc, 0x32, 0xaa, 0xbb, 0xcc,
  };
  EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(message, sizeof(message), 3, 0));
}
+
// Test without any packet options variables set. This method should return
// without touching either the HMAC tag or the AbsSendTime field.
TEST(RtpUtilsTest, ApplyPacketOptionsWithDefaultValues) {
  rtc::PacketTimeUpdateParams packet_time_params;
  std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
                                  kRtpMsgWithAbsSendTimeExtension +
                                  sizeof(kRtpMsgWithAbsSendTimeExtension));
  rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
  EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
                                 packet_time_params, 0));

  // Making sure HMAC wasn't updated.
  EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
                      kFakeTag, 4));

  // Verify AbsoluteSendTime extension field wasn't modified.
  EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
                      sizeof(kTestAstValue)));
}
+
// Verify HMAC is updated when packet option parameters are set, while the
// AbsSendTime extension (no extension id configured) is left untouched.
TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParams) {
  rtc::PacketTimeUpdateParams packet_time_params;
  packet_time_params.srtp_auth_key.assign(kTestKey,
                                          kTestKey + sizeof(kTestKey));
  packet_time_params.srtp_auth_tag_len = 4;

  std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
                                  kRtpMsgWithAbsSendTimeExtension +
                                  sizeof(kRtpMsgWithAbsSendTimeExtension));
  rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
  EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
                                 packet_time_params, 0));

  // Expected HMAC-SHA1 tag for kTestKey over this exact packet.
  uint8_t kExpectedTag[] = {0xc1, 0x7a, 0x8c, 0xa0};
  EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
                      kExpectedTag, sizeof(kExpectedTag)));

  // Verify AbsoluteSendTime extension field is not modified.
  EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kTestAstValue,
                      sizeof(kTestAstValue)));
}
+
+// Verify finding an extension ID in a raw rtp message.
+TEST(RtpUtilsTest, UpdateAbsSendTimeExtensionInRtpPacket) {
+ std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
+ kRtpMsgWithAbsSendTimeExtension +
+ sizeof(kRtpMsgWithAbsSendTimeExtension));
+
+ EXPECT_TRUE(UpdateRtpAbsSendTimeExtension(&rtp_packet[0], rtp_packet.size(),
+ 3, 51183266));
+
+ // Verify that the timestamp was updated.
+ const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
+ EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
+ sizeof(kExpectedTimestamp)));
+}
+
// Verify we update both AbsSendTime extension header and HMAC.
TEST(RtpUtilsTest, ApplyPacketOptionsWithAuthParamsAndAbsSendTime) {
  rtc::PacketTimeUpdateParams packet_time_params;
  packet_time_params.srtp_auth_key.assign(kTestKey,
                                          kTestKey + sizeof(kTestKey));
  packet_time_params.srtp_auth_tag_len = 4;
  packet_time_params.rtp_sendtime_extension_id = 3;
  // 3 is also present in the test message.

  std::vector<uint8_t> rtp_packet(kRtpMsgWithAbsSendTimeExtension,
                                  kRtpMsgWithAbsSendTimeExtension +
                                  sizeof(kRtpMsgWithAbsSendTimeExtension));
  rtp_packet.insert(rtp_packet.end(), kFakeTag, kFakeTag + sizeof(kFakeTag));
  EXPECT_TRUE(ApplyPacketOptions(&rtp_packet[0], rtp_packet.size(),
                                 packet_time_params, 51183266));

  // The tag differs from the AuthParams-only test because the AbsSendTime
  // bytes were rewritten before the HMAC was computed.
  const uint8_t kExpectedTag[] = {0x81, 0xd1, 0x2c, 0x0e};
  EXPECT_EQ(0, memcmp(&rtp_packet[sizeof(kRtpMsgWithAbsSendTimeExtension)],
                      kExpectedTag, sizeof(kExpectedTag)));

  // Verify that the timestamp was updated.
  const uint8_t kExpectedTimestamp[3] = {0xcc, 0xbb, 0xaa};
  EXPECT_EQ(0, memcmp(&rtp_packet[kAstIndexInRtpMsg], kExpectedTimestamp,
                      sizeof(kExpectedTimestamp)));
}
+
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/streamparams.cc b/third_party/libwebrtc/webrtc/media/base/streamparams.cc
new file mode 100644
index 0000000000..fd61a87ffd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/streamparams.cc
@@ -0,0 +1,268 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/streamparams.h"
+
+#include <list>
+#include <sstream>
+
namespace cricket {
namespace {
// NOTE: There is no check here for duplicate streams, so check before
// adding.
void AddStream(std::vector<StreamParams>* streams, const StreamParams& stream) {
  streams->push_back(stream);
}
}  // namespace

// String constants for the "ssrc-group" semantics (see RFC 5576).
const char kFecSsrcGroupSemantics[] = "FEC";
const char kFecFrSsrcGroupSemantics[] = "FEC-FR";
const char kFidSsrcGroupSemantics[] = "FID";
const char kSimSsrcGroupSemantics[] = "SIM";
+
+bool GetStream(const StreamParamsVec& streams,
+ const StreamSelector& selector,
+ StreamParams* stream_out) {
+ const StreamParams* found = GetStream(streams, selector);
+ if (found && stream_out)
+ *stream_out = *found;
+ return found != nullptr;
+}
+
// Gets the matching audio stream, returning true if found.
bool MediaStreams::GetAudioStream(
    const StreamSelector& selector, StreamParams* stream) {
  return GetStream(audio_, selector, stream);
}

// Gets the matching video stream, returning true if found.
bool MediaStreams::GetVideoStream(
    const StreamSelector& selector, StreamParams* stream) {
  return GetStream(video_, selector, stream);
}

// Gets the matching data stream, returning true if found.
bool MediaStreams::GetDataStream(
    const StreamSelector& selector, StreamParams* stream) {
  return GetStream(data_, selector, stream);
}

// Replaces all three stream lists with copies of |streams|'s lists.
void MediaStreams::CopyFrom(const MediaStreams& streams) {
  audio_ = streams.audio_;
  video_ = streams.video_;
  data_ = streams.data_;
}

// The Add* methods do not check for duplicates; see AddStream above.
void MediaStreams::AddAudioStream(const StreamParams& stream) {
  AddStream(&audio_, stream);
}

void MediaStreams::AddVideoStream(const StreamParams& stream) {
  AddStream(&video_, stream);
}

void MediaStreams::AddDataStream(const StreamParams& stream) {
  AddStream(&data_, stream);
}

// The Remove* methods return true if a matching stream was removed.
bool MediaStreams::RemoveAudioStream(
    const StreamSelector& selector) {
  return RemoveStream(&audio_, selector);
}

bool MediaStreams::RemoveVideoStream(
    const StreamSelector& selector) {
  return RemoveStream(&video_, selector);
}

bool MediaStreams::RemoveDataStream(
    const StreamSelector& selector) {
  return RemoveStream(&data_, selector);
}
+
// Renders |ssrcs| as "ssrcs:[a,b,c]" for logging/debugging.
static std::string SsrcsToString(const std::vector<uint32_t>& ssrcs) {
  std::ostringstream os;
  os << "ssrcs:[";
  const char* separator = "";
  for (uint32_t ssrc : ssrcs) {
    os << separator << ssrc;
    separator = ",";
  }
  os << "]";
  return os.str();
}
+
+bool SsrcGroup::has_semantics(const std::string& semantics_in) const {
+ return (semantics == semantics_in && ssrcs.size() > 0);
+}
+
+std::string SsrcGroup::ToString() const {
+ std::ostringstream ost;
+ ost << "{";
+ ost << "semantics:" << semantics << ";";
+ ost << SsrcsToString(ssrcs);
+ ost << "}";
+ return ost.str();
+}
+
+std::string StreamParams::ToString() const {
+ std::ostringstream ost;
+ ost << "{";
+ if (!groupid.empty()) {
+ ost << "groupid:" << groupid << ";";
+ }
+ if (!id.empty()) {
+ ost << "id:" << id << ";";
+ }
+ ost << SsrcsToString(ssrcs) << ";";
+ ost << "ssrc_groups:";
+ for (std::vector<SsrcGroup>::const_iterator it = ssrc_groups.begin();
+ it != ssrc_groups.end(); ++it) {
+ if (it != ssrc_groups.begin()) {
+ ost << ",";
+ }
+ ost << it->ToString();
+ }
+ ost << ";";
+ if (!type.empty()) {
+ ost << "type:" << type << ";";
+ }
+ if (!display.empty()) {
+ ost << "display:" << display << ";";
+ }
+ if (!cname.empty()) {
+ ost << "cname:" << cname << ";";
+ }
+ if (!sync_label.empty()) {
+ ost << "sync_label:" << sync_label;
+ }
+ ost << "}";
+ return ost.str();
+}
+void StreamParams::GetPrimarySsrcs(std::vector<uint32_t>* ssrcs) const {
+ const SsrcGroup* sim_group = get_ssrc_group(kSimSsrcGroupSemantics);
+ if (sim_group == NULL) {
+ ssrcs->push_back(first_ssrc());
+ } else {
+ for (size_t i = 0; i < sim_group->ssrcs.size(); ++i) {
+ ssrcs->push_back(sim_group->ssrcs[i]);
+ }
+ }
+}
+
+void StreamParams::GetFidSsrcs(const std::vector<uint32_t>& primary_ssrcs,
+ std::vector<uint32_t>* fid_ssrcs) const {
+ for (size_t i = 0; i < primary_ssrcs.size(); ++i) {
+ uint32_t fid_ssrc;
+ if (GetFidSsrc(primary_ssrcs[i], &fid_ssrc)) {
+ fid_ssrcs->push_back(fid_ssrc);
+ }
+ }
+}
+
+bool StreamParams::AddSecondarySsrc(const std::string& semantics,
+ uint32_t primary_ssrc,
+ uint32_t secondary_ssrc) {
+ if (!has_ssrc(primary_ssrc)) {
+ return false;
+ }
+
+ ssrcs.push_back(secondary_ssrc);
+ std::vector<uint32_t> ssrc_vector;
+ ssrc_vector.push_back(primary_ssrc);
+ ssrc_vector.push_back(secondary_ssrc);
+ SsrcGroup ssrc_group = SsrcGroup(semantics, ssrc_vector);
+ ssrc_groups.push_back(ssrc_group);
+ return true;
+}
+
+bool StreamParams::GetSecondarySsrc(const std::string& semantics,
+ uint32_t primary_ssrc,
+ uint32_t* secondary_ssrc) const {
+ for (std::vector<SsrcGroup>::const_iterator it = ssrc_groups.begin();
+ it != ssrc_groups.end(); ++it) {
+ if (it->has_semantics(semantics) &&
+ it->ssrcs.size() >= 2 &&
+ it->ssrcs[0] == primary_ssrc) {
+ *secondary_ssrc = it->ssrcs[1];
+ return true;
+ }
+ }
+ return false;
+}
+
+bool IsOneSsrcStream(const StreamParams& sp) {
+ if (sp.ssrcs.size() == 1 && sp.ssrc_groups.empty()) {
+ return true;
+ }
+ const SsrcGroup* fid_group = sp.get_ssrc_group(kFidSsrcGroupSemantics);
+ const SsrcGroup* fecfr_group = sp.get_ssrc_group(kFecFrSsrcGroupSemantics);
+ if (sp.ssrcs.size() == 2) {
+ if (fid_group != nullptr && sp.ssrcs == fid_group->ssrcs) {
+ return true;
+ }
+ if (fecfr_group != nullptr && sp.ssrcs == fecfr_group->ssrcs) {
+ return true;
+ }
+ }
+ if (sp.ssrcs.size() == 3) {
+ if (fid_group == nullptr || fecfr_group == nullptr) {
+ return false;
+ }
+ if (sp.ssrcs[0] != fid_group->ssrcs[0] ||
+ sp.ssrcs[0] != fecfr_group->ssrcs[0]) {
+ return false;
+ }
+ // We do not check for FlexFEC over RTX,
+ // as this combination is not supported.
+ if (sp.ssrcs[1] == fid_group->ssrcs[1] &&
+ sp.ssrcs[2] == fecfr_group->ssrcs[1]) {
+ return true;
+ }
+ if (sp.ssrcs[1] == fecfr_group->ssrcs[1] &&
+ sp.ssrcs[2] == fid_group->ssrcs[1]) {
+ return true;
+ }
+ }
+ return false;
+}
+
// Erases only the first occurrence of |value| from |ssrcs|, if any;
// duplicates beyond the first are left in place.
static void RemoveFirst(std::list<uint32_t>* ssrcs, uint32_t value) {
  for (auto it = ssrcs->begin(); it != ssrcs->end(); ++it) {
    if (*it == value) {
      ssrcs->erase(it);
      return;
    }
  }
}
+
+bool IsSimulcastStream(const StreamParams& sp) {
+ const SsrcGroup* const sg = sp.get_ssrc_group(kSimSsrcGroupSemantics);
+ if (sg == NULL || sg->ssrcs.size() < 2) {
+ return false;
+ }
+ // Start with all StreamParams SSRCs. Remove simulcast SSRCs (from sg) and
+ // RTX SSRCs. If we still have SSRCs left, we don't know what they're for.
+ // Also we remove first-found SSRCs only. So duplicates should lead to errors.
+ std::list<uint32_t> sp_ssrcs(sp.ssrcs.begin(), sp.ssrcs.end());
+ for (size_t i = 0; i < sg->ssrcs.size(); ++i) {
+ RemoveFirst(&sp_ssrcs, sg->ssrcs[i]);
+ }
+ for (size_t i = 0; i < sp.ssrc_groups.size(); ++i) {
+ const SsrcGroup& group = sp.ssrc_groups[i];
+ if (group.semantics.compare(kFidSsrcGroupSemantics) != 0 ||
+ group.ssrcs.size() != 2) {
+ continue;
+ }
+ RemoveFirst(&sp_ssrcs, group.ssrcs[1]);
+ }
+ // If there's SSRCs left that we don't know how to handle, we bail out.
+ return sp_ssrcs.size() == 0;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/streamparams.h b/third_party/libwebrtc/webrtc/media/base/streamparams.h
new file mode 100644
index 0000000000..1b2ebfa871
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/streamparams.h
@@ -0,0 +1,332 @@
+/*
+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains structures for describing SSRCs from a media source such
+// as a MediaStreamTrack when it is sent across an RTP session. Multiple media
+// sources may be sent across the same RTP session, each of them will be
+// described by one StreamParams object
+// SsrcGroup is used to describe the relationship between the SSRCs that
+// are used for this media source.
+// E.x: Consider a source that is sent as 3 simulcast streams
+// Let the simulcast elements have SSRC 10, 20, 30.
+// Let each simulcast element use FEC and let the protection packets have
+// SSRC 11,21,31.
+// To describe this 4 SsrcGroups are needed,
+// StreamParams would then contain ssrc = {10,11,20,21,30,31} and
+// ssrc_groups = {{SIM,{10,20,30}, {FEC,{10,11}, {FEC, {20,21}, {FEC {30,31}}}
+// Please see RFC 5576.
+
+#ifndef MEDIA_BASE_STREAMPARAMS_H_
+#define MEDIA_BASE_STREAMPARAMS_H_
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "rtc_base/constructormagic.h"
+
+namespace cricket {
+
// "ssrc-group" semantics labels (RFC 5576); values defined in streamparams.cc.
extern const char kFecSsrcGroupSemantics[];    // "FEC"
extern const char kFecFrSsrcGroupSemantics[];  // "FEC-FR"
extern const char kFidSsrcGroupSemantics[];    // "FID" (RTX pairing)
extern const char kSimSsrcGroupSemantics[];    // "SIM" (simulcast layers)
+
// A set of SSRCs related by a named semantics label, e.g. a FID group ties
// a primary SSRC to its RTX SSRC. See RFC 5576.
struct SsrcGroup {
  SsrcGroup(const std::string& usage, const std::vector<uint32_t>& ssrcs)
      : semantics(usage), ssrcs(ssrcs) {}

  bool operator==(const SsrcGroup& other) const {
    return (semantics == other.semantics && ssrcs == other.ssrcs);
  }
  bool operator!=(const SsrcGroup &other) const {
    return !(*this == other);
  }

  // True if |semantics| matches exactly (case-sensitive) and the group holds
  // at least one SSRC.
  bool has_semantics(const std::string& semantics) const;

  std::string ToString() const;

  std::string semantics;  // e.g FID, FEC, SIM.
  std::vector<uint32_t> ssrcs;  // SSRCs of this type.
};
+
// Describes one media source sent over an RTP session: its SSRCs, how they
// relate to each other (SIM/FID/FEC groups), and identifying metadata.
struct StreamParams {
  // Creates a StreamParams with a single SSRC and no group metadata.
  static StreamParams CreateLegacy(uint32_t ssrc) {
    StreamParams stream;
    stream.ssrcs.push_back(ssrc);
    return stream;
  }

  bool operator==(const StreamParams& other) const {
    return (groupid == other.groupid &&
            id == other.id &&
            ssrcs == other.ssrcs &&
            ssrc_groups == other.ssrc_groups &&
            type == other.type &&
            display == other.display &&
            cname == other.cname &&
            sync_label == other.sync_label);
  }
  bool operator!=(const StreamParams &other) const {
    return !(*this == other);
  }

  // Returns the first SSRC, or 0 if none have been added.
  uint32_t first_ssrc() const {
    if (ssrcs.empty()) {
      return 0;
    }

    return ssrcs[0];
  }
  bool has_ssrcs() const {
    return !ssrcs.empty();
  }
  bool has_ssrc(uint32_t ssrc) const {
    return std::find(ssrcs.begin(), ssrcs.end(), ssrc) != ssrcs.end();
  }
  void add_ssrc(uint32_t ssrc) { ssrcs.push_back(ssrc); }
  bool has_ssrc_groups() const {
    return !ssrc_groups.empty();
  }
  bool has_ssrc_group(const std::string& semantics) const {
    return (get_ssrc_group(semantics) != NULL);
  }
  // Returns the first group matching |semantics| (see
  // SsrcGroup::has_semantics), or NULL if there is none.
  const SsrcGroup* get_ssrc_group(const std::string& semantics) const {
    for (std::vector<SsrcGroup>::const_iterator it = ssrc_groups.begin();
         it != ssrc_groups.end(); ++it) {
      if (it->has_semantics(semantics)) {
        return &(*it);
      }
    }
    return NULL;
  }

  // Convenience function to add an FID ssrc for a primary_ssrc
  // that's already been added.
  bool AddFidSsrc(uint32_t primary_ssrc, uint32_t fid_ssrc) {
    return AddSecondarySsrc(kFidSsrcGroupSemantics, primary_ssrc, fid_ssrc);
  }

  // Convenience function to lookup the FID ssrc for a primary_ssrc.
  // Returns false if primary_ssrc not found or FID not defined for it.
  bool GetFidSsrc(uint32_t primary_ssrc, uint32_t* fid_ssrc) const {
    return GetSecondarySsrc(kFidSsrcGroupSemantics, primary_ssrc, fid_ssrc);
  }

  // Convenience function to add an FEC-FR ssrc for a primary_ssrc
  // that's already been added.
  bool AddFecFrSsrc(uint32_t primary_ssrc, uint32_t fecfr_ssrc) {
    return AddSecondarySsrc(kFecFrSsrcGroupSemantics, primary_ssrc, fecfr_ssrc);
  }

  // Convenience function to lookup the FEC-FR ssrc for a primary_ssrc.
  // Returns false if primary_ssrc not found or FEC-FR not defined for it.
  bool GetFecFrSsrc(uint32_t primary_ssrc, uint32_t* fecfr_ssrc) const {
    return GetSecondarySsrc(kFecFrSsrcGroupSemantics, primary_ssrc, fecfr_ssrc);
  }

  // Convenience to get all the SIM SSRCs if there are SIM ssrcs, or
  // the first SSRC otherwise.
  void GetPrimarySsrcs(std::vector<uint32_t>* ssrcs) const;

  // Convenience to get all the FID SSRCs for the given primary ssrcs.
  // If a given primary SSRC does not have a FID SSRC, the list of FID
  // SSRCS will be smaller than the list of primary SSRCs.
  void GetFidSsrcs(const std::vector<uint32_t>& primary_ssrcs,
                   std::vector<uint32_t>* fid_ssrcs) const;

  std::string ToString() const;

  // Resource of the MUC jid of the participant of with this stream.
  // For 1:1 calls, should be left empty (which means remote streams
  // and local streams should not be mixed together).
  std::string groupid;
  // Unique per-groupid, not across all groupids
  std::string id;
  std::vector<uint32_t> ssrcs;  // All SSRCs for this source
  std::vector<SsrcGroup> ssrc_groups;  // e.g. FID, FEC, SIM
  // Examples: "camera", "screencast"
  std::string type;
  // Friendly name describing stream
  std::string display;
  std::string cname;  // RTCP CNAME
  std::string sync_label;  // Friendly name of cname.

 private:
  // Shared implementation behind AddFidSsrc/AddFecFrSsrc.
  bool AddSecondarySsrc(const std::string& semantics,
                        uint32_t primary_ssrc,
                        uint32_t secondary_ssrc);
  // Shared implementation behind GetFidSsrc/GetFecFrSsrc.
  bool GetSecondarySsrc(const std::string& semantics,
                        uint32_t primary_ssrc,
                        uint32_t* secondary_ssrc) const;
};
+
+// A Stream can be selected by either groupid+id or ssrc.
+struct StreamSelector {
+ explicit StreamSelector(uint32_t ssrc) : ssrc(ssrc) {}
+
+ StreamSelector(const std::string& groupid,
+ const std::string& streamid) :
+ ssrc(0),
+ groupid(groupid),
+ streamid(streamid) {
+ }
+
+ bool Matches(const StreamParams& stream) const {
+ if (ssrc == 0) {
+ return stream.groupid == groupid && stream.id == streamid;
+ } else {
+ return stream.has_ssrc(ssrc);
+ }
+ }
+
+ uint32_t ssrc;
+ std::string groupid;
+ std::string streamid;
+};
+
+typedef std::vector<StreamParams> StreamParamsVec;
+
// A collection of audio and video and data streams. Most of the
// methods are merely for convenience. Many of these methods are keyed
// by ssrc, which is the source identifier in the RTP spec
// (http://tools.ietf.org/html/rfc3550).
// TODO(pthatcher): Add basic unit test for these.
// See https://code.google.com/p/webrtc/issues/detail?id=4107
struct MediaStreams {
 public:
  MediaStreams() {}
  // Replaces this object's streams with copies of |sources|'s streams.
  void CopyFrom(const MediaStreams& sources);

  bool empty() const {
    return audio_.empty() && video_.empty() && data_.empty();
  }

  std::vector<StreamParams>* mutable_audio() { return &audio_; }
  std::vector<StreamParams>* mutable_video() { return &video_; }
  std::vector<StreamParams>* mutable_data() { return &data_; }
  const std::vector<StreamParams>& audio() const { return audio_; }
  const std::vector<StreamParams>& video() const { return video_; }
  const std::vector<StreamParams>& data() const { return data_; }

  // Gets a stream, returning true if found.
  bool GetAudioStream(
      const StreamSelector& selector, StreamParams* stream);
  bool GetVideoStream(
      const StreamSelector& selector, StreamParams* stream);
  bool GetDataStream(
      const StreamSelector& selector, StreamParams* stream);
  // Adds a stream (no duplicate checking).
  void AddAudioStream(const StreamParams& stream);
  void AddVideoStream(const StreamParams& stream);
  void AddDataStream(const StreamParams& stream);
  // Removes a stream, returning true if found and removed.
  bool RemoveAudioStream(const StreamSelector& selector);
  bool RemoveVideoStream(const StreamSelector& selector);
  bool RemoveDataStream(const StreamSelector& selector);

 private:
  std::vector<StreamParams> audio_;
  std::vector<StreamParams> video_;
  std::vector<StreamParams> data_;

  RTC_DISALLOW_COPY_AND_ASSIGN(MediaStreams);
};
+
+template <class Condition>
+const StreamParams* GetStream(const StreamParamsVec& streams,
+ Condition condition) {
+ StreamParamsVec::const_iterator found =
+ std::find_if(streams.begin(), streams.end(), condition);
+ return found == streams.end() ? nullptr : &(*found);
+}
+
+template <class Condition>
+StreamParams* GetStream(StreamParamsVec& streams, Condition condition) {
+ StreamParamsVec::iterator found =
+ std::find_if(streams.begin(), streams.end(), condition);
+ return found == streams.end() ? nullptr : &(*found);
+}
+
+inline const StreamParams* GetStreamBySsrc(const StreamParamsVec& streams,
+ uint32_t ssrc) {
+ return GetStream(streams,
+ [&ssrc](const StreamParams& sp) { return sp.has_ssrc(ssrc); });
+}
+
+inline const StreamParams* GetStreamByIds(const StreamParamsVec& streams,
+ const std::string& groupid,
+ const std::string& id) {
+ return GetStream(streams, [&groupid, &id](const StreamParams& sp) {
+ return sp.groupid == groupid && sp.id == id;
+ });
+}
+
+inline StreamParams* GetStreamByIds(StreamParamsVec& streams,
+ const std::string& groupid,
+ const std::string& id) {
+ return GetStream(streams,
+ [&groupid, &id](const StreamParams& sp) {
+ return sp.groupid == groupid && sp.id == id;
+ });
+}
+
+inline const StreamParams* GetStream(const StreamParamsVec& streams,
+ const StreamSelector& selector) {
+ return GetStream(streams,
+ [&selector](const StreamParams& sp) { return selector.Matches(sp); });
+}
+
+template <class Condition>
+bool RemoveStream(StreamParamsVec* streams, Condition condition) {
+ auto iter(std::remove_if(streams->begin(), streams->end(), condition));
+ if (iter == streams->end())
+ return false;
+ streams->erase(iter, streams->end());
+ return true;
+}
+
+// Removes the stream from streams. Returns true if a stream is
+// found and removed.
+inline bool RemoveStream(StreamParamsVec* streams,
+ const StreamSelector& selector) {
+ return RemoveStream(streams,
+ [&selector](const StreamParams& sp) { return selector.Matches(sp); });
+}
+inline bool RemoveStreamBySsrc(StreamParamsVec* streams, uint32_t ssrc) {
+ return RemoveStream(streams,
+ [&ssrc](const StreamParams& sp) { return sp.has_ssrc(ssrc); });
+}
+inline bool RemoveStreamByIds(StreamParamsVec* streams,
+ const std::string& groupid,
+ const std::string& id) {
+ return RemoveStream(streams,
+ [&groupid, &id](const StreamParams& sp) {
+ return sp.groupid == groupid && sp.id == id;
+ });
+}
+
// Checks if |sp| defines parameters for a single primary stream. There may
// be an RTX stream or a FlexFEC stream (or both) associated with the primary
// stream. Leaving as non-static so we can test this function.
// (Defined in streamparams.cc.)
bool IsOneSsrcStream(const StreamParams& sp);

// Checks if |sp| defines parameters for one Simulcast stream. There may be RTX
// streams associated with the simulcast streams. Leaving as non-static so we
// can test this function. (Defined in streamparams.cc.)
bool IsSimulcastStream(const StreamParams& sp);

}  // namespace cricket

#endif  // MEDIA_BASE_STREAMPARAMS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc b/third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc
new file mode 100644
index 0000000000..6e934ae7d6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/streamparams_unittest.cc
@@ -0,0 +1,310 @@
+/*
+ * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/streamparams.h"
+#include "media/base/testutils.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/gunit.h"
+
+static const uint32_t kSsrcs1[] = {1};
+static const uint32_t kSsrcs2[] = {1, 2};
+static const uint32_t kSsrcs3[] = {1, 2, 3};
+static const uint32_t kRtxSsrcs3[] = {4, 5, 6};
+
+static cricket::StreamParams CreateStreamParamsWithSsrcGroup(
+ const std::string& semantics,
+ const uint32_t ssrcs_in[],
+ size_t len) {
+ cricket::StreamParams stream;
+ std::vector<uint32_t> ssrcs(ssrcs_in, ssrcs_in + len);
+ cricket::SsrcGroup sg(semantics, ssrcs);
+ stream.ssrcs = ssrcs;
+ stream.ssrc_groups.push_back(sg);
+ return stream;
+}
+
+TEST(SsrcGroup, EqualNotEqual) {
+ cricket::SsrcGroup ssrc_groups[] = {
+ cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs1)),
+ cricket::SsrcGroup("ABC", MAKE_VECTOR(kSsrcs2)),
+ cricket::SsrcGroup("Abc", MAKE_VECTOR(kSsrcs2)),
+ cricket::SsrcGroup("abc", MAKE_VECTOR(kSsrcs2)),
+ };
+
+ for (size_t i = 0; i < arraysize(ssrc_groups); ++i) {
+ for (size_t j = 0; j < arraysize(ssrc_groups); ++j) {
+ EXPECT_EQ((ssrc_groups[i] == ssrc_groups[j]), (i == j));
+ EXPECT_EQ((ssrc_groups[i] != ssrc_groups[j]), (i != j));
+ }
+ }
+}
+
+TEST(SsrcGroup, HasSemantics) {
+ cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
+ EXPECT_TRUE(sg1.has_semantics("ABC"));
+
+ cricket::SsrcGroup sg2("Abc", MAKE_VECTOR(kSsrcs1));
+ EXPECT_FALSE(sg2.has_semantics("ABC"));
+
+ cricket::SsrcGroup sg3("abc", MAKE_VECTOR(kSsrcs1));
+ EXPECT_FALSE(sg3.has_semantics("ABC"));
+}
+
+TEST(SsrcGroup, ToString) {
+ cricket::SsrcGroup sg1("ABC", MAKE_VECTOR(kSsrcs1));
+ EXPECT_STREQ("{semantics:ABC;ssrcs:[1]}", sg1.ToString().c_str());
+}
+
+TEST(StreamParams, CreateLegacy) {
+ const uint32_t ssrc = 7;
+ cricket::StreamParams one_sp = cricket::StreamParams::CreateLegacy(ssrc);
+ EXPECT_EQ(1U, one_sp.ssrcs.size());
+ EXPECT_EQ(ssrc, one_sp.first_ssrc());
+ EXPECT_TRUE(one_sp.has_ssrcs());
+ EXPECT_TRUE(one_sp.has_ssrc(ssrc));
+ EXPECT_FALSE(one_sp.has_ssrc(ssrc+1));
+ EXPECT_FALSE(one_sp.has_ssrc_groups());
+ EXPECT_EQ(0U, one_sp.ssrc_groups.size());
+}
+
+TEST(StreamParams, HasSsrcGroup) {
+ cricket::StreamParams sp =
+ CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
+ EXPECT_EQ(2U, sp.ssrcs.size());
+ EXPECT_EQ(kSsrcs2[0], sp.first_ssrc());
+ EXPECT_TRUE(sp.has_ssrcs());
+ EXPECT_TRUE(sp.has_ssrc(kSsrcs2[0]));
+ EXPECT_TRUE(sp.has_ssrc(kSsrcs2[1]));
+ EXPECT_TRUE(sp.has_ssrc_group("XYZ"));
+ EXPECT_EQ(1U, sp.ssrc_groups.size());
+ EXPECT_EQ(2U, sp.ssrc_groups[0].ssrcs.size());
+ EXPECT_EQ(kSsrcs2[0], sp.ssrc_groups[0].ssrcs[0]);
+ EXPECT_EQ(kSsrcs2[1], sp.ssrc_groups[0].ssrcs[1]);
+}
+
+TEST(StreamParams, GetSsrcGroup) {
+ cricket::StreamParams sp =
+ CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
+ EXPECT_EQ(NULL, sp.get_ssrc_group("xyz"));
+ EXPECT_EQ(&sp.ssrc_groups[0], sp.get_ssrc_group("XYZ"));
+}
+
+TEST(StreamParams, EqualNotEqual) {
+ cricket::StreamParams l1 = cricket::StreamParams::CreateLegacy(1);
+ cricket::StreamParams l2 = cricket::StreamParams::CreateLegacy(2);
+ cricket::StreamParams sg1 =
+ CreateStreamParamsWithSsrcGroup("ABC", kSsrcs1, arraysize(kSsrcs1));
+ cricket::StreamParams sg2 =
+ CreateStreamParamsWithSsrcGroup("ABC", kSsrcs2, arraysize(kSsrcs2));
+ cricket::StreamParams sg3 =
+ CreateStreamParamsWithSsrcGroup("Abc", kSsrcs2, arraysize(kSsrcs2));
+ cricket::StreamParams sg4 =
+ CreateStreamParamsWithSsrcGroup("abc", kSsrcs2, arraysize(kSsrcs2));
+ cricket::StreamParams sps[] = {l1, l2, sg1, sg2, sg3, sg4};
+
+ for (size_t i = 0; i < arraysize(sps); ++i) {
+ for (size_t j = 0; j < arraysize(sps); ++j) {
+ EXPECT_EQ((sps[i] == sps[j]), (i == j));
+ EXPECT_EQ((sps[i] != sps[j]), (i != j));
+ }
+ }
+}
+
+TEST(StreamParams, FidFunctions) {
+ uint32_t fid_ssrc;
+
+ cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1);
+ EXPECT_FALSE(sp.AddFidSsrc(10, 20));
+ EXPECT_TRUE(sp.AddFidSsrc(1, 2));
+ EXPECT_TRUE(sp.GetFidSsrc(1, &fid_ssrc));
+ EXPECT_EQ(2u, fid_ssrc);
+ EXPECT_FALSE(sp.GetFidSsrc(15, &fid_ssrc));
+
+ sp.add_ssrc(20);
+ EXPECT_TRUE(sp.AddFidSsrc(20, 30));
+ EXPECT_TRUE(sp.GetFidSsrc(20, &fid_ssrc));
+ EXPECT_EQ(30u, fid_ssrc);
+
+ // Manually create SsrcGroup to test bounds-checking
+ // in GetSecondarySsrc. We construct an invalid StreamParams
+ // for this.
+ std::vector<uint32_t> fid_vector;
+ fid_vector.push_back(13);
+ cricket::SsrcGroup invalid_fid_group(cricket::kFidSsrcGroupSemantics,
+ fid_vector);
+ cricket::StreamParams sp_invalid;
+ sp_invalid.add_ssrc(13);
+ sp_invalid.ssrc_groups.push_back(invalid_fid_group);
+ EXPECT_FALSE(sp_invalid.GetFidSsrc(13, &fid_ssrc));
+}
+
+TEST(StreamParams, GetPrimaryAndFidSsrcs) {
+ cricket::StreamParams sp;
+ sp.ssrcs.push_back(1);
+ sp.ssrcs.push_back(2);
+ sp.ssrcs.push_back(3);
+
+ std::vector<uint32_t> primary_ssrcs;
+ sp.GetPrimarySsrcs(&primary_ssrcs);
+ std::vector<uint32_t> fid_ssrcs;
+ sp.GetFidSsrcs(primary_ssrcs, &fid_ssrcs);
+ ASSERT_EQ(1u, primary_ssrcs.size());
+ EXPECT_EQ(1u, primary_ssrcs[0]);
+ ASSERT_EQ(0u, fid_ssrcs.size());
+
+ sp.ssrc_groups.push_back(
+ cricket::SsrcGroup(cricket::kSimSsrcGroupSemantics, sp.ssrcs));
+ sp.AddFidSsrc(1, 10);
+ sp.AddFidSsrc(2, 20);
+
+ primary_ssrcs.clear();
+ sp.GetPrimarySsrcs(&primary_ssrcs);
+ fid_ssrcs.clear();
+ sp.GetFidSsrcs(primary_ssrcs, &fid_ssrcs);
+ ASSERT_EQ(3u, primary_ssrcs.size());
+ EXPECT_EQ(1u, primary_ssrcs[0]);
+ EXPECT_EQ(2u, primary_ssrcs[1]);
+ EXPECT_EQ(3u, primary_ssrcs[2]);
+ ASSERT_EQ(2u, fid_ssrcs.size());
+ EXPECT_EQ(10u, fid_ssrcs[0]);
+ EXPECT_EQ(20u, fid_ssrcs[1]);
+}
+
+TEST(StreamParams, FecFrFunctions) {
+ uint32_t fecfr_ssrc;
+
+ cricket::StreamParams sp = cricket::StreamParams::CreateLegacy(1);
+ EXPECT_FALSE(sp.AddFecFrSsrc(10, 20));
+ EXPECT_TRUE(sp.AddFecFrSsrc(1, 2));
+ EXPECT_TRUE(sp.GetFecFrSsrc(1, &fecfr_ssrc));
+ EXPECT_EQ(2u, fecfr_ssrc);
+ EXPECT_FALSE(sp.GetFecFrSsrc(15, &fecfr_ssrc));
+
+ sp.add_ssrc(20);
+ EXPECT_TRUE(sp.AddFecFrSsrc(20, 30));
+ EXPECT_TRUE(sp.GetFecFrSsrc(20, &fecfr_ssrc));
+ EXPECT_EQ(30u, fecfr_ssrc);
+
+ // Manually create SsrcGroup to test bounds-checking
+ // in GetSecondarySsrc. We construct an invalid StreamParams
+ // for this.
+ std::vector<uint32_t> fecfr_vector;
+ fecfr_vector.push_back(13);
+ cricket::SsrcGroup invalid_fecfr_group(cricket::kFecFrSsrcGroupSemantics,
+ fecfr_vector);
+ cricket::StreamParams sp_invalid;
+ sp_invalid.add_ssrc(13);
+ sp_invalid.ssrc_groups.push_back(invalid_fecfr_group);
+ EXPECT_FALSE(sp_invalid.GetFecFrSsrc(13, &fecfr_ssrc));
+}
+
+TEST(StreamParams, ToString) {
+ cricket::StreamParams sp =
+ CreateStreamParamsWithSsrcGroup("XYZ", kSsrcs2, arraysize(kSsrcs2));
+ EXPECT_STREQ("{ssrcs:[1,2];ssrc_groups:{semantics:XYZ;ssrcs:[1,2]};}",
+ sp.ToString().c_str());
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_LegacyStream) {
+ EXPECT_TRUE(
+ cricket::IsOneSsrcStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleRtxStream) {
+ cricket::StreamParams stream;
+ stream.add_ssrc(13);
+ EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+ EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleFlexfecStream) {
+ cricket::StreamParams stream;
+ stream.add_ssrc(13);
+ EXPECT_TRUE(stream.AddFecFrSsrc(13, 14));
+ EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SingleFlexfecAndRtxStream) {
+ cricket::StreamParams stream;
+ stream.add_ssrc(13);
+ EXPECT_TRUE(stream.AddFecFrSsrc(13, 14));
+ EXPECT_TRUE(stream.AddFidSsrc(13, 15));
+ EXPECT_TRUE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimulcastStream) {
+ EXPECT_FALSE(cricket::IsOneSsrcStream(
+ cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+ EXPECT_FALSE(cricket::IsOneSsrcStream(
+ cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsOneSsrcStream_SimRtxStream) {
+ cricket::StreamParams stream =
+ cricket::CreateSimWithRtxStreamParams("cname",
+ MAKE_VECTOR(kSsrcs3),
+ MAKE_VECTOR(kRtxSsrcs3));
+ EXPECT_FALSE(cricket::IsOneSsrcStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_LegacyStream) {
+ EXPECT_FALSE(
+ cricket::IsSimulcastStream(cricket::StreamParams::CreateLegacy(13)));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SingleRtxStream) {
+ cricket::StreamParams stream;
+ stream.add_ssrc(13);
+ EXPECT_TRUE(stream.AddFidSsrc(13, 14));
+ EXPECT_FALSE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimulcastStream) {
+ EXPECT_TRUE(cricket::IsSimulcastStream(
+ cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2))));
+ EXPECT_TRUE(cricket::IsSimulcastStream(
+ cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs3))));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_SimRtxStream) {
+ cricket::StreamParams stream =
+ cricket::CreateSimWithRtxStreamParams("cname",
+ MAKE_VECTOR(kSsrcs3),
+ MAKE_VECTOR(kRtxSsrcs3));
+ EXPECT_TRUE(cricket::IsSimulcastStream(stream));
+}
+
+TEST(StreamParams, TestIsSimulcastStream_InvalidStreams) {
+ // stream1 has extra non-sim, non-fid ssrc.
+ cricket::StreamParams stream1 =
+ cricket::CreateSimWithRtxStreamParams("cname",
+ MAKE_VECTOR(kSsrcs3),
+ MAKE_VECTOR(kRtxSsrcs3));
+ stream1.add_ssrc(25);
+ EXPECT_FALSE(cricket::IsSimulcastStream(stream1));
+
+ // stream2 has invalid fid-group (no primary).
+ cricket::StreamParams stream2;
+ stream2.add_ssrc(13);
+ EXPECT_TRUE(stream2.AddFidSsrc(13, 14));
+ std::remove(stream2.ssrcs.begin(), stream2.ssrcs.end(), 13u);
+ EXPECT_FALSE(cricket::IsSimulcastStream(stream2));
+
+ // stream3 has two SIM groups.
+ cricket::StreamParams stream3 =
+ cricket::CreateSimStreamParams("cname", MAKE_VECTOR(kSsrcs2));
+ std::vector<uint32_t> sim_ssrcs = MAKE_VECTOR(kRtxSsrcs3);
+ cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, sim_ssrcs);
+ for (size_t i = 0; i < sim_ssrcs.size(); i++) {
+ stream3.add_ssrc(sim_ssrcs[i]);
+ }
+ stream3.ssrc_groups.push_back(sg);
+ EXPECT_FALSE(cricket::IsSimulcastStream(stream3));
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h b/third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h
new file mode 100644
index 0000000000..fdfbf3440e
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/test/mock_mediachannel.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_TEST_MOCK_MEDIACHANNEL_H_
+#define MEDIA_BASE_TEST_MOCK_MEDIACHANNEL_H_
+
+#include "media/base/fakemediaengine.h"
+#include "test/gmock.h"
+
+namespace webrtc {
+
+class MockVideoMediaChannel : public cricket::FakeVideoMediaChannel {
+ public:
+ MockVideoMediaChannel()
+ : cricket::FakeVideoMediaChannel(nullptr, cricket::VideoOptions()) {}
+ MOCK_METHOD1(GetStats, bool(cricket::VideoMediaInfo*));
+};
+
+class MockVoiceMediaChannel : public cricket::FakeVoiceMediaChannel {
+ public:
+ MockVoiceMediaChannel()
+ : cricket::FakeVoiceMediaChannel(nullptr, cricket::AudioOptions()) {}
+ MOCK_METHOD1(GetStats, bool(cricket::VoiceMediaInfo*));
+};
+
+} // namespace webrtc
+
+#endif // MEDIA_BASE_TEST_MOCK_MEDIACHANNEL_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/testutils.cc b/third_party/libwebrtc/webrtc/media/base/testutils.cc
new file mode 100644
index 0000000000..f92d4013eb
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/testutils.cc
@@ -0,0 +1,170 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/testutils.h"
+
+#include <math.h>
+#include <algorithm>
+#include <memory>
+
+#include "api/video/video_frame.h"
+#include "media/base/videocapturer.h"
+#include "rtc_base/bytebuffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/stream.h"
+#include "rtc_base/stringutils.h"
+#include "rtc_base/testutils.h"
+
+namespace cricket {
+
+/////////////////////////////////////////////////////////////////////////
+// Implementation of RawRtpPacket
+/////////////////////////////////////////////////////////////////////////
+void RawRtpPacket::WriteToByteBuffer(uint32_t in_ssrc,
+ rtc::ByteBufferWriter* buf) const {
+ if (!buf) return;
+
+ buf->WriteUInt8(ver_to_cc);
+ buf->WriteUInt8(m_to_pt);
+ buf->WriteUInt16(sequence_number);
+ buf->WriteUInt32(timestamp);
+ buf->WriteUInt32(in_ssrc);
+ buf->WriteBytes(payload, sizeof(payload));
+}
+
+bool RawRtpPacket::ReadFromByteBuffer(rtc::ByteBufferReader* buf) {
+ if (!buf) return false;
+
+ bool ret = true;
+ ret &= buf->ReadUInt8(&ver_to_cc);
+ ret &= buf->ReadUInt8(&m_to_pt);
+ ret &= buf->ReadUInt16(&sequence_number);
+ ret &= buf->ReadUInt32(&timestamp);
+ ret &= buf->ReadUInt32(&ssrc);
+ ret &= buf->ReadBytes(payload, sizeof(payload));
+ return ret;
+}
+
+bool RawRtpPacket::SameExceptSeqNumTimestampSsrc(const RawRtpPacket& packet,
+ uint16_t seq,
+ uint32_t ts,
+ uint32_t ssc) const {
+ return sequence_number == seq &&
+ timestamp == ts &&
+ ver_to_cc == packet.ver_to_cc &&
+ m_to_pt == packet.m_to_pt &&
+ ssrc == ssc &&
+ 0 == memcmp(payload, packet.payload, sizeof(payload));
+}
+
+/////////////////////////////////////////////////////////////////////////
+// Implementation of RawRtcpPacket
+/////////////////////////////////////////////////////////////////////////
+void RawRtcpPacket::WriteToByteBuffer(rtc::ByteBufferWriter *buf) const {
+ if (!buf) return;
+
+ buf->WriteUInt8(ver_to_count);
+ buf->WriteUInt8(type);
+ buf->WriteUInt16(length);
+ buf->WriteBytes(payload, sizeof(payload));
+}
+
+bool RawRtcpPacket::ReadFromByteBuffer(rtc::ByteBufferReader* buf) {
+ if (!buf) return false;
+
+ bool ret = true;
+ ret &= buf->ReadUInt8(&ver_to_count);
+ ret &= buf->ReadUInt8(&type);
+ ret &= buf->ReadUInt16(&length);
+ ret &= buf->ReadBytes(payload, sizeof(payload));
+ return ret;
+}
+
+bool RawRtcpPacket::EqualsTo(const RawRtcpPacket& packet) const {
+ return ver_to_count == packet.ver_to_count &&
+ type == packet.type &&
+ length == packet.length &&
+ 0 == memcmp(payload, packet.payload, sizeof(payload));
+}
+
+// Implementation of VideoCaptureListener.
+VideoCapturerListener::VideoCapturerListener(VideoCapturer* capturer)
+ : capturer_(capturer),
+ last_capture_state_(CS_STARTING),
+ frame_count_(0),
+ frame_width_(0),
+ frame_height_(0),
+ resolution_changed_(false) {
+ capturer->SignalStateChange.connect(this,
+ &VideoCapturerListener::OnStateChange);
+ capturer->AddOrUpdateSink(this, rtc::VideoSinkWants());
+}
+
+VideoCapturerListener::~VideoCapturerListener() {
+ capturer_->RemoveSink(this);
+}
+
+void VideoCapturerListener::OnStateChange(VideoCapturer* capturer,
+ CaptureState result) {
+ last_capture_state_ = result;
+}
+
+void VideoCapturerListener::OnFrame(const webrtc::VideoFrame& frame) {
+ ++frame_count_;
+ if (1 == frame_count_) {
+ frame_width_ = frame.width();
+ frame_height_ = frame.height();
+ } else if (frame_width_ != frame.width() || frame_height_ != frame.height()) {
+ resolution_changed_ = true;
+ }
+}
+
+cricket::StreamParams CreateSimStreamParams(
+ const std::string& cname,
+ const std::vector<uint32_t>& ssrcs) {
+ cricket::StreamParams sp;
+ cricket::SsrcGroup sg(cricket::kSimSsrcGroupSemantics, ssrcs);
+ sp.ssrcs = ssrcs;
+ sp.ssrc_groups.push_back(sg);
+ sp.cname = cname;
+ return sp;
+}
+
+// There should be an rtx_ssrc per ssrc.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+ const std::string& cname,
+ const std::vector<uint32_t>& ssrcs,
+ const std::vector<uint32_t>& rtx_ssrcs) {
+ cricket::StreamParams sp = CreateSimStreamParams(cname, ssrcs);
+ for (size_t i = 0; i < ssrcs.size(); ++i) {
+ sp.ssrcs.push_back(rtx_ssrcs[i]);
+ std::vector<uint32_t> fid_ssrcs;
+ fid_ssrcs.push_back(ssrcs[i]);
+ fid_ssrcs.push_back(rtx_ssrcs[i]);
+ cricket::SsrcGroup fid_group(cricket::kFidSsrcGroupSemantics, fid_ssrcs);
+ sp.ssrc_groups.push_back(fid_group);
+ }
+ return sp;
+}
+
+cricket::StreamParams CreatePrimaryWithFecFrStreamParams(
+ const std::string& cname,
+ uint32_t primary_ssrc,
+ uint32_t flexfec_ssrc) {
+ cricket::StreamParams sp;
+ cricket::SsrcGroup sg(cricket::kFecFrSsrcGroupSemantics,
+ {primary_ssrc, flexfec_ssrc});
+ sp.ssrcs = {primary_ssrc};
+ sp.ssrc_groups.push_back(sg);
+ sp.cname = cname;
+ return sp;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/testutils.h b/third_party/libwebrtc/webrtc/media/base/testutils.h
new file mode 100644
index 0000000000..8ee77c1852
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/testutils.h
@@ -0,0 +1,151 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_TESTUTILS_H_
+#define MEDIA_BASE_TESTUTILS_H_
+
+#include <string>
+#include <vector>
+
+#include "media/base/mediachannel.h"
+#include "media/base/videocapturer.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/basictypes.h"
+#include "rtc_base/sigslot.h"
+#include "rtc_base/window.h"
+
+namespace rtc {
+class ByteBufferReader;
+class ByteBufferWriter;
+class StreamInterface;
+}
+
+namespace webrtc {
+class VideoFrame;
+}
+
+namespace cricket {
+
+// Returns size of 420 image with rounding on chroma for odd sizes.
+#define I420_SIZE(w, h) (w * h + (((w + 1) / 2) * ((h + 1) / 2)) * 2)
+// Returns size of ARGB image.
+#define ARGB_SIZE(w, h) (w * h * 4)
+
+template <class T> inline std::vector<T> MakeVector(const T a[], size_t s) {
+ return std::vector<T>(a, a + s);
+}
+#define MAKE_VECTOR(a) cricket::MakeVector(a, arraysize(a))
+
+struct RtpDumpPacket;
+class RtpDumpWriter;
+
+struct RawRtpPacket {
+ void WriteToByteBuffer(uint32_t in_ssrc, rtc::ByteBufferWriter* buf) const;
+ bool ReadFromByteBuffer(rtc::ByteBufferReader* buf);
+ // Check if this packet is the same as the specified packet except the
+ // sequence number and timestamp, which should be the same as the specified
+ // parameters.
+ bool SameExceptSeqNumTimestampSsrc(const RawRtpPacket& packet,
+ uint16_t seq,
+ uint32_t ts,
+ uint32_t ssc) const;
+ int size() const { return 28; }
+
+ uint8_t ver_to_cc;
+ uint8_t m_to_pt;
+ uint16_t sequence_number;
+ uint32_t timestamp;
+ uint32_t ssrc;
+ char payload[16];
+};
+
+struct RawRtcpPacket {
+ void WriteToByteBuffer(rtc::ByteBufferWriter* buf) const;
+ bool ReadFromByteBuffer(rtc::ByteBufferReader* buf);
+ bool EqualsTo(const RawRtcpPacket& packet) const;
+
+ uint8_t ver_to_count;
+ uint8_t type;
+ uint16_t length;
+ char payload[16];
+};
+
+// Test helper for testing VideoCapturer implementations.
+class VideoCapturerListener
+ : public sigslot::has_slots<>,
+ public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ explicit VideoCapturerListener(VideoCapturer* cap);
+ ~VideoCapturerListener();
+
+ CaptureState last_capture_state() const { return last_capture_state_; }
+ int frame_count() const { return frame_count_; }
+ int frame_width() const { return frame_width_; }
+ int frame_height() const { return frame_height_; }
+ bool resolution_changed() const { return resolution_changed_; }
+
+ void OnStateChange(VideoCapturer* capturer, CaptureState state);
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ private:
+ VideoCapturer* capturer_;
+ CaptureState last_capture_state_;
+ int frame_count_;
+ int frame_width_;
+ int frame_height_;
+ bool resolution_changed_;
+};
+
+class VideoMediaErrorCatcher : public sigslot::has_slots<> {
+ public:
+ VideoMediaErrorCatcher() : ssrc_(0), error_(VideoMediaChannel::ERROR_NONE) { }
+ uint32_t ssrc() const { return ssrc_; }
+ VideoMediaChannel::Error error() const { return error_; }
+ void OnError(uint32_t ssrc, VideoMediaChannel::Error error) {
+ ssrc_ = ssrc;
+ error_ = error;
+ }
+ private:
+ uint32_t ssrc_;
+ VideoMediaChannel::Error error_;
+};
+
+// Checks whether |codecs| contains |codec|; checks using Codec::Matches().
+template <class C>
+bool ContainsMatchingCodec(const std::vector<C>& codecs, const C& codec) {
+ typename std::vector<C>::const_iterator it;
+ for (it = codecs.begin(); it != codecs.end(); ++it) {
+ if (it->Matches(codec)) {
+ return true;
+ }
+ }
+ return false;
+}
+
+// Create Simulcast StreamParams with given |ssrcs| and |cname|.
+cricket::StreamParams CreateSimStreamParams(const std::string& cname,
+ const std::vector<uint32_t>& ssrcs);
+// Create Simulcast stream with given |ssrcs| and |rtx_ssrcs|.
+// The number of |rtx_ssrcs| must match number of |ssrcs|.
+cricket::StreamParams CreateSimWithRtxStreamParams(
+ const std::string& cname,
+ const std::vector<uint32_t>& ssrcs,
+ const std::vector<uint32_t>& rtx_ssrcs);
+
+// Create StreamParams with single primary SSRC and corresponding FlexFEC SSRC.
+cricket::StreamParams CreatePrimaryWithFecFrStreamParams(
+ const std::string& cname,
+ uint32_t primary_ssrc,
+ uint32_t flexfec_ssrc);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_TESTUTILS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/turnutils.cc b/third_party/libwebrtc/webrtc/media/base/turnutils.cc
new file mode 100644
index 0000000000..cf258042e8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/turnutils.cc
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/turnutils.h"
+
+#include "p2p/base/stun.h"
+#include "rtc_base/byteorder.h"
+#include "rtc_base/checks.h"
+
+namespace cricket {
+
+namespace {
+
+const size_t kTurnChannelHeaderLength = 4;
+
+bool IsTurnChannelData(const uint8_t* data, size_t length) {
+ return length >= kTurnChannelHeaderLength && ((*data & 0xC0) == 0x40);
+}
+
+bool IsTurnSendIndicationPacket(const uint8_t* data, size_t length) {
+ if (length < kStunHeaderSize) {
+ return false;
+ }
+
+ uint16_t type = rtc::GetBE16(data);
+ return (type == TURN_SEND_INDICATION);
+}
+
+} // namespace
+
+bool UnwrapTurnPacket(const uint8_t* packet,
+ size_t packet_size,
+ size_t* content_position,
+ size_t* content_size) {
+ if (IsTurnChannelData(packet, packet_size)) {
+ // Turn Channel Message header format.
+ // 0 1 2 3
+ // 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | Channel Number | Length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | |
+ // / Application Data /
+ // / /
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ size_t length = rtc::GetBE16(&packet[2]);
+ if (length + kTurnChannelHeaderLength > packet_size) {
+ return false;
+ }
+
+ *content_position = kTurnChannelHeaderLength;
+ *content_size = length;
+ return true;
+ }
+
+ if (IsTurnSendIndicationPacket(packet, packet_size)) {
+ // Validate STUN message length.
+ const size_t stun_message_length = rtc::GetBE16(&packet[2]);
+ if (stun_message_length + kStunHeaderSize != packet_size) {
+ return false;
+ }
+
+ // First skip mandatory stun header which is of 20 bytes.
+ size_t pos = kStunHeaderSize;
+ // Loop through STUN attributes until we find STUN DATA attribute.
+ while (pos < packet_size) {
+ // Keep reading STUN attributes until we hit DATA attribute.
+ // Attribute will be a TLV structure.
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | Type | Length |
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // | Value (variable) ....
+ // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ // The value in the length field MUST contain the length of the Value
+ // part of the attribute, prior to padding, measured in bytes. Since
+ // STUN aligns attributes on 32-bit boundaries, attributes whose content
+ // is not a multiple of 4 bytes are padded with 1, 2, or 3 bytes of
+ // padding so that its value contains a multiple of 4 bytes. The
+ // padding bits are ignored, and may be any value.
+ uint16_t attr_type, attr_length;
+ const int kAttrHeaderLength = sizeof(attr_type) + sizeof(attr_length);
+
+ if (packet_size < pos + kAttrHeaderLength) {
+ return false;
+ }
+
+ // Getting attribute type and length.
+ attr_type = rtc::GetBE16(&packet[pos]);
+ attr_length = rtc::GetBE16(&packet[pos + sizeof(attr_type)]);
+
+ pos += kAttrHeaderLength; // Skip STUN_DATA_ATTR header.
+
+ // Checking for bogus attribute length.
+ if (pos + attr_length > packet_size) {
+ return false;
+ }
+
+ if (attr_type == STUN_ATTR_DATA) {
+ *content_position = pos;
+ *content_size = attr_length;
+ return true;
+ }
+
+ pos += attr_length;
+ if ((attr_length % 4) != 0) {
+ pos += (4 - (attr_length % 4));
+ }
+ }
+
+ // There is no data attribute present in the message.
+ return false;
+ }
+
+ // This is not a TURN packet.
+ *content_position = 0;
+ *content_size = packet_size;
+ return true;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/turnutils.h b/third_party/libwebrtc/webrtc/media/base/turnutils.h
new file mode 100644
index 0000000000..13ed26b7a5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/turnutils.h
@@ -0,0 +1,30 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_TURNUTILS_H_
+#define MEDIA_BASE_TURNUTILS_H_
+
+#include <cstddef>
+#include <cstdint>
+
+namespace cricket {
+
+struct PacketOptions;
+
+// Finds data location within a TURN Channel Message or TURN Send Indication
+// message.
+bool UnwrapTurnPacket(const uint8_t* packet,
+ size_t packet_size,
+ size_t* content_position,
+ size_t* content_size);
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_TURNUTILS_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc b/third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc
new file mode 100644
index 0000000000..ca1282760b
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/turnutils_unittest.cc
@@ -0,0 +1,120 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/turnutils.h"
+
+#include <stddef.h>
+
+#include "rtc_base/gunit.h"
+
+namespace cricket {
+
+// Invalid TURN send indication messages. Messages are proper STUN
+// messages with incorrect values in attributes.
+TEST(TurnUtilsTest, InvalidTurnSendIndicationMessages) {
+ size_t content_pos = SIZE_MAX;
+ size_t content_size = SIZE_MAX;
+
+ // Stun Indication message with Zero length
+ uint8_t kTurnSendIndicationMsgWithNoAttributes[] = {
+ 0x00, 0x16, 0x00, 0x00, // Zero length
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ };
+ EXPECT_FALSE(UnwrapTurnPacket(kTurnSendIndicationMsgWithNoAttributes,
+ sizeof(kTurnSendIndicationMsgWithNoAttributes),
+ &content_pos, &content_size));
+ EXPECT_EQ(SIZE_MAX, content_pos);
+ EXPECT_EQ(SIZE_MAX, content_size);
+
+ // Stun Send Indication message with invalid length in stun header.
+ const uint8_t kTurnSendIndicationMsgWithInvalidLength[] = {
+ 0x00, 0x16, 0xFF, 0x00, // length of 0xFF00
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ };
+ EXPECT_FALSE(UnwrapTurnPacket(kTurnSendIndicationMsgWithInvalidLength,
+ sizeof(kTurnSendIndicationMsgWithInvalidLength),
+ &content_pos, &content_size));
+ EXPECT_EQ(SIZE_MAX, content_pos);
+ EXPECT_EQ(SIZE_MAX, content_size);
+
+ // Stun Send Indication message with no DATA attribute in message.
+ const uint8_t kTurnSendIndicatinMsgWithNoDataAttribute[] = {
+ 0x00, 0x16, 0x00, 0x08, // length of
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ 0x00, 0x20, 0x00, 0x04, // Mapped address.
+ 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_FALSE(
+ UnwrapTurnPacket(kTurnSendIndicatinMsgWithNoDataAttribute,
+ sizeof(kTurnSendIndicatinMsgWithNoDataAttribute),
+ &content_pos, &content_size));
+ EXPECT_EQ(SIZE_MAX, content_pos);
+ EXPECT_EQ(SIZE_MAX, content_size);
+}
+
+// Valid TURN Send Indication messages.
+TEST(TurnUtilsTest, ValidTurnSendIndicationMessage) {
+ size_t content_pos = SIZE_MAX;
+ size_t content_size = SIZE_MAX;
+ // A valid STUN indication message with a valid RTP header in data attribute
+ // payload field and no extension bit set.
+ const uint8_t kTurnSendIndicationMsgWithoutRtpExtension[] = {
+ 0x00, 0x16, 0x00, 0x18, // length of
+ 0x21, 0x12, 0xA4, 0x42, // magic cookie
+ '0', '1', '2', '3', // transaction id
+ '4', '5', '6', '7', '8', '9', 'a', 'b',
+ 0x00, 0x20, 0x00, 0x04, // Mapped address.
+ 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x13, 0x00, 0x0C, // Data attribute.
+ 0x80, 0x00, 0x00, 0x00, // RTP packet.
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+ EXPECT_TRUE(UnwrapTurnPacket(
+ kTurnSendIndicationMsgWithoutRtpExtension,
+ sizeof(kTurnSendIndicationMsgWithoutRtpExtension), &content_pos,
+ &content_size));
+ EXPECT_EQ(12U, content_size);
+ EXPECT_EQ(32U, content_pos);
+}
+
+// Verify that parsing of valid TURN Channel Messages.
+TEST(TurnUtilsTest, ValidTurnChannelMessages) {
+ const uint8_t kTurnChannelMsgWithRtpPacket[] = {
+ 0x40, 0x00, 0x00, 0x0C,
+ 0x80, 0x00, 0x00, 0x00, // RTP packet.
+ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ };
+
+ size_t content_pos = 0, content_size = 0;
+ EXPECT_TRUE(UnwrapTurnPacket(
+ kTurnChannelMsgWithRtpPacket,
+ sizeof(kTurnChannelMsgWithRtpPacket), &content_pos, &content_size));
+ EXPECT_EQ(12U, content_size);
+ EXPECT_EQ(4U, content_pos);
+}
+
+TEST(TurnUtilsTest, ChannelMessageZeroLength) {
+ const uint8_t kTurnChannelMsgWithZeroLength[] = {0x40, 0x00, 0x00, 0x00};
+ size_t content_pos = SIZE_MAX;
+ size_t content_size = SIZE_MAX;
+ EXPECT_TRUE(UnwrapTurnPacket(kTurnChannelMsgWithZeroLength,
+ sizeof(kTurnChannelMsgWithZeroLength),
+ &content_pos, &content_size));
+ EXPECT_EQ(4, content_pos);
+ EXPECT_EQ(0, content_size);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videoadapter.cc b/third_party/libwebrtc/webrtc/media/base/videoadapter.cc
new file mode 100644
index 0000000000..8756c15a25
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoadapter.cc
@@ -0,0 +1,293 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videoadapter.h"
+
+#include <algorithm>
+#include <cmath>
+#include <cstdlib>
+#include <limits>
+#include <utility>
+
+#include "api/optional.h"
+#include "media/base/mediaconstants.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/arraysize.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace {
+struct Fraction {
+ int numerator;
+ int denominator;
+
+ // Determines number of output pixels if both width and height of an input of
+ // |input_pixels| pixels is scaled with the fraction numerator / denominator.
+ int scale_pixel_count(int input_pixels) {
+ return (numerator * numerator * input_pixels) / (denominator * denominator);
+ }
+};
+
+// Round |value_to_round| to a multiple of |multiple|. Prefer rounding upwards,
+// but never more than |max_value|.
+int roundUp(int value_to_round, int multiple, int max_value) {
+ const int rounded_value =
+ (value_to_round + multiple - 1) / multiple * multiple;
+ return rounded_value <= max_value ? rounded_value
+ : (max_value / multiple * multiple);
+}
+
+// Generates a scale factor that makes |input_pixels| close to |target_pixels|,
+// but no higher than |max_pixels|.
+Fraction FindScale(int input_pixels, int target_pixels, int max_pixels) {
+ // This function only makes sense for a positive target.
+ RTC_DCHECK_GT(target_pixels, 0);
+ RTC_DCHECK_GT(max_pixels, 0);
+ RTC_DCHECK_GE(max_pixels, target_pixels);
+
+ // Don't scale up original.
+ if (target_pixels >= input_pixels)
+ return Fraction{1, 1};
+
+ Fraction current_scale = Fraction{1, 1};
+ Fraction best_scale = Fraction{1, 1};
+ // The minimum (absolute) difference between the number of output pixels and
+ // the target pixel count.
+ int min_pixel_diff = std::numeric_limits<int>::max();
+ if (input_pixels <= max_pixels) {
+ // Start condition for 1/1 case, if it is less than max.
+ min_pixel_diff = std::abs(input_pixels - target_pixels);
+ }
+
+ // Alternately scale down by 2/3 and 3/4. This results in fractions which are
+ // effectively scalable. For instance, starting at 1280x720 will result in
+ // the series (3/4) => 960x540, (1/2) => 640x360, (3/8) => 480x270,
+// (1/4) => 320x180, (3/16) => 240x135, (1/8) => 160x90.
+ while (current_scale.scale_pixel_count(input_pixels) > target_pixels) {
+ if (current_scale.numerator % 3 == 0 &&
+ current_scale.denominator % 2 == 0) {
+ // Multiply by 2/3.
+ current_scale.numerator /= 3;
+ current_scale.denominator /= 2;
+ } else {
+ // Multiply by 3/4.
+ current_scale.numerator *= 3;
+ current_scale.denominator *= 4;
+ }
+
+ int output_pixels = current_scale.scale_pixel_count(input_pixels);
+ if (output_pixels <= max_pixels) {
+ int diff = std::abs(target_pixels - output_pixels);
+ if (diff < min_pixel_diff) {
+ min_pixel_diff = diff;
+ best_scale = current_scale;
+ }
+ }
+ }
+
+ return best_scale;
+}
+} // namespace
+
+namespace cricket {
+
+VideoAdapter::VideoAdapter(int required_resolution_alignment)
+ : frames_in_(0),
+ frames_out_(0),
+ frames_scaled_(0),
+ adaption_changes_(0),
+ previous_width_(0),
+ previous_height_(0),
+ required_resolution_alignment_(required_resolution_alignment),
+ resolution_request_target_pixel_count_(std::numeric_limits<int>::max()),
+ resolution_request_max_pixel_count_(std::numeric_limits<int>::max()),
+ max_framerate_request_(std::numeric_limits<int>::max()) {}
+
+VideoAdapter::VideoAdapter() : VideoAdapter(1) {}
+
+VideoAdapter::~VideoAdapter() {}
+
+bool VideoAdapter::KeepFrame(int64_t in_timestamp_ns) {
+ rtc::CritScope cs(&critical_section_);
+ if (max_framerate_request_ <= 0)
+ return false;
+
+ int64_t frame_interval_ns =
+ requested_format_ ? requested_format_->interval : 0;
+
+ // If |max_framerate_request_| is not set, it will default to maxint, which
+ // will lead to a frame_interval_ns rounded to 0.
+ frame_interval_ns = std::max<int64_t>(
+ frame_interval_ns, rtc::kNumNanosecsPerSec / max_framerate_request_);
+
+ if (frame_interval_ns <= 0) {
+ // Frame rate throttling not enabled.
+ return true;
+ }
+
+ if (next_frame_timestamp_ns_) {
+ // Time until next frame should be outputted.
+ const int64_t time_until_next_frame_ns =
+ (*next_frame_timestamp_ns_ - in_timestamp_ns);
+
+ // Continue if timestamp is within expected range.
+ if (std::abs(time_until_next_frame_ns) < 2 * frame_interval_ns) {
+ // Drop if a frame shouldn't be outputted yet.
+ if (time_until_next_frame_ns > 0)
+ return false;
+ // Time to output new frame.
+ *next_frame_timestamp_ns_ += frame_interval_ns;
+ return true;
+ }
+ }
+
+ // First timestamp received or timestamp is way outside expected range, so
+ // reset. Set first timestamp target to just half the interval to prefer
+ // keeping frames in case of jitter.
+ next_frame_timestamp_ns_ = in_timestamp_ns + frame_interval_ns / 2;
+ return true;
+}
+
+bool VideoAdapter::AdaptFrameResolution(int in_width,
+ int in_height,
+ int64_t in_timestamp_ns,
+ int* cropped_width,
+ int* cropped_height,
+ int* out_width,
+ int* out_height) {
+ rtc::CritScope cs(&critical_section_);
+ ++frames_in_;
+
+ // The max output pixel count is the minimum of the requests from
+ // OnOutputFormatRequest and OnResolutionRequest.
+ int max_pixel_count = resolution_request_max_pixel_count_;
+ if (scale_) {
+ // We calculate the scaled pixel count from the in_width and in_height,
+ // which is the input resolution. We then take the minimum of the scaled
+ // resolution and the current max_pixel_count. This will allow the
+ // quality scaler to reduce the resolution in response to load, but we
+ // will never go above the requested scaled resolution.
+ int scaled_pixel_count = (in_width*in_height/scale_resolution_by_)/scale_resolution_by_;
+ max_pixel_count = std::min(max_pixel_count, scaled_pixel_count);
+ }
+
+ if (requested_format_) {
+ max_pixel_count = std::min(
+ max_pixel_count, requested_format_->width * requested_format_->height);
+ }
+ int target_pixel_count =
+ std::min(resolution_request_target_pixel_count_, max_pixel_count);
+
+ // Drop the input frame if necessary.
+ if (max_pixel_count <= 0 || !KeepFrame(in_timestamp_ns)) {
+ // Show VAdapt log every 90 frames dropped. (3 seconds)
+ if ((frames_in_ - frames_out_) % 90 == 0) {
+ // TODO(fbarchard): Reduce to LS_VERBOSE when adapter info is not needed
+ // in default calls.
+ RTC_LOG(LS_INFO) << "VAdapt Drop Frame: scaled " << frames_scaled_
+ << " / out " << frames_out_ << " / in " << frames_in_
+ << " Changes: " << adaption_changes_
+ << " Input: " << in_width << "x" << in_height
+ << " timestamp: " << in_timestamp_ns << " Output: i"
+ << (requested_format_ ? requested_format_->interval : 0);
+ }
+
+ // Drop frame.
+ return false;
+ }
+
+ // Calculate how the input should be cropped.
+ if (!requested_format_ ||
+ requested_format_->width == 0 || requested_format_->height == 0) {
+ *cropped_width = in_width;
+ *cropped_height = in_height;
+ } else {
+ // Adjust |requested_format_| orientation to match input.
+ if ((in_width > in_height) !=
+ (requested_format_->width > requested_format_->height)) {
+ std::swap(requested_format_->width, requested_format_->height);
+ }
+ const float requested_aspect =
+ requested_format_->width /
+ static_cast<float>(requested_format_->height);
+ *cropped_width =
+ std::min(in_width, static_cast<int>(in_height * requested_aspect));
+ *cropped_height =
+ std::min(in_height, static_cast<int>(in_width / requested_aspect));
+ }
+ const Fraction scale = FindScale((*cropped_width) * (*cropped_height),
+ target_pixel_count, max_pixel_count);
+ // Adjust cropping slightly to get even integer output size and a perfect
+ // scale factor. Make sure the resulting dimensions are aligned correctly
+ // to be nice to hardware encoders.
+ *cropped_width =
+ roundUp(*cropped_width,
+ scale.denominator * required_resolution_alignment_, in_width);
+ *cropped_height =
+ roundUp(*cropped_height,
+ scale.denominator * required_resolution_alignment_, in_height);
+ RTC_DCHECK_EQ(0, *cropped_width % scale.denominator);
+ RTC_DCHECK_EQ(0, *cropped_height % scale.denominator);
+
+ // Calculate final output size.
+ *out_width = *cropped_width / scale.denominator * scale.numerator;
+ *out_height = *cropped_height / scale.denominator * scale.numerator;
+ RTC_DCHECK_EQ(0, *out_width % required_resolution_alignment_);
+ RTC_DCHECK_EQ(0, *out_height % required_resolution_alignment_);
+
+ ++frames_out_;
+ if (scale.numerator != scale.denominator)
+ ++frames_scaled_;
+
+ if ((previous_width_ || scale_) && (previous_width_ != *out_width ||
+ previous_height_ != *out_height)) {
+ ++adaption_changes_;
+ RTC_LOG(LS_INFO) << "Frame size changed: scaled " << frames_scaled_
+ << " / out " << frames_out_ << " / in " << frames_in_
+ << " Changes: " << adaption_changes_
+ << " Input: " << in_width << "x" << in_height
+ << " Scale: " << scale.numerator << "/"
+ << scale.denominator << " Output: " << *out_width << "x"
+ << *out_height << " i"
+ << (requested_format_ ? requested_format_->interval : 0);
+ }
+
+ previous_width_ = *out_width;
+ previous_height_ = *out_height;
+
+ return true;
+}
+
+void VideoAdapter::OnOutputFormatRequest(const VideoFormat& format) {
+ rtc::CritScope cs(&critical_section_);
+ requested_format_ = format;
+ next_frame_timestamp_ns_ = rtc::nullopt;
+}
+
+void VideoAdapter::OnResolutionFramerateRequest(
+ const rtc::Optional<int>& target_pixel_count,
+ int max_pixel_count,
+ int max_framerate_fps) {
+ rtc::CritScope cs(&critical_section_);
+ resolution_request_max_pixel_count_ = max_pixel_count;
+ resolution_request_target_pixel_count_ =
+ target_pixel_count.value_or(resolution_request_max_pixel_count_);
+ max_framerate_request_ = max_framerate_fps;
+}
+
+void VideoAdapter::OnScaleResolutionBy(
+ rtc::Optional<float> scale_resolution_by) {
+ rtc::CritScope cs(&critical_section_);
+ scale_resolution_by_ = scale_resolution_by.value_or(1.0);
+ RTC_DCHECK_GE(scale_resolution_by_, 1.0);
+ scale_ = static_cast<bool>(scale_resolution_by);
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videoadapter.h b/third_party/libwebrtc/webrtc/media/base/videoadapter.h
new file mode 100644
index 0000000000..e7c9fa6830
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoadapter.h
@@ -0,0 +1,104 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOADAPTER_H_
+#define MEDIA_BASE_VIDEOADAPTER_H_
+
+#include "api/optional.h"
+#include "media/base/videocommon.h"
+#include "rtc_base/constructormagic.h"
+#include "rtc_base/criticalsection.h"
+
+namespace cricket {
+
+// VideoAdapter adapts an input video frame to an output frame based on the
+// specified input and output formats. The adaptation includes dropping frames
+// to reduce frame rate and scaling frames.
+// VideoAdapter is thread safe.
+class VideoAdapter {
+ public:
+ VideoAdapter();
+ // The output frames will have height and width that is divisible by
+ // |required_resolution_alignment|.
+ explicit VideoAdapter(int required_resolution_alignment);
+ virtual ~VideoAdapter();
+
+ // Return the adapted resolution and cropping parameters given the
+ // input resolution. The input frame should first be cropped, then
+ // scaled to the final output resolution. Returns true if the frame
+ // should be adapted, and false if it should be dropped.
+ bool AdaptFrameResolution(int in_width,
+ int in_height,
+ int64_t in_timestamp_ns,
+ int* cropped_width,
+ int* cropped_height,
+ int* out_width,
+ int* out_height);
+
+ // Requests the output frame size and frame interval from
+ // |AdaptFrameResolution| to not be larger than |format|. Also, the input
+ // frame size will be cropped to match the requested aspect ratio. The
+ // requested aspect ratio is orientation agnostic and will be adjusted to
+ // maintain the input orientation, so it doesn't matter if e.g. 1280x720 or
+ // 720x1280 is requested.
+ void OnOutputFormatRequest(const VideoFormat& format);
+
+ // Requests the output frame size from |AdaptFrameResolution| to have as close
+ // as possible to |target_pixel_count| pixels (if set) but no more than
+ // |max_pixel_count|.
+ // |max_framerate_fps| is essentially analogous to |max_pixel_count|, but for
+ // framerate rather than resolution.
+ // Set |max_pixel_count| and/or |max_framerate_fps| to
+ // std::numeric_limit<int>::max() if no upper limit is desired.
+ void OnResolutionFramerateRequest(
+ const rtc::Optional<int>& target_pixel_count,
+ int max_pixel_count,
+ int max_framerate_fps);
+
+ // Requests the output frame size from |AdaptFrameResolution| be scaled
+ // down from the input by a factor of scale_resolution_by (min 1.0)
+ virtual void OnScaleResolutionBy(rtc::Optional<float> scale_resolution_by);
+
+ private:
+ // Determine if frame should be dropped based on input fps and requested fps.
+ bool KeepFrame(int64_t in_timestamp_ns);
+
+ int frames_in_; // Number of input frames.
+ int frames_out_; // Number of output frames.
+ int frames_scaled_; // Number of frames scaled.
+ int adaption_changes_; // Number of changes in scale factor.
+ int previous_width_; // Previous adapter output width.
+ int previous_height_; // Previous adapter output height.
+ // Resolution must be divisible by this factor.
+ const int required_resolution_alignment_;
+ // The target timestamp for the next frame based on requested format.
+ rtc::Optional<int64_t> next_frame_timestamp_ns_
+ RTC_GUARDED_BY(critical_section_);
+
+ // Max number of pixels requested via calls to OnOutputFormatRequest,
+ // OnResolutionRequest respectively.
+ // The adapted output format is the minimum of these.
+ rtc::Optional<VideoFormat> requested_format_
+ RTC_GUARDED_BY(critical_section_);
+ int resolution_request_target_pixel_count_ RTC_GUARDED_BY(critical_section_);
+ int resolution_request_max_pixel_count_ RTC_GUARDED_BY(critical_section_);
+ int max_framerate_request_ RTC_GUARDED_BY(critical_section_);
+ float scale_resolution_by_ RTC_GUARDED_BY(critical_section_);
+ bool scale_ RTC_GUARDED_BY(critical_section_);
+
+ // The critical section to protect the above variables.
+ rtc::CriticalSection critical_section_;
+
+ RTC_DISALLOW_COPY_AND_ASSIGN(VideoAdapter);
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOADAPTER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc
new file mode 100644
index 0000000000..039d1da636
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoadapter_unittest.cc
@@ -0,0 +1,1096 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits.h> // For INT_MAX
+
+#include <limits>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/fakevideocapturer.h"
+#include "media/base/mediachannel.h"
+#include "media/base/testutils.h"
+#include "media/base/videoadapter.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+
+namespace cricket {
+namespace {
+const int kDefaultFps = 30;
+} // namespace
+
+class VideoAdapterTest : public testing::Test {
+ public:
+ virtual void SetUp() {
+ capturer_.reset(new FakeVideoCapturer);
+ capture_format_ = capturer_->GetSupportedFormats()->at(0);
+ capture_format_.interval = VideoFormat::FpsToInterval(kDefaultFps);
+
+ listener_.reset(new VideoCapturerListener(&adapter_));
+ capturer_->AddOrUpdateSink(listener_.get(), rtc::VideoSinkWants());
+ }
+
+ virtual void TearDown() {
+ // Explicitly disconnect the VideoCapturer before to avoid data races
+ // (frames delivered to VideoCapturerListener while it's being destructed).
+ capturer_->RemoveSink(listener_.get());
+ }
+
+ protected:
+ class VideoCapturerListener
+ : public rtc::VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ struct Stats {
+ int captured_frames;
+ int dropped_frames;
+ bool last_adapt_was_no_op;
+
+ int cropped_width;
+ int cropped_height;
+ int out_width;
+ int out_height;
+ };
+
+ explicit VideoCapturerListener(VideoAdapter* adapter)
+ : video_adapter_(adapter),
+ cropped_width_(0),
+ cropped_height_(0),
+ out_width_(0),
+ out_height_(0),
+ captured_frames_(0),
+ dropped_frames_(0),
+ last_adapt_was_no_op_(false) {}
+
+ void OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::CritScope lock(&crit_);
+ const int in_width = frame.width();
+ const int in_height = frame.height();
+ int cropped_width;
+ int cropped_height;
+ int out_width;
+ int out_height;
+ if (video_adapter_->AdaptFrameResolution(
+ in_width, in_height,
+ frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
+ &cropped_width, &cropped_height, &out_width, &out_height)) {
+ cropped_width_ = cropped_width;
+ cropped_height_ = cropped_height;
+ out_width_ = out_width;
+ out_height_ = out_height;
+ last_adapt_was_no_op_ =
+ (in_width == cropped_width && in_height == cropped_height &&
+ in_width == out_width && in_height == out_height);
+ } else {
+ ++dropped_frames_;
+ }
+ ++captured_frames_;
+ }
+
+ Stats GetStats() {
+ rtc::CritScope lock(&crit_);
+ Stats stats;
+ stats.captured_frames = captured_frames_;
+ stats.dropped_frames = dropped_frames_;
+ stats.last_adapt_was_no_op = last_adapt_was_no_op_;
+ stats.cropped_width = cropped_width_;
+ stats.cropped_height = cropped_height_;
+ stats.out_width = out_width_;
+ stats.out_height = out_height_;
+ return stats;
+ }
+
+ private:
+ rtc::CriticalSection crit_;
+ VideoAdapter* video_adapter_;
+ int cropped_width_;
+ int cropped_height_;
+ int out_width_;
+ int out_height_;
+ int captured_frames_;
+ int dropped_frames_;
+ bool last_adapt_was_no_op_;
+ };
+
+
+ void VerifyAdaptedResolution(const VideoCapturerListener::Stats& stats,
+ int cropped_width,
+ int cropped_height,
+ int out_width,
+ int out_height) {
+ EXPECT_EQ(cropped_width, stats.cropped_width);
+ EXPECT_EQ(cropped_height, stats.cropped_height);
+ EXPECT_EQ(out_width, stats.out_width);
+ EXPECT_EQ(out_height, stats.out_height);
+ }
+
+ std::unique_ptr<FakeVideoCapturer> capturer_;
+ VideoAdapter adapter_;
+ int cropped_width_;
+ int cropped_height_;
+ int out_width_;
+ int out_height_;
+ std::unique_ptr<VideoCapturerListener> listener_;
+ VideoFormat capture_format_;
+};
+
+// Do not adapt the frame rate or the resolution. Expect no frame drop, no
+// cropping, and no resolution change.
+TEST_F(VideoAdapterTest, AdaptNothing) {
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop and no resolution change.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+ EXPECT_TRUE(stats.last_adapt_was_no_op);
+}
+
+TEST_F(VideoAdapterTest, AdaptZeroInterval) {
+ VideoFormat format = capturer_->GetSupportedFormats()->at(0);
+ format.interval = 0;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no crash and that frames aren't dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+}
+
+// Adapt the frame rate to be half of the capture rate at the beginning. Expect
+// the number of dropped frames to be half of the number the captured frames.
+TEST_F(VideoAdapterTest, AdaptFramerateToHalf) {
+ VideoFormat request_format = capture_format_;
+ request_format.interval *= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+
+ // Capture 10 frames and verify that every other frame is dropped. The first
+ // frame should not be dropped.
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 1);
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 2);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 3);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 4);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 5);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 6);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 7);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 8);
+ EXPECT_EQ(4, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 9);
+ EXPECT_EQ(4, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 10);
+ EXPECT_EQ(5, listener_->GetStats().dropped_frames);
+}
+
+// Adapt the frame rate to be two thirds of the capture rate at the beginning.
+// Expect the number of dropped frames to be one third of the number of
+// captured frames.
+TEST_F(VideoAdapterTest, AdaptFramerateToTwoThirds) {
+ VideoFormat request_format = capture_format_;
+ request_format.interval = request_format.interval * 3 / 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+
+ // Capture 10 frames and verify that every third frame is dropped. The first
+ // frame should not be dropped.
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 1);
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 2);
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 3);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 4);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 5);
+ EXPECT_EQ(1, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 6);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 7);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 8);
+ EXPECT_EQ(2, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 9);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+
+ capturer_->CaptureFrame();
+ EXPECT_GE(listener_->GetStats().captured_frames, 10);
+ EXPECT_EQ(3, listener_->GetStats().dropped_frames);
+}
+
+// Request frame rate twice as high as captured frame rate. Expect no frame
+// drop.
+TEST_F(VideoAdapterTest, AdaptFramerateHighLimit) {
+ VideoFormat request_format = capture_format_;
+ request_format.interval /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop.
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+}
+
+// After the first timestamp, add a big offset to the timestamps. Expect that
+// the adapter is conservative and resets to the new offset and does not drop
+// any frame.
+TEST_F(VideoAdapterTest, AdaptFramerateTimestampOffset) {
+ const int64_t capture_interval = VideoFormat::FpsToInterval(kDefaultFps);
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 480, capture_interval, cricket::FOURCC_ANY));
+
+ const int64_t first_timestamp = 0;
+ adapter_.AdaptFrameResolution(640, 480, first_timestamp,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ const int64_t big_offset = -987654321LL * 1000;
+ const int64_t second_timestamp = big_offset;
+ adapter_.AdaptFrameResolution(640, 480, second_timestamp,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ const int64_t third_timestamp = big_offset + capture_interval;
+ adapter_.AdaptFrameResolution(640, 480, third_timestamp,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+}
+
+// Request 30 fps and send 30 fps with jitter. Expect that no frame is dropped.
+TEST_F(VideoAdapterTest, AdaptFramerateTimestampJitter) {
+ const int64_t capture_interval = VideoFormat::FpsToInterval(kDefaultFps);
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 480, capture_interval, cricket::FOURCC_ANY));
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 0 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 10 / 10 - 1,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 25 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 30 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 35 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+
+ adapter_.AdaptFrameResolution(640, 480, capture_interval * 50 / 10,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_);
+ EXPECT_GT(out_width_, 0);
+ EXPECT_GT(out_height_, 0);
+}
+
+// Adapt the frame rate to be half of the capture rate after capturing no less
+// than 10 frames. Expect no frame dropped before adaptation and frame dropped
+// after adaptation.
+TEST_F(VideoAdapterTest, AdaptFramerateOntheFly) {
+ VideoFormat request_format = capture_format_;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop before adaptation.
+ EXPECT_EQ(0, listener_->GetStats().dropped_frames);
+
+  // Adapt the frame rate.
+ request_format.interval *= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+
+ for (int i = 0; i < 20; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify frame drop after adaptation.
+ EXPECT_GT(listener_->GetStats().dropped_frames, 0);
+}
+
+// Do not adapt the frame rate or the resolution. Expect no frame drop, no
+// cropping, and no resolution change.
+TEST_F(VideoAdapterTest, OnFramerateRequestMax) {
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop and no resolution change.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+ EXPECT_TRUE(stats.last_adapt_was_no_op);
+}
+
+TEST_F(VideoAdapterTest, OnFramerateRequestZero) {
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ std::numeric_limits<int>::max(), 0);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+  // Verify no crash and that all frames are dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(10, stats.dropped_frames);
+}
+
+// Adapt the frame rate to be half of the capture rate at the beginning. Expect
+// the number of dropped frames to be half of the number the captured frames.
+TEST_F(VideoAdapterTest, OnFramerateRequestHalf) {
+ adapter_.OnResolutionFramerateRequest(
+ rtc::nullopt, std::numeric_limits<int>::max(), kDefaultFps / 2);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+  // Verify no crash and that half of the frames are dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(5, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ capture_format_.width, capture_format_.height);
+}
+
+// Set a very high output pixel resolution. Expect no cropping or resolution
+// change.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionHighLimit) {
+ VideoFormat output_format = capture_format_;
+ output_format.width *= 10;
+ output_format.height *= 10;
+ adapter_.OnOutputFormatRequest(output_format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(capture_format_.width, out_width_);
+ EXPECT_EQ(capture_format_.height, out_height_);
+}
+
+// Adapt the frame resolution to be the same as capture resolution. Expect no
+// cropping or resolution change.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionIdentical) {
+ adapter_.OnOutputFormatRequest(capture_format_);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(capture_format_.width, out_width_);
+ EXPECT_EQ(capture_format_.height, out_height_);
+}
+
+// Adapt the frame resolution to be a quarter of the capture resolution. Expect
+// no cropping, but a resolution change.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionQuarter) {
+ VideoFormat request_format = capture_format_;
+ request_format.width /= 2;
+ request_format.height /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(capture_format_.width, cropped_width_);
+ EXPECT_EQ(capture_format_.height, cropped_height_);
+ EXPECT_EQ(request_format.width, out_width_);
+ EXPECT_EQ(request_format.height, out_height_);
+}
+
+// Adapt the pixel resolution to 0. Expect frame drop.
+TEST_F(VideoAdapterTest, AdaptFrameResolutionDrop) {
+ VideoFormat output_format = capture_format_;
+ output_format.width = 0;
+ output_format.height = 0;
+ adapter_.OnOutputFormatRequest(output_format);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+}
+
+// Adapt the frame resolution to be a quarter of the capture resolution at the
+// beginning. Expect no cropping but a resolution change.
+TEST_F(VideoAdapterTest, AdaptResolution) {
+ VideoFormat request_format = capture_format_;
+ request_format.width /= 2;
+ request_format.height /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no frame drop, no cropping, and resolution change.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_EQ(0, stats.dropped_frames);
+ VerifyAdaptedResolution(stats, capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
+}
+
+// Adapt the frame resolution to be a quarter of the capture resolution after
+// capturing no less than 10 frames. Expect no resolution change before
+// adaptation and resolution change after adaptation.
+TEST_F(VideoAdapterTest, AdaptResolutionOnTheFly) {
+ VideoFormat request_format = capture_format_;
+ adapter_.OnOutputFormatRequest(request_format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify no resolution change before adaptation.
+ VerifyAdaptedResolution(listener_->GetStats(),
+ capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
+
+ // Adapt the frame resolution.
+ request_format.width /= 2;
+ request_format.height /= 2;
+ adapter_.OnOutputFormatRequest(request_format);
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify resolution change after adaptation.
+ VerifyAdaptedResolution(listener_->GetStats(),
+ capture_format_.width, capture_format_.height,
+ request_format.width, request_format.height);
+}
+
+// Drop all frames.
+TEST_F(VideoAdapterTest, DropAllFrames) {
+ VideoFormat format; // with resolution 0x0.
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_EQ(CS_RUNNING, capturer_->Start(capture_format_));
+ for (int i = 0; i < 10; ++i)
+ capturer_->CaptureFrame();
+
+ // Verify all frames are dropped.
+ VideoCapturerListener::Stats stats = listener_->GetStats();
+ EXPECT_GE(stats.captured_frames, 10);
+ EXPECT_EQ(stats.captured_frames, stats.dropped_frames);
+}
+
+TEST_F(VideoAdapterTest, TestOnOutputFormatRequest) {
+ VideoFormat format(640, 400, 0, 0);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(400, out_height_);
+
+ // Format request 640x400.
+ format.height = 400;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(400, out_height_);
+
+ // Request 1280x720, higher than input, but aspect 16:9. Expect cropping but
+ // no scaling.
+ format.width = 1280;
+ format.height = 720;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Request 0x0.
+ format.width = 0;
+ format.height = 0;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ // Request 320x200. Expect scaling, but no cropping.
+ format.width = 320;
+ format.height = 200;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Request resolution close to 2/3 scale. Expect adapt down. Scaling to 2/3
+ // is not optimized and not allowed, therefore 1/2 scaling will be used
+ // instead.
+ format.width = 424;
+ format.height = 265;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Request resolution of 3 / 8. Expect adapt down.
+ format.width = 640 * 3 / 8;
+ format.height = 400 * 3 / 8;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(640 * 3 / 8, out_width_);
+ EXPECT_EQ(400 * 3 / 8, out_height_);
+
+ // Switch back up. Expect adapt.
+ format.width = 320;
+ format.height = 200;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(200, out_height_);
+
+ // Format request 480x300.
+ format.width = 480;
+ format.height = 300;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 400, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(400, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(300, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestViewRequestPlusCameraSwitch) {
+ // Start at HD.
+ VideoFormat format(1280, 720, 0, 0);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ // Format request for VGA.
+ format.width = 640;
+ format.height = 360;
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Now, the camera reopens at VGA.
+ // Both the frame and the output format should be 640x360.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // And another view request comes in for 640x360, which should have no
+ // real impact.
+ adapter_.OnOutputFormatRequest(format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestVGAWidth) {
+ // Requested output format is 640x360.
+ VideoFormat format(640, 360, 0, FOURCC_I420);
+ adapter_.OnOutputFormatRequest(format);
+
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ // Expect cropping.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // But if frames come in at 640x360, we shouldn't adapt them down.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestInSmallSteps) {
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ // Adapt down one step.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 1280 * 720 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(960, out_width_);
+ EXPECT_EQ(540, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 960 * 540 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt up one step.
+ adapter_.OnResolutionFramerateRequest(640 * 360,
+ 960 * 540,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(960 * 540,
+ 1280 * 720,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(960, out_width_);
+ EXPECT_EQ(540, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(1280 * 720,
+ 1920 * 1080,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestMaxZero) {
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 0,
+ std::numeric_limits<int>::max());
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestInLargeSteps) {
+ // Large step down.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Large step up.
+ adapter_.OnResolutionFramerateRequest(1280 * 720, 1920 * 1080,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnOutputFormatRequestCapsMaxResolution) {
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ VideoFormat new_format(640, 360, 0, FOURCC_I420);
+ adapter_.OnOutputFormatRequest(new_format);
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 960 * 720,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestOnResolutionRequestReset) {
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(1280, 720, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(1280, cropped_width_);
+ EXPECT_EQ(720, cropped_height_);
+ EXPECT_EQ(1280, out_width_);
+ EXPECT_EQ(720, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestCroppingWithResolutionRequest) {
+ // Ask for 640x360 (16:9 aspect).
+ adapter_.OnOutputFormatRequest(VideoFormat(640, 360, 0, FOURCC_I420));
+ // Send 640x480 (4:3 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Adapt down one step.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 360 - 1,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and 3/4 scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt down one step more.
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 480 * 270 - 1,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and 1/2 scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(320, out_width_);
+ EXPECT_EQ(180, out_height_);
+
+ // Adapt up one step.
+ adapter_.OnResolutionFramerateRequest(480 * 270, 640 * 360,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and 3/4 scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(480, out_width_);
+ EXPECT_EQ(270, out_height_);
+
+ // Adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(640 * 360, 960 * 540,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+
+ // Try to adapt up one step more.
+ adapter_.OnResolutionFramerateRequest(960 * 540, 1280 * 720,
+ std::numeric_limits<int>::max());
+ // Expect cropping to 16:9 format and no scaling.
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(360, cropped_height_);
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestCroppingOddResolution) {
+ // Ask for 640x360 (16:9 aspect), with 3/16 scaling.
+ adapter_.OnOutputFormatRequest(
+ VideoFormat(640, 360, 0, FOURCC_I420));
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ 640 * 360 * 3 / 16 * 3 / 16,
+ std::numeric_limits<int>::max());
+
+ // Send 640x480 (4:3 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 480, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ // Instead of getting the exact aspect ratio with cropped resolution 640x360,
+ // the resolution should be adjusted to get a perfect scale factor instead.
+ EXPECT_EQ(640, cropped_width_);
+ EXPECT_EQ(368, cropped_height_);
+ EXPECT_EQ(120, out_width_);
+ EXPECT_EQ(69, out_height_);
+}
+
+TEST_F(VideoAdapterTest, TestAdaptToVerySmallResolution) {
+ // Ask for 1920x1080 (16:9 aspect), with 1/16 scaling.
+ const int w = 1920;
+ const int h = 1080;
+ adapter_.OnOutputFormatRequest(VideoFormat(w, h, 0, FOURCC_I420));
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt,
+ w * h * 1 / 16 * 1 / 16,
+ std::numeric_limits<int>::max());
+
+ // Send 1920x1080 (16:9 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ w, h, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_));
+
+ // Instead of getting the exact aspect ratio with cropped resolution 1920x1080,
+ // the resolution should be adjusted to get a perfect scale factor instead.
+ EXPECT_EQ(1920, cropped_width_);
+ EXPECT_EQ(1072, cropped_height_);
+ EXPECT_EQ(120, out_width_);
+ EXPECT_EQ(67, out_height_);
+
+ // Adapt back up one step to 3/32.
+ adapter_.OnResolutionFramerateRequest(w * h * 3 / 32 * 3 / 32,
+ w * h * 1 / 8 * 1 / 8,
+ std::numeric_limits<int>::max());
+
+ // Send 1920x1080 (16:9 aspect).
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(
+ w, h, 0, &cropped_width_, &cropped_height_, &out_width_, &out_height_));
+
+ EXPECT_EQ(180, out_width_);
+ EXPECT_EQ(99, out_height_);
+}
+
+TEST_F(VideoAdapterTest, AdaptFrameResolutionDropWithResolutionRequest) {
+ VideoFormat output_format = capture_format_;
+ output_format.width = 0;
+ output_format.height = 0;
+ adapter_.OnOutputFormatRequest(output_format);
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ adapter_.OnResolutionFramerateRequest(960 * 540,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+
+ // Still expect all frames to be dropped
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+
+ adapter_.OnResolutionFramerateRequest(rtc::nullopt, 640 * 480 - 1,
+ std::numeric_limits<int>::max());
+
+ // Still expect all frames to be dropped
+ EXPECT_FALSE(adapter_.AdaptFrameResolution(
+ capture_format_.width, capture_format_.height, 0,
+ &cropped_width_, &cropped_height_,
+ &out_width_, &out_height_));
+}
+
+// Test that we will adapt to max given a target pixel count close to max.
+TEST_F(VideoAdapterTest, TestAdaptToMax) {
+ adapter_.OnOutputFormatRequest(VideoFormat(640, 360, 0, FOURCC_I420));
+ adapter_.OnResolutionFramerateRequest(640 * 360 - 1 /* target */,
+ std::numeric_limits<int>::max(),
+ std::numeric_limits<int>::max());
+
+ EXPECT_TRUE(adapter_.AdaptFrameResolution(640, 360, 0, &cropped_width_,
+ &cropped_height_, &out_width_,
+ &out_height_));
+ EXPECT_EQ(640, out_width_);
+ EXPECT_EQ(360, out_height_);
+}
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc
new file mode 100644
index 0000000000..d2a9c54116
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.cc
@@ -0,0 +1,131 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videobroadcaster.h"
+
+#include <limits>
+
+#include "api/video/i420_buffer.h"
+#include "rtc_base/checks.h"
+#include "rtc_base/logging.h"
+
+namespace rtc {
+
+VideoBroadcaster::VideoBroadcaster() {
+ thread_checker_.DetachFromThread();
+}
+
+void VideoBroadcaster::AddOrUpdateSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ VideoSourceBase::AddOrUpdateSink(sink, wants);
+ UpdateWants();
+}
+
+void VideoBroadcaster::RemoveSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ VideoSourceBase::RemoveSink(sink);
+ UpdateWants();
+}
+
+bool VideoBroadcaster::frame_wanted() const {
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ return !sink_pairs().empty();
+}
+
+VideoSinkWants VideoBroadcaster::wants() const {
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ return current_wants_;
+}
+
+void VideoBroadcaster::OnFrame(const webrtc::VideoFrame& frame) {
+ rtc::CritScope cs(&sinks_and_wants_lock_);
+ for (auto& sink_pair : sink_pairs()) {
+ if (sink_pair.wants.rotation_applied &&
+ frame.rotation() != webrtc::kVideoRotation_0) {
+ // Calls to OnFrame are not synchronized with changes to the sink wants.
+ // When rotation_applied is set to true, one or a few frames may get here
+ // with rotation still pending. Protect sinks that don't expect any
+ // pending rotation.
+ RTC_LOG(LS_VERBOSE) << "Discarding frame with unexpected rotation.";
+ continue;
+ }
+ if (sink_pair.wants.black_frames) {
+ sink_pair.sink->OnFrame(webrtc::VideoFrame(
+ GetBlackFrameBuffer(frame.width(), frame.height()), frame.rotation(),
+ frame.timestamp_us()));
+ } else {
+ sink_pair.sink->OnFrame(frame);
+ }
+ }
+}
+
+void VideoBroadcaster::OnDiscardedFrame() {
+ for (auto& sink_pair : sink_pairs()) {
+ sink_pair.sink->OnDiscardedFrame();
+ }
+}
+
+void VideoBroadcaster::UpdateWants() {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+
+ VideoSinkWants wants;
+ wants.rotation_applied = false;
+ for (auto& sink : sink_pairs()) {
+ // wants.rotation_applied == ANY(sink.wants.rotation_applied)
+ if (sink.wants.rotation_applied) {
+ wants.rotation_applied = true;
+ }
+ // wants.max_pixel_count == MIN(sink.wants.max_pixel_count)
+ if (sink.wants.max_pixel_count < wants.max_pixel_count) {
+ wants.max_pixel_count = sink.wants.max_pixel_count;
+ }
+ // Select the minimum requested target_pixel_count, if any, of all sinks so
+ // that we don't over utilize the resources for any one.
+ // TODO(sprang): Consider using the median instead, since the limit can be
+ // expressed by max_pixel_count.
+ if (sink.wants.target_pixel_count &&
+ (!wants.target_pixel_count ||
+ (*sink.wants.target_pixel_count < *wants.target_pixel_count))) {
+ wants.target_pixel_count = sink.wants.target_pixel_count;
+ }
+ // Select the minimum for the requested max framerates.
+ if (sink.wants.max_framerate_fps < wants.max_framerate_fps) {
+ wants.max_framerate_fps = sink.wants.max_framerate_fps;
+ }
+ }
+
+ if (wants.target_pixel_count &&
+ *wants.target_pixel_count >= wants.max_pixel_count) {
+ wants.target_pixel_count.emplace(wants.max_pixel_count);
+ }
+ current_wants_ = wants;
+}
+
+const rtc::scoped_refptr<webrtc::VideoFrameBuffer>&
+VideoBroadcaster::GetBlackFrameBuffer(int width, int height) {
+ if (!black_frame_buffer_ || black_frame_buffer_->width() != width ||
+ black_frame_buffer_->height() != height) {
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer =
+ webrtc::I420Buffer::Create(width, height);
+ webrtc::I420Buffer::SetBlack(buffer.get());
+ black_frame_buffer_ = buffer;
+ }
+
+ return black_frame_buffer_;
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/videobroadcaster.h b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.h
new file mode 100644
index 0000000000..a8e21fa5b8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videobroadcaster.h
@@ -0,0 +1,70 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOBROADCASTER_H_
+#define MEDIA_BASE_VIDEOBROADCASTER_H_
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "media/base/videosinkinterface.h"
+#include "media/base/videosourcebase.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/thread_checker.h"
+
+namespace rtc {
+
+// VideoBroadcaster broadcasts video frames to sinks and combines
+// VideoSinkWants from its sinks. It does that by implementing
+// rtc::VideoSourceInterface and rtc::VideoSinkInterface.
+// Sinks must be added and removed on one and only one thread.
+// Video frames can be broadcast on any thread, i.e. VideoBroadcaster::OnFrame
+// can be called on any thread.
+class VideoBroadcaster : public VideoSourceBase,
+ public VideoSinkInterface<webrtc::VideoFrame> {
+ public:
+ VideoBroadcaster();
+ void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) override;
+ void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ // Returns true if the next frame will be delivered to at least one sink.
+ bool frame_wanted() const;
+
+ // Returns the VideoSinkWants that the source is requested to fulfill,
+ // aggregated from the VideoSinkWants of all added sinks.
+ VideoSinkWants wants() const;
+
+ // This method ensures that if a sink sets rotation_applied == true,
+ // it will never receive a frame with pending rotation. Our caller
+ // may pass in frames without precise synchronization with changes
+ // to the VideoSinkWants.
+ void OnFrame(const webrtc::VideoFrame& frame) override;
+
+ void OnDiscardedFrame() override;
+
+ protected:
+ void UpdateWants() RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
+ const rtc::scoped_refptr<webrtc::VideoFrameBuffer>& GetBlackFrameBuffer(
+ int width,
+ int height) RTC_EXCLUSIVE_LOCKS_REQUIRED(sinks_and_wants_lock_);
+
+ ThreadChecker thread_checker_;
+ rtc::CriticalSection sinks_and_wants_lock_;
+
+ VideoSinkWants current_wants_ RTC_GUARDED_BY(sinks_and_wants_lock_);
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> black_frame_buffer_;
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_VIDEOBROADCASTER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc
new file mode 100644
index 0000000000..0f2057ebe0
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videobroadcaster_unittest.cc
@@ -0,0 +1,195 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <limits>
+
+#include "media/base/videobroadcaster.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "media/base/fakevideorenderer.h"
+#include "rtc_base/gunit.h"
+
+using rtc::VideoBroadcaster;
+using rtc::VideoSinkWants;
+using cricket::FakeVideoRenderer;
+
+
+TEST(VideoBroadcasterTest, frame_wanted) {
+ VideoBroadcaster broadcaster;
+ EXPECT_FALSE(broadcaster.frame_wanted());
+
+ FakeVideoRenderer sink;
+ broadcaster.AddOrUpdateSink(&sink, rtc::VideoSinkWants());
+ EXPECT_TRUE(broadcaster.frame_wanted());
+
+ broadcaster.RemoveSink(&sink);
+ EXPECT_FALSE(broadcaster.frame_wanted());
+}
+
+TEST(VideoBroadcasterTest, OnFrame) {
+ VideoBroadcaster broadcaster;
+
+ FakeVideoRenderer sink1;
+ FakeVideoRenderer sink2;
+ broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
+ broadcaster.AddOrUpdateSink(&sink2, rtc::VideoSinkWants());
+ static int kWidth = 100;
+ static int kHeight = 50;
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+ webrtc::I420Buffer::Create(kWidth, kHeight));
+  // Initialize, to avoid warnings on use of uninitialized values.
+ webrtc::I420Buffer::SetBlack(buffer);
+
+ webrtc::VideoFrame frame(buffer, webrtc::kVideoRotation_0, 0);
+
+ broadcaster.OnFrame(frame);
+ EXPECT_EQ(1, sink1.num_rendered_frames());
+ EXPECT_EQ(1, sink2.num_rendered_frames());
+
+ broadcaster.RemoveSink(&sink1);
+ broadcaster.OnFrame(frame);
+ EXPECT_EQ(1, sink1.num_rendered_frames());
+ EXPECT_EQ(2, sink2.num_rendered_frames());
+
+ broadcaster.AddOrUpdateSink(&sink1, rtc::VideoSinkWants());
+ broadcaster.OnFrame(frame);
+ EXPECT_EQ(2, sink1.num_rendered_frames());
+ EXPECT_EQ(3, sink2.num_rendered_frames());
+}
+
+TEST(VideoBroadcasterTest, AppliesRotationIfAnySinkWantsRotationApplied) {
+ VideoBroadcaster broadcaster;
+ EXPECT_FALSE(broadcaster.wants().rotation_applied);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.rotation_applied = false;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_FALSE(broadcaster.wants().rotation_applied);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.rotation_applied = true;
+
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_TRUE(broadcaster.wants().rotation_applied);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_FALSE(broadcaster.wants().rotation_applied);
+}
+
+TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxPixelCount) {
+ VideoBroadcaster broadcaster;
+ EXPECT_EQ(std::numeric_limits<int>::max(),
+ broadcaster.wants().max_pixel_count);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.max_pixel_count = 1280 * 720;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_EQ(1280 * 720, broadcaster.wants().max_pixel_count);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.max_pixel_count = 640 * 360;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_EQ(640 * 360, broadcaster.wants().max_pixel_count);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_EQ(1280 * 720, broadcaster.wants().max_pixel_count);
+}
+
+TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxAndTargetPixelCount) {
+ VideoBroadcaster broadcaster;
+ EXPECT_TRUE(!broadcaster.wants().target_pixel_count);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.target_pixel_count = 1280 * 720;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.target_pixel_count = 640 * 360;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_EQ(640 * 360, *broadcaster.wants().target_pixel_count);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_EQ(1280 * 720, *broadcaster.wants().target_pixel_count);
+}
+
+TEST(VideoBroadcasterTest, AppliesMinOfSinkWantsMaxFramerate) {
+ VideoBroadcaster broadcaster;
+ EXPECT_EQ(std::numeric_limits<int>::max(),
+ broadcaster.wants().max_framerate_fps);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.max_framerate_fps = 30;
+
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.max_framerate_fps = 15;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+ EXPECT_EQ(15, broadcaster.wants().max_framerate_fps);
+
+ broadcaster.RemoveSink(&sink2);
+ EXPECT_EQ(30, broadcaster.wants().max_framerate_fps);
+}
+
+TEST(VideoBroadcasterTest, SinkWantsBlackFrames) {
+ VideoBroadcaster broadcaster;
+ EXPECT_TRUE(!broadcaster.wants().black_frames);
+
+ FakeVideoRenderer sink1;
+ VideoSinkWants wants1;
+ wants1.black_frames = true;
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+
+ FakeVideoRenderer sink2;
+ VideoSinkWants wants2;
+ wants2.black_frames = false;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+
+ rtc::scoped_refptr<webrtc::I420Buffer> buffer(
+ webrtc::I420Buffer::Create(100, 200));
+ // Makes it not all black.
+ buffer->InitializeData();
+
+ webrtc::VideoFrame frame1(buffer, webrtc::kVideoRotation_0,
+ 10 /* timestamp_us */);
+ broadcaster.OnFrame(frame1);
+ EXPECT_TRUE(sink1.black_frame());
+ EXPECT_EQ(10, sink1.timestamp_us());
+ EXPECT_FALSE(sink2.black_frame());
+ EXPECT_EQ(10, sink2.timestamp_us());
+
+ // Switch the sink wants.
+ wants1.black_frames = false;
+ broadcaster.AddOrUpdateSink(&sink1, wants1);
+ wants2.black_frames = true;
+ broadcaster.AddOrUpdateSink(&sink2, wants2);
+
+ webrtc::VideoFrame frame2(buffer, webrtc::kVideoRotation_0,
+ 30 /* timestamp_us */);
+ broadcaster.OnFrame(frame2);
+ EXPECT_FALSE(sink1.black_frame());
+ EXPECT_EQ(30, sink1.timestamp_us());
+ EXPECT_TRUE(sink2.black_frame());
+ EXPECT_EQ(30, sink2.timestamp_us());
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturer.cc b/third_party/libwebrtc/webrtc/media/base/videocapturer.cc
new file mode 100644
index 0000000000..f6e05ba5d5
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturer.cc
@@ -0,0 +1,381 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation file of class VideoCapturer.
+
+#include "media/base/videocapturer.h"
+
+#include <algorithm>
+
+#include "api/video/i420_buffer.h"
+#include "api/video/video_frame.h"
+#include "rtc_base/logging.h"
+#include "system_wrappers/include/field_trial.h"
+
+namespace cricket {
+
+namespace {
+
+static const int64_t kMaxDistance = ~(static_cast<int64_t>(1) << 63);
+#ifdef WEBRTC_LINUX
+static const int kYU12Penalty = 16; // Needs to be higher than MJPG index.
+#endif
+static const char* kSimulcastScreenshareFieldTrialName =
+ "WebRTC-SimulcastScreenshare";
+
+} // namespace
+
+/////////////////////////////////////////////////////////////////////
+// Implementation of class VideoCapturer
+/////////////////////////////////////////////////////////////////////
+VideoCapturer::VideoCapturer() : apply_rotation_(false) {
+ thread_checker_.DetachFromThread();
+ Construct();
+}
+
+void VideoCapturer::Construct() {
+ enable_camera_list_ = false;
+ capture_state_ = CS_STOPPED;
+ scaled_width_ = 0;
+ scaled_height_ = 0;
+ enable_video_adapter_ = true;
+}
+
+const std::vector<VideoFormat>* VideoCapturer::GetSupportedFormats() const {
+ return &filtered_supported_formats_;
+}
+
+bool VideoCapturer::StartCapturing(const VideoFormat& capture_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ CaptureState result = Start(capture_format);
+ const bool success = (result == CS_RUNNING) || (result == CS_STARTING);
+ if (!success) {
+ return false;
+ }
+ if (result == CS_RUNNING) {
+ SetCaptureState(result);
+ }
+ return true;
+}
+
+void VideoCapturer::SetSupportedFormats(
+ const std::vector<VideoFormat>& formats) {
+ // This method is OK to call during initialization on a separate thread.
+ RTC_DCHECK(capture_state_ == CS_STOPPED ||
+ thread_checker_.CalledOnValidThread());
+ supported_formats_ = formats;
+ UpdateFilteredSupportedFormats();
+}
+
+bool VideoCapturer::GetBestCaptureFormat(const VideoFormat& format,
+ VideoFormat* best_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ // TODO(fbarchard): Directly support max_format.
+ UpdateFilteredSupportedFormats();
+ const std::vector<VideoFormat>* supported_formats = GetSupportedFormats();
+
+ if (supported_formats->empty()) {
+ return false;
+ }
+ RTC_LOG(LS_INFO) << " Capture Requested " << format.ToString();
+ int64_t best_distance = kMaxDistance;
+ std::vector<VideoFormat>::const_iterator best = supported_formats->end();
+ std::vector<VideoFormat>::const_iterator i;
+ for (i = supported_formats->begin(); i != supported_formats->end(); ++i) {
+ int64_t distance = GetFormatDistance(format, *i);
+ // TODO(fbarchard): Reduce to LS_VERBOSE if/when camera capture is
+ // relatively bug free.
+ RTC_LOG(LS_INFO) << " Supported " << i->ToString() << " distance "
+ << distance;
+ if (distance < best_distance) {
+ best_distance = distance;
+ best = i;
+ }
+ }
+ if (supported_formats->end() == best) {
+ RTC_LOG(LS_ERROR) << " No acceptable camera format found";
+ return false;
+ }
+
+ if (best_format) {
+ best_format->width = best->width;
+ best_format->height = best->height;
+ best_format->fourcc = best->fourcc;
+ best_format->interval = best->interval;
+ RTC_LOG(LS_INFO) << " Best " << best_format->ToString() << " Interval "
+ << best_format->interval << " distance " << best_distance;
+ }
+ return true;
+}
+
+void VideoCapturer::ConstrainSupportedFormats(const VideoFormat& max_format) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ max_format_.reset(new VideoFormat(max_format));
+ RTC_LOG(LS_VERBOSE) << " ConstrainSupportedFormats " << max_format.ToString();
+ UpdateFilteredSupportedFormats();
+}
+
+bool VideoCapturer::GetInputSize(int* width, int* height) {
+ rtc::CritScope cs(&frame_stats_crit_);
+ if (!input_size_valid_) {
+ return false;
+ }
+ *width = input_width_;
+ *height = input_height_;
+
+ return true;
+}
+
+void VideoCapturer::RemoveSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ broadcaster_.RemoveSink(sink);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void VideoCapturer::AddOrUpdateSink(
+ rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ broadcaster_.AddOrUpdateSink(sink, wants);
+ OnSinkWantsChanged(broadcaster_.wants());
+}
+
+void VideoCapturer::OnSinkWantsChanged(const rtc::VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ apply_rotation_ = wants.rotation_applied;
+
+ if (video_adapter()) {
+ video_adapter()->OnResolutionFramerateRequest(wants.target_pixel_count,
+ wants.max_pixel_count,
+ wants.max_framerate_fps);
+ }
+}
+
+bool VideoCapturer::AdaptFrame(int width,
+ int height,
+ int64_t camera_time_us,
+ int64_t system_time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y,
+ int64_t* translated_camera_time_us) {
+ if (translated_camera_time_us) {
+ *translated_camera_time_us =
+ timestamp_aligner_.TranslateTimestamp(camera_time_us, system_time_us);
+ }
+ if (!broadcaster_.frame_wanted()) {
+ return false;
+ }
+
+ bool simulcast_screenshare_enabled =
+ webrtc::field_trial::IsEnabled(kSimulcastScreenshareFieldTrialName);
+ if (enable_video_adapter_ &&
+ (!IsScreencast() || simulcast_screenshare_enabled)) {
+ if (!video_adapter_.AdaptFrameResolution(
+ width, height, camera_time_us * rtc::kNumNanosecsPerMicrosec,
+ crop_width, crop_height, out_width, out_height)) {
+ // VideoAdapter dropped the frame.
+ broadcaster_.OnDiscardedFrame();
+ return false;
+ }
+ *crop_x = (width - *crop_width) / 2;
+ *crop_y = (height - *crop_height) / 2;
+ } else {
+ *out_width = width;
+ *out_height = height;
+ *crop_width = width;
+ *crop_height = height;
+ *crop_x = 0;
+ *crop_y = 0;
+ }
+
+ return true;
+}
+
+void VideoCapturer::OnFrame(const webrtc::VideoFrame& frame,
+ int orig_width,
+ int orig_height) {
+ // For a child class which implements rotation itself, we should
+ // always have apply_rotation_ == false or frame.rotation() == 0.
+ // Except possibly during races where apply_rotation_ is changed
+ // mid-stream.
+ if (apply_rotation_ && frame.rotation() != webrtc::kVideoRotation_0) {
+ rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer(
+ frame.video_frame_buffer());
+ if (buffer->type() != webrtc::VideoFrameBuffer::Type::kI420) {
+ // Sources producing non-I420 frames must handle apply_rotation
+ // themselves. But even if they do, we may occasionally end up
+ // in this case, for frames in flight at the time
+ // applied_rotation is set to true. In that case, we just drop
+ // the frame.
+ RTC_LOG(LS_WARNING) << "Non-I420 frame requiring rotation. Discarding.";
+ return;
+ }
+ broadcaster_.OnFrame(webrtc::VideoFrame(
+ webrtc::I420Buffer::Rotate(*buffer->GetI420(), frame.rotation()),
+ webrtc::kVideoRotation_0, frame.timestamp_us()));
+ } else {
+ broadcaster_.OnFrame(frame);
+ }
+ UpdateInputSize(orig_width, orig_height);
+}
+
+void VideoCapturer::SetCaptureState(CaptureState state) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (state == capture_state_) {
+ // Don't trigger a state changed callback if the state hasn't changed.
+ return;
+ }
+ capture_state_ = state;
+ SignalStateChange(this, capture_state_);
+}
+
+// Get the distance between the supported and desired formats.
+// Prioritization is done according to this algorithm:
+// 1) Width closeness. If not same, we prefer wider.
+// 2) Height closeness. If not same, we prefer higher.
+// 3) Framerate closeness. If not same, we prefer faster.
+// 4) Compression. If desired format has a specific fourcc, we need exact match;
+// otherwise, we use preference.
+int64_t VideoCapturer::GetFormatDistance(const VideoFormat& desired,
+ const VideoFormat& supported) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ int64_t distance = kMaxDistance;
+
+ // Check fourcc.
+ uint32_t supported_fourcc = CanonicalFourCC(supported.fourcc);
+ int64_t delta_fourcc = kMaxDistance;
+ if (FOURCC_ANY == desired.fourcc) {
+ // Any fourcc is OK for the desired. Use preference to find best fourcc.
+ std::vector<uint32_t> preferred_fourccs;
+ if (!GetPreferredFourccs(&preferred_fourccs)) {
+ return distance;
+ }
+
+ for (size_t i = 0; i < preferred_fourccs.size(); ++i) {
+ if (supported_fourcc == CanonicalFourCC(preferred_fourccs[i])) {
+ delta_fourcc = i;
+#ifdef WEBRTC_LINUX
+ // For HD avoid YU12 which is a software conversion and has 2 bugs
+ // b/7326348 b/6960899. Reenable when fixed.
+ if (supported.height >= 720 && (supported_fourcc == FOURCC_YU12 ||
+ supported_fourcc == FOURCC_YV12)) {
+ delta_fourcc += kYU12Penalty;
+ }
+#endif
+ break;
+ }
+ }
+ } else if (supported_fourcc == CanonicalFourCC(desired.fourcc)) {
+ delta_fourcc = 0; // Need exact match.
+ }
+
+ if (kMaxDistance == delta_fourcc) {
+ // Failed to match fourcc.
+ return distance;
+ }
+
+ // Check resolution and fps.
+ int desired_width = desired.width;
+ int desired_height = desired.height;
+ int64_t delta_w = supported.width - desired_width;
+ float supported_fps = VideoFormat::IntervalToFpsFloat(supported.interval);
+ float delta_fps =
+ supported_fps - VideoFormat::IntervalToFpsFloat(desired.interval);
+  // Check the supported height compared to the height we would like it to be.
+ int64_t aspect_h = desired_width
+ ? supported.width * desired_height / desired_width
+ : desired_height;
+ int64_t delta_h = supported.height - aspect_h;
+
+ distance = 0;
+ // Set high penalty if the supported format is lower than the desired format.
+  // 3x means we would prefer down to 3/4, than up to double.
+ // But we'd prefer up to double than down to 1/2. This is conservative,
+ // strongly avoiding going down in resolution, similar to
+ // the old method, but not completely ruling it out in extreme situations.
+ // It also ignores framerate, which is often very low at high resolutions.
+ // TODO(fbarchard): Improve logic to use weighted factors.
+ static const int kDownPenalty = -3;
+ if (delta_w < 0) {
+ delta_w = delta_w * kDownPenalty;
+ }
+ if (delta_h < 0) {
+ delta_h = delta_h * kDownPenalty;
+ }
+ // Require camera fps to be at least 80% of what is requested if resolution
+ // matches.
+ // Require camera fps to be at least 96% of what is requested, or higher,
+ // if resolution differs. 96% allows for slight variations in fps. e.g. 29.97
+ if (delta_fps < 0) {
+ float min_desirable_fps = delta_w ?
+ VideoFormat::IntervalToFpsFloat(desired.interval) * 28.f / 30.f :
+ VideoFormat::IntervalToFpsFloat(desired.interval) * 23.f / 30.f;
+ delta_fps = -delta_fps;
+ if (supported_fps < min_desirable_fps) {
+ distance |= static_cast<int64_t>(1) << 62;
+ } else {
+ distance |= static_cast<int64_t>(1) << 15;
+ }
+ }
+ int64_t idelta_fps = static_cast<int>(delta_fps);
+
+ // 12 bits for width and height and 8 bits for fps and fourcc.
+ distance |=
+ (delta_w << 28) | (delta_h << 16) | (idelta_fps << 8) | delta_fourcc;
+
+ return distance;
+}
+
+void VideoCapturer::UpdateFilteredSupportedFormats() {
+ filtered_supported_formats_.clear();
+ filtered_supported_formats_ = supported_formats_;
+ if (!max_format_) {
+ return;
+ }
+ std::vector<VideoFormat>::iterator iter = filtered_supported_formats_.begin();
+ while (iter != filtered_supported_formats_.end()) {
+ if (ShouldFilterFormat(*iter)) {
+ iter = filtered_supported_formats_.erase(iter);
+ } else {
+ ++iter;
+ }
+ }
+ if (filtered_supported_formats_.empty()) {
+    // The device only captures at resolutions higher than |max_format_|; this
+ // indicates that |max_format_| should be ignored as it is better to capture
+ // at too high a resolution than to not capture at all.
+ filtered_supported_formats_ = supported_formats_;
+ }
+}
+
+bool VideoCapturer::ShouldFilterFormat(const VideoFormat& format) const {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ if (!enable_camera_list_) {
+ return false;
+ }
+ return format.width > max_format_->width ||
+ format.height > max_format_->height;
+}
+
+void VideoCapturer::UpdateInputSize(int width, int height) {
+ // Update stats protected from fetches from different thread.
+ rtc::CritScope cs(&frame_stats_crit_);
+
+ input_size_valid_ = true;
+ input_width_ = width;
+ input_height_ = height;
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturer.h b/third_party/libwebrtc/webrtc/media/base/videocapturer.h
new file mode 100644
index 0000000000..684f8730bd
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturer.h
@@ -0,0 +1,289 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Declaration of abstract class VideoCapturer
+
+#ifndef MEDIA_BASE_VIDEOCAPTURER_H_
+#define MEDIA_BASE_VIDEOCAPTURER_H_
+
+#include <stdint.h>
+
+#include <algorithm>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "media/base/videoadapter.h"
+#include "media/base/videobroadcaster.h"
+#include "media/base/videocommon.h"
+#include "media/base/videosourceinterface.h"
+#include "rtc_base/constructormagic.h"
+#include "rtc_base/criticalsection.h"
+#include "rtc_base/sigslot.h"
+#include "rtc_base/thread_checker.h"
+#include "rtc_base/timestampaligner.h"
+
+namespace webrtc {
+class VideoFrame;
+}
+
+namespace cricket {
+
+// Current state of the capturer.
+enum CaptureState {
+ CS_STOPPED, // The capturer has been stopped or hasn't started yet.
+ CS_STARTING, // The capturer is in the process of starting. Note, it may
+ // still fail to start.
+ CS_RUNNING, // The capturer has been started successfully and is now
+ // capturing.
+ CS_FAILED, // The capturer failed to start.
+};
+
+// VideoCapturer is an abstract class that defines the interfaces for video
+// capturing. The subclasses implement the video capturer for various types of
+// capturers and various platforms.
+//
+// The captured frames may need to be adapted (for example, cropping).
+// Video adaptation is built into and enabled by default. After a frame has
+// been captured from the device, it is sent to the video adapter, then out to
+// the sinks.
+//
+// Programming model:
+// Create an object of a subclass of VideoCapturer
+// Initialize
+// SignalStateChange.connect()
+// AddOrUpdateSink()
+// Find the capture format for Start() by either calling GetSupportedFormats()
+// and selecting one of the supported or calling GetBestCaptureFormat().
+// video_adapter()->OnOutputFormatRequest(desired_encoding_format)
+// Start()
+// GetCaptureFormat() optionally
+// Stop()
+//
+// Assumption:
+// The Start() and Stop() methods are called by a single thread (E.g., the
+//   media engine thread). Hence, the VideoCapturer subclasses don't need to be
+// thread safe.
+//
+class VideoCapturer : public sigslot::has_slots<>,
+ public rtc::VideoSourceInterface<webrtc::VideoFrame> {
+ public:
+ VideoCapturer();
+
+ virtual ~VideoCapturer() {}
+
+ // Gets the id of the underlying device, which is available after the capturer
+ // is initialized. Can be used to determine if two capturers reference the
+ // same device.
+ const std::string& GetId() const { return id_; }
+
+ // Get the capture formats supported by the video capturer. The supported
+ // formats are non empty after the device has been opened successfully.
+ const std::vector<VideoFormat>* GetSupportedFormats() const;
+
+ // Get the best capture format for the desired format. The best format is the
+ // same as one of the supported formats except that the frame interval may be
+ // different. If the application asks for 16x9 and the camera does not support
+ // 16x9 HD or the application asks for 16x10, we find the closest 4x3 and then
+ // crop; Otherwise, we find what the application asks for. Note that we assume
+ // that for HD, the desired format is always 16x9. The subclasses can override
+ // the default implementation.
+ // Parameters
+ // desired: the input desired format. If desired.fourcc is not kAnyFourcc,
+ // the best capture format has the exactly same fourcc. Otherwise,
+ // the best capture format uses a fourcc in GetPreferredFourccs().
+ // best_format: the output of the best capture format.
+ // Return false if there is no such a best format, that is, the desired format
+ // is not supported.
+ virtual bool GetBestCaptureFormat(const VideoFormat& desired,
+ VideoFormat* best_format);
+
+ // TODO(hellner): deprecate (make private) the Start API in favor of this one.
+ // Also remove CS_STARTING as it is implied by the return
+ // value of StartCapturing().
+ bool StartCapturing(const VideoFormat& capture_format);
+ // Start the video capturer with the specified capture format.
+ // Parameter
+ // capture_format: The caller got this parameter by either calling
+ // GetSupportedFormats() and selecting one of the supported
+ // or calling GetBestCaptureFormat().
+ // Return
+ // CS_STARTING: The capturer is trying to start. Success or failure will
+ // be notified via the |SignalStateChange| callback.
+ // CS_RUNNING: if the capturer is started and capturing.
+  //   CS_FAILED:    if the capturer fails to start.
+ // CS_NO_DEVICE: if the capturer has no device and fails to start.
+ virtual CaptureState Start(const VideoFormat& capture_format) = 0;
+
+ // Get the current capture format, which is set by the Start() call.
+ // Note that the width and height of the captured frames may differ from the
+ // capture format. For example, the capture format is HD but the captured
+ // frames may be smaller than HD.
+ const VideoFormat* GetCaptureFormat() const {
+ return capture_format_.get();
+ }
+
+ // Stop the video capturer.
+ virtual void Stop() = 0;
+ // Check if the video capturer is running.
+ virtual bool IsRunning() = 0;
+ CaptureState capture_state() const {
+ return capture_state_;
+ }
+
+ virtual bool apply_rotation() { return apply_rotation_; }
+
+ // Returns true if the capturer is screencasting. This can be used to
+ // implement screencast specific behavior.
+ virtual bool IsScreencast() const = 0;
+
+ // Caps the VideoCapturer's format according to max_format. It can e.g. be
+ // used to prevent cameras from capturing at a resolution or framerate that
+ // the capturer is capable of but not performing satisfactorily at.
+ // The capping is an upper bound for each component of the capturing format.
+ // The fourcc component is ignored.
+ void ConstrainSupportedFormats(const VideoFormat& max_format);
+
+ void set_enable_camera_list(bool enable_camera_list) {
+ enable_camera_list_ = enable_camera_list;
+ }
+ bool enable_camera_list() {
+ return enable_camera_list_;
+ }
+
+ // Signal all capture state changes that are not a direct result of calling
+ // Start().
+ sigslot::signal2<VideoCapturer*, CaptureState> SignalStateChange;
+
+ // If true, run video adaptation. By default, video adaptation is enabled
+ // and users must call video_adapter()->OnOutputFormatRequest()
+ // to receive frames.
+ bool enable_video_adapter() const { return enable_video_adapter_; }
+ void set_enable_video_adapter(bool enable_video_adapter) {
+ enable_video_adapter_ = enable_video_adapter;
+ }
+
+ bool GetInputSize(int* width, int* height);
+
+ // Implements VideoSourceInterface
+ void AddOrUpdateSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const rtc::VideoSinkWants& wants) override;
+ void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ protected:
+ // OnSinkWantsChanged can be overridden to change the default behavior
+ // when a sink changes its VideoSinkWants by calling AddOrUpdateSink.
+ virtual void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
+
+ // Reports the appropriate frame size after adaptation. Returns true
+ // if a frame is wanted. Returns false if there are no interested
+ // sinks, or if the VideoAdapter decides to drop the frame.
+
+ // This function also implements timestamp translation/filtering.
+ // |camera_time_ns| is the camera's timestamp for the captured
+ // frame; it is expected to have good accuracy, but it may use an
+  // arbitrary epoch and a small, possibly free-running, clock with a frequency
+ // slightly different from the system clock. |system_time_us| is the
+ // monotonic system time (in the same scale as rtc::TimeMicros) when
+ // the frame was captured; the application is expected to read the
+ // system time as soon as possible after frame capture, but it may
+ // suffer scheduling jitter or poor system clock resolution. The
+ // output |translated_camera_time_us| is a combined timestamp,
+ // taking advantage of the supposedly higher accuracy in the camera
+ // timestamp, but using the same epoch and frequency as system time.
+ bool AdaptFrame(int width,
+ int height,
+ int64_t camera_time_us,
+ int64_t system_time_us,
+ int* out_width,
+ int* out_height,
+ int* crop_width,
+ int* crop_height,
+ int* crop_x,
+ int* crop_y,
+ int64_t* translated_camera_time_us);
+
+ // Called when a frame has been captured and converted to a
+ // VideoFrame. OnFrame can be called directly by an implementation
+ // that does not use SignalFrameCaptured or OnFrameCaptured. The
+ // orig_width and orig_height are used only to produce stats.
+ void OnFrame(const webrtc::VideoFrame& frame,
+ int orig_width,
+ int orig_height);
+
+ VideoAdapter* video_adapter() { return &video_adapter_; }
+
+ void SetCaptureState(CaptureState state);
+
+ // subclasses override this virtual method to provide a vector of fourccs, in
+ // order of preference, that are expected by the media engine.
+ virtual bool GetPreferredFourccs(std::vector<uint32_t>* fourccs) = 0;
+
+ // mutators to set private attributes
+ void SetId(const std::string& id) {
+ id_ = id;
+ }
+
+ void SetCaptureFormat(const VideoFormat* format) {
+ capture_format_.reset(format ? new VideoFormat(*format) : NULL);
+ }
+
+ void SetSupportedFormats(const std::vector<VideoFormat>& formats);
+
+ private:
+ void Construct();
+ // Get the distance between the desired format and the supported format.
+ // Return the max distance if they mismatch. See the implementation for
+ // details.
+ int64_t GetFormatDistance(const VideoFormat& desired,
+ const VideoFormat& supported);
+
+ // Updates filtered_supported_formats_ so that it contains the formats in
+ // supported_formats_ that fulfill all applied restrictions.
+ void UpdateFilteredSupportedFormats();
+ // Returns true if format doesn't fulfill all applied restrictions.
+ bool ShouldFilterFormat(const VideoFormat& format) const;
+
+ void UpdateInputSize(int width, int height);
+
+ rtc::ThreadChecker thread_checker_;
+ std::string id_;
+ CaptureState capture_state_;
+ std::unique_ptr<VideoFormat> capture_format_;
+ std::vector<VideoFormat> supported_formats_;
+ std::unique_ptr<VideoFormat> max_format_;
+ std::vector<VideoFormat> filtered_supported_formats_;
+
+ bool enable_camera_list_;
+ int scaled_width_; // Current output size from ComputeScale.
+ int scaled_height_;
+
+ rtc::VideoBroadcaster broadcaster_;
+ bool enable_video_adapter_;
+ VideoAdapter video_adapter_;
+
+ rtc::CriticalSection frame_stats_crit_;
+ // The captured frame size before potential adapation.
+ bool input_size_valid_ RTC_GUARDED_BY(frame_stats_crit_) = false;
+ int input_width_ RTC_GUARDED_BY(frame_stats_crit_);
+ int input_height_ RTC_GUARDED_BY(frame_stats_crit_);
+
+ // Whether capturer should apply rotation to the frame before
+ // passing it on to the registered sinks.
+ bool apply_rotation_;
+
+ // State for the timestamp translation.
+ rtc::TimestampAligner timestamp_aligner_;
+ RTC_DISALLOW_COPY_AND_ASSIGN(VideoCapturer);
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOCAPTURER_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc
new file mode 100644
index 0000000000..7450694d21
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturer_unittest.cc
@@ -0,0 +1,786 @@
+/*
+ * Copyright (c) 2008 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <memory>
+#include <vector>
+
+#include "media/base/fakevideocapturer.h"
+#include "media/base/fakevideorenderer.h"
+#include "media/base/testutils.h"
+#include "media/base/videocapturer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/logging.h"
+#include "rtc_base/thread.h"
+
+using cricket::FakeVideoCapturer;
+
+namespace {
+
+const int kMsCallbackWait = 500;
+// For HD only the height matters.
+const int kMinHdHeight = 720;
+
+} // namespace
+
+// Test fixture owning a FakeVideoCapturer connected to a FakeVideoRenderer.
+// It listens to the capturer's SignalStateChange and records the most recent
+// capture state plus the number of transitions so tests can assert on both.
+class VideoCapturerTest
+    : public sigslot::has_slots<>,
+      public testing::Test {
+ public:
+  VideoCapturerTest()
+      : capture_state_(cricket::CS_STOPPED), num_state_changes_(0) {
+    InitCapturer(false);
+  }
+
+ protected:
+  // Recreates the capturer. |is_screencast| selects screencast mode in the
+  // fake capturer. renderer_ is (re)attached as a sink with default wants.
+  void InitCapturer(bool is_screencast) {
+    capturer_ = std::unique_ptr<FakeVideoCapturer>(
+        new FakeVideoCapturer(is_screencast));
+    capturer_->SignalStateChange.connect(this,
+                                         &VideoCapturerTest::OnStateChange);
+    capturer_->AddOrUpdateSink(&renderer_, rtc::VideoSinkWants());
+  }
+  void InitScreencast() { InitCapturer(true); }
+
+  // Slot for the capturer's state-change signal: records the new state and
+  // counts how many transitions have been observed.
+  void OnStateChange(cricket::VideoCapturer*,
+                     cricket::CaptureState capture_state) {
+    capture_state_ = capture_state;
+    ++num_state_changes_;
+  }
+  cricket::CaptureState capture_state() { return capture_state_; }
+  int num_state_changes() { return num_state_changes_; }
+
+  std::unique_ptr<cricket::FakeVideoCapturer> capturer_;
+  cricket::CaptureState capture_state_;
+  int num_state_changes_;
+  cricket::FakeVideoRenderer renderer_;
+  // NOTE(review): not referenced anywhere in this file — looks like a
+  // leftover; candidate for removal.
+  bool expects_rotation_applied_;
+};
+
+// Verifies that Start()/Stop() drive the capturer through CS_RUNNING and
+// CS_STOPPED, that each transition fires exactly one SignalStateChange, and
+// that a redundant Stop() does not signal again.
+TEST_F(VideoCapturerTest, CaptureState) {
+  EXPECT_TRUE(capturer_->enable_video_adapter());
+  EXPECT_EQ(cricket::CS_RUNNING, capturer_->Start(cricket::VideoFormat(
+      640,
+      480,
+      cricket::VideoFormat::FpsToInterval(30),
+      cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_->IsRunning());
+  EXPECT_EQ_WAIT(cricket::CS_RUNNING, capture_state(), kMsCallbackWait);
+  EXPECT_EQ(1, num_state_changes());
+  capturer_->Stop();
+  EXPECT_EQ_WAIT(cricket::CS_STOPPED, capture_state(), kMsCallbackWait);
+  EXPECT_EQ(2, num_state_changes());
+  capturer_->Stop();
+  // Stopping an already-stopped capturer: pump pending messages and verify
+  // no further state change was reported.
+  rtc::Thread::Current()->ProcessMessages(100);
+  EXPECT_EQ(2, num_state_changes());
+}
+
+// A screencast source with an odd width (1281) must be delivered unscaled:
+// the renderer should receive the frame at its original dimensions.
+TEST_F(VideoCapturerTest, ScreencastScaledOddWidth) {
+  InitScreencast();
+
+  int kWidth = 1281;
+  int kHeight = 720;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+                                         cricket::VideoFormat::FpsToInterval(5),
+                                         cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(formats);
+
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_->Start(cricket::VideoFormat(
+                kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_->IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+  // Odd width is preserved, not rounded/cropped by the adapter.
+  EXPECT_EQ(kWidth, renderer_.width());
+  EXPECT_EQ(kHeight, renderer_.height());
+}
+
+// When a sink asks for rotation_applied, the source itself must rotate the
+// pixels: the renderer then always sees kVideoRotation_0, with width/height
+// swapped for 90/270-degree rotations.
+TEST_F(VideoCapturerTest, TestRotationAppliedBySource) {
+  int kWidth = 800;
+  int kHeight = 400;
+  int frame_count = 0;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+                                         cricket::VideoFormat::FpsToInterval(5),
+                                         cricket::FOURCC_I420));
+
+  capturer_->ResetSupportedFormats(formats);
+  rtc::VideoSinkWants wants;
+  // |capturer_| should compensate rotation.
+  wants.rotation_applied = true;
+  capturer_->AddOrUpdateSink(&renderer_, wants);
+
+  // capturer_ should compensate rotation as default.
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_->Start(cricket::VideoFormat(
+                kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_->IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+  // If the frame's rotation is compensated anywhere in the pipeline based on
+  // the rotation information, the renderer should be given the right dimension
+  // such that the frame could be rendered.
+
+  capturer_->SetRotation(webrtc::kVideoRotation_90);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  // Swapped width and height
+  EXPECT_EQ(kWidth, renderer_.height());
+  EXPECT_EQ(kHeight, renderer_.width());
+  EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+
+  capturer_->SetRotation(webrtc::kVideoRotation_270);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  // Swapped width and height
+  EXPECT_EQ(kWidth, renderer_.height());
+  EXPECT_EQ(kHeight, renderer_.width());
+  EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+
+  capturer_->SetRotation(webrtc::kVideoRotation_180);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  // Back to normal width and height
+  EXPECT_EQ(kWidth, renderer_.width());
+  EXPECT_EQ(kHeight, renderer_.height());
+  EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+}
+
+// With default VideoSinkWants (rotation_applied == false) the capturer must
+// pass frames through untouched: the renderer sees the rotation metadata that
+// was set on the capturer, for every rotation value.
+TEST_F(VideoCapturerTest, TestRotationAppliedBySinkByDefault) {
+  int kWidth = 800;
+  int kHeight = 400;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+                                         cricket::VideoFormat::FpsToInterval(5),
+                                         cricket::FOURCC_I420));
+
+  capturer_->ResetSupportedFormats(formats);
+
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_->Start(cricket::VideoFormat(
+                kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_->IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+  // If the frame's rotation is compensated anywhere in the pipeline, the frame
+  // won't have its original dimension out from capturer. Since the renderer
+  // here has the same dimension as the capturer, it will skip that frame as the
+  // resolution won't match anymore.
+
+  int frame_count = 0;
+  capturer_->SetRotation(webrtc::kVideoRotation_0);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+  capturer_->SetRotation(webrtc::kVideoRotation_90);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+  capturer_->SetRotation(webrtc::kVideoRotation_180);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+  capturer_->SetRotation(webrtc::kVideoRotation_270);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+}
+
+// Mixed wants: one sink does not ask for rotation to be applied, a second one
+// does. Once any sink requests it, the source applies rotation for everyone,
+// so BOTH renderers then observe kVideoRotation_0.
+TEST_F(VideoCapturerTest, TestRotationAppliedBySourceWhenDifferentWants) {
+  int kWidth = 800;
+  int kHeight = 400;
+
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(kWidth, kHeight,
+                                         cricket::VideoFormat::FpsToInterval(5),
+                                         cricket::FOURCC_I420));
+
+  capturer_->ResetSupportedFormats(formats);
+  rtc::VideoSinkWants wants;
+  // capturer_ should not compensate rotation.
+  wants.rotation_applied = false;
+  capturer_->AddOrUpdateSink(&renderer_, wants);
+
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_->Start(cricket::VideoFormat(
+                kWidth, kHeight, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_->IsRunning());
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+
+  // With only the non-applying sink attached, rotation metadata is passed
+  // through untouched.
+  int frame_count = 0;
+  capturer_->SetRotation(webrtc::kVideoRotation_90);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  EXPECT_EQ(capturer_->GetRotation(), renderer_.rotation());
+
+  // Add another sink that wants frames to be rotated.
+  cricket::FakeVideoRenderer renderer2;
+  wants.rotation_applied = true;
+  capturer_->AddOrUpdateSink(&renderer2, wants);
+
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(++frame_count, renderer_.num_rendered_frames());
+  EXPECT_EQ(1, renderer2.num_rendered_frames());
+  EXPECT_EQ(webrtc::kVideoRotation_0, renderer_.rotation());
+  EXPECT_EQ(webrtc::kVideoRotation_0, renderer2.rotation());
+}
+
+// TODO(nisse): This test doesn't quite fit here. It tests two things:
+// Aggregation of VideoSinkWants, which is the responsibility of
+// VideoBroadcaster, and translation of VideoSinkWants to actual
+// resolution, which is the responsibility of the VideoAdapter.
+//
+// Steps resolution down twice via max_pixel_count, checks that adding a sink
+// with default wants does not change the chosen resolution, steps back up via
+// target_pixel_count/max_pixel_count, and finally resets wants to return to
+// the resolution the camera was opened with.
+TEST_F(VideoCapturerTest, SinkWantsMaxPixelAndMaxPixelCountStepUp) {
+  EXPECT_EQ(cricket::CS_RUNNING,
+            capturer_->Start(cricket::VideoFormat(
+                1280, 720, cricket::VideoFormat::FpsToInterval(30),
+                cricket::FOURCC_I420)));
+  EXPECT_TRUE(capturer_->IsRunning());
+
+  EXPECT_EQ(0, renderer_.num_rendered_frames());
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(1, renderer_.num_rendered_frames());
+  EXPECT_EQ(1280, renderer_.width());
+  EXPECT_EQ(720, renderer_.height());
+
+  // Request a lower resolution. The output resolution will have a resolution
+  // with less than or equal to |wants.max_pixel_count| depending on how the
+  // capturer can scale the input frame size.
+  rtc::VideoSinkWants wants;
+  wants.max_pixel_count = 1280 * 720 * 3 / 5;
+  capturer_->AddOrUpdateSink(&renderer_, wants);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(2, renderer_.num_rendered_frames());
+  EXPECT_EQ(960, renderer_.width());
+  EXPECT_EQ(540, renderer_.height());
+
+  // Request a lower resolution.
+  wants.max_pixel_count = (renderer_.width() * renderer_.height() * 3) / 5;
+  capturer_->AddOrUpdateSink(&renderer_, wants);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(3, renderer_.num_rendered_frames());
+  EXPECT_EQ(640, renderer_.width());
+  EXPECT_EQ(360, renderer_.height());
+
+  // Adding a new renderer should not affect resolution.
+  cricket::FakeVideoRenderer renderer2;
+  capturer_->AddOrUpdateSink(&renderer2, rtc::VideoSinkWants());
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(4, renderer_.num_rendered_frames());
+  EXPECT_EQ(640, renderer_.width());
+  EXPECT_EQ(360, renderer_.height());
+  EXPECT_EQ(1, renderer2.num_rendered_frames());
+  EXPECT_EQ(640, renderer2.width());
+  EXPECT_EQ(360, renderer2.height());
+
+  // Request higher resolution.
+  wants.target_pixel_count.emplace((wants.max_pixel_count * 5) / 3);
+  wants.max_pixel_count = wants.max_pixel_count * 4;
+  capturer_->AddOrUpdateSink(&renderer_, wants);
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(5, renderer_.num_rendered_frames());
+  EXPECT_EQ(960, renderer_.width());
+  EXPECT_EQ(540, renderer_.height());
+  EXPECT_EQ(2, renderer2.num_rendered_frames());
+  EXPECT_EQ(960, renderer2.width());
+  EXPECT_EQ(540, renderer2.height());
+
+  // Updating with no wants should not affect resolution.
+  capturer_->AddOrUpdateSink(&renderer2, rtc::VideoSinkWants());
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(6, renderer_.num_rendered_frames());
+  EXPECT_EQ(960, renderer_.width());
+  EXPECT_EQ(540, renderer_.height());
+  EXPECT_EQ(3, renderer2.num_rendered_frames());
+  EXPECT_EQ(960, renderer2.width());
+  EXPECT_EQ(540, renderer2.height());
+
+  // But resetting the wants should reset the resolution to what the camera is
+  // opened with.
+  capturer_->AddOrUpdateSink(&renderer_, rtc::VideoSinkWants());
+  EXPECT_TRUE(capturer_->CaptureFrame());
+  EXPECT_EQ(7, renderer_.num_rendered_frames());
+  EXPECT_EQ(1280, renderer_.width());
+  EXPECT_EQ(720, renderer_.height());
+  EXPECT_EQ(4, renderer2.num_rendered_frames());
+  EXPECT_EQ(1280, renderer2.width());
+  EXPECT_EQ(720, renderer2.height());
+}
+
+// GetBestCaptureFormat must honor the requested fourcc: FOURCC_ANY and
+// FOURCC_I420 match the fake capturer's supported formats, FOURCC_MJPG does
+// not.
+TEST_F(VideoCapturerTest, TestFourccMatch) {
+  cricket::VideoFormat desired(640, 480,
+                               cricket::VideoFormat::FpsToInterval(30),
+                               cricket::FOURCC_ANY);
+  cricket::VideoFormat best;
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.fourcc = cricket::FOURCC_MJPG;
+  EXPECT_FALSE(capturer_->GetBestCaptureFormat(desired, &best));
+
+  desired.fourcc = cricket::FOURCC_I420;
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+}
+
+// Resolution matching against the fake capturer's default format list:
+// requests above the maximum clamp to the largest supported format, requests
+// below the minimum clamp to the smallest, and in-between requests snap to the
+// nearest sensible supported resolution.
+TEST_F(VideoCapturerTest, TestResolutionMatch) {
+  cricket::VideoFormat desired(1920, 1080,
+                               cricket::VideoFormat::FpsToInterval(30),
+                               cricket::FOURCC_ANY);
+  cricket::VideoFormat best;
+  // Ask for 1920x1080. Get HD 1280x720 which is the highest.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(1280, best.width);
+  EXPECT_EQ(720, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 360;
+  desired.height = 250;
+  // Ask for a little higher than QVGA. Get QVGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 480;
+  desired.height = 270;
+  // Ask for HVGA. Get VGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 320;
+  desired.height = 240;
+  // Ask for QVGA. Get QVGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 80;
+  desired.height = 60;
+  // Ask for lower than QQVGA. Get QQVGA, which is the lowest.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(160, best.width);
+  EXPECT_EQ(120, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+}
+
+// Resolution matching against a typical "mediocre HD webcam" format list,
+// where the larger formats only support low frame rates. Formats whose fps is
+// far below the request are penalized, so a high-fps request can select a
+// smaller format over a bigger low-fps one.
+TEST_F(VideoCapturerTest, TestHDResolutionMatch) {
+  // Add some HD formats typical of a mediocre HD webcam.
+  std::vector<cricket::VideoFormat> formats;
+  formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(960, 544,
+      cricket::VideoFormat::FpsToInterval(24), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(15), cricket::FOURCC_I420));
+  formats.push_back(cricket::VideoFormat(2592, 1944,
+      cricket::VideoFormat::FpsToInterval(7), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(formats);
+
+  cricket::VideoFormat desired(960, 720,
+                               cricket::VideoFormat::FpsToInterval(30),
+                               cricket::FOURCC_ANY);
+  cricket::VideoFormat best;
+  // Ask for 960x720 30 fps. Get qHD 24 fps
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(960, best.width);
+  EXPECT_EQ(544, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(24), best.interval);
+
+  desired.width = 960;
+  desired.height = 544;
+  desired.interval = cricket::VideoFormat::FpsToInterval(30);
+  // Ask for qHD 30 fps. Get qHD 24 fps
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(960, best.width);
+  EXPECT_EQ(544, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(24), best.interval);
+
+  desired.width = 360;
+  desired.height = 250;
+  desired.interval = cricket::VideoFormat::FpsToInterval(30);
+  // Ask for a little higher than QVGA. Get QVGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 480;
+  desired.height = 270;
+  // Ask for HVGA. Get VGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 320;
+  desired.height = 240;
+  // Ask for QVGA. Get QVGA.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 160;
+  desired.height = 120;
+  // Ask for lower than QVGA. Get QVGA, which is the lowest.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(320, best.width);
+  EXPECT_EQ(240, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 1280;
+  desired.height = 720;
+  // Ask for HD. 720p fps is too low. Get VGA which has 30 fps.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  desired.width = 1280;
+  desired.height = 720;
+  desired.interval = cricket::VideoFormat::FpsToInterval(15);
+  // Ask for HD 15 fps. Fps matches. Get HD
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(1280, best.width);
+  EXPECT_EQ(720, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(15), best.interval);
+
+  desired.width = 1920;
+  desired.height = 1080;
+  desired.interval = cricket::VideoFormat::FpsToInterval(30);
+  // Ask for 1080p. Fps of HD formats is too low. Get VGA which can do 30 fps.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(desired, &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+}
+
+// Some cameras support 320x240 and 320x640. Verify we choose 320x240.
+// The result must be the same regardless of the order in which the supported
+// formats are listed.
+TEST_F(VideoCapturerTest, TestStrangeFormats) {
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 640,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  std::vector<cricket::VideoFormat> required_formats;
+  required_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  required_formats.push_back(cricket::VideoFormat(320, 200,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  required_formats.push_back(cricket::VideoFormat(320, 180,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  cricket::VideoFormat best;
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+  }
+
+  // Same formats in the reverse order: expect the same choice.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(320, 640,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+  }
+}
+
+// Some cameras only have very low fps. Verify we choose something sensible:
+// when every format is low-fps, the requested resolution wins; as soon as a
+// decent-fps small format exists, the low-fps larger formats are avoided.
+TEST_F(VideoCapturerTest, TestPoorFpsFormats) {
+  // all formats are low framerate
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(10), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(7), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(2), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  std::vector<cricket::VideoFormat> required_formats;
+  required_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  cricket::VideoFormat best;
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // Increase framerate of 320x240. Expect low fps VGA avoided.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(7), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(2), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+  }
+}
+
+// Some cameras support same size with different frame rates. Verify we choose
+// the frame rate properly: each request should get back exactly the interval
+// it asked for.
+TEST_F(VideoCapturerTest, TestSameSizeDifferentFpsFormats) {
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(10), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(20), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(320, 240,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  std::vector<cricket::VideoFormat> required_formats = supported_formats;
+  cricket::VideoFormat best;
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(320, best.width);
+    EXPECT_EQ(240, best.height);
+    EXPECT_EQ(required_formats[i].interval, best.interval);
+  }
+}
+
+// Some cameras support the correct resolution but at a lower fps than
+// we'd like. This tests we get the expected resolution and fps: fps at or
+// above the request is preferred (a slightly smaller resolution may be chosen
+// to get it), and a low-fps request may be bumped up to a supported rate.
+TEST_F(VideoCapturerTest, TestFpsFormats) {
+  // We have VGA but low fps. Choose VGA, not HD
+  std::vector<cricket::VideoFormat> supported_formats;
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(15), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  std::vector<cricket::VideoFormat> required_formats;
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_ANY));
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(20), cricket::FOURCC_ANY));
+  required_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(10), cricket::FOURCC_ANY));
+  cricket::VideoFormat best;
+
+  // Expect 30 fps to choose 30 fps format.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[0], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(400, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  // Expect 20 fps to choose 30 fps format.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[1], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(400, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(30), best.interval);
+
+  // Expect 10 fps to choose 15 fps format and set fps to 15.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[2], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(15), best.interval);
+
+  // We have VGA 60 fps and 15 fps. Choose best fps.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(60), cricket::FOURCC_MJPG));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(15), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Expect 30 fps to choose 60 fps format and will set best fps to 60.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[0], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(60), best.interval);
+
+  // Expect 20 fps to choose 60 fps format, and will set best fps to 60.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[1], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(60), best.interval);
+
+  // Expect 10 fps to choose 15 fps.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[2], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(480, best.height);
+  EXPECT_EQ(cricket::VideoFormat::FpsToInterval(15), best.interval);
+}
+
+// Aspect-ratio matching: 4x3, 16x10 and 16x9 requests should be respected
+// where the device supports them; a 16x9 request only maps to a 16x9 format
+// when one is actually available (the 16x9-HD case at the end).
+TEST_F(VideoCapturerTest, TestRequest16x10_9) {
+  std::vector<cricket::VideoFormat> supported_formats;
+  // We do not support HD, expect 4x3 for 4x3, 16x10, and 16x9 requests.
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  std::vector<cricket::VideoFormat> required_formats = supported_formats;
+  cricket::VideoFormat best;
+  // Expect 4x3, 16x10, and 16x9 requests are respected.
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // We do not support 16x9 HD, expect 4x3 for 4x3, 16x10, and 16x9 requests.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(960, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Expect 4x3, 16x10, and 16x9 requests are respected.
+  for (size_t i = 0; i < required_formats.size(); ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // We support 16x9HD, Expect 4x3, 16x10, and 16x9 requests are respected.
+  supported_formats.clear();
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 480,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 400,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(640, 360,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+
+  // Expect 4x3 for 4x3 and 16x10 requests.
+  for (size_t i = 0; i < required_formats.size() - 1; ++i) {
+    EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[i], &best));
+    EXPECT_EQ(required_formats[i].width, best.width);
+    EXPECT_EQ(required_formats[i].height, best.height);
+  }
+
+  // Expect 16x9 for 16x9 request.
+  EXPECT_TRUE(capturer_->GetBestCaptureFormat(required_formats[2], &best));
+  EXPECT_EQ(640, best.width);
+  EXPECT_EQ(360, best.height);
+}
+
+// Returns true iff |formats| contains at least one HD format, where "HD" is
+// defined by height alone (>= kMinHdHeight); width is ignored.
+bool HdFormatInList(const std::vector<cricket::VideoFormat>& formats) {
+  for (const cricket::VideoFormat& format : formats) {
+    if (format.height >= kMinHdHeight) {
+      return true;
+    }
+  }
+  return false;
+}
+
+// ConstrainSupportedFormats() should filter out HD formats only while the
+// camera whitelist is enabled; with the list disabled the HD format must
+// survive the same constraint.
+TEST_F(VideoCapturerTest, Whitelist) {
+  // The definition of HD only applies to the height. Set the HD width to the
+  // smallest legal number to document this fact in this test.
+  const int kMinHdWidth = 1;
+  cricket::VideoFormat hd_format(kMinHdWidth,
+                                 kMinHdHeight,
+                                 cricket::VideoFormat::FpsToInterval(30),
+                                 cricket::FOURCC_I420);
+  cricket::VideoFormat vga_format(640, 480,
+                                  cricket::VideoFormat::FpsToInterval(30),
+                                  cricket::FOURCC_I420);
+  std::vector<cricket::VideoFormat> formats = *capturer_->GetSupportedFormats();
+  formats.push_back(hd_format);
+
+  // Enable whitelist. Expect HD not in list.
+  capturer_->set_enable_camera_list(true);
+  capturer_->ResetSupportedFormats(formats);
+  EXPECT_TRUE(HdFormatInList(*capturer_->GetSupportedFormats()));
+  capturer_->ConstrainSupportedFormats(vga_format);
+  EXPECT_FALSE(HdFormatInList(*capturer_->GetSupportedFormats()));
+
+  // Disable whitelist. Expect HD in list.
+  capturer_->set_enable_camera_list(false);
+  capturer_->ResetSupportedFormats(formats);
+  EXPECT_TRUE(HdFormatInList(*capturer_->GetSupportedFormats()));
+  capturer_->ConstrainSupportedFormats(vga_format);
+  EXPECT_TRUE(HdFormatInList(*capturer_->GetSupportedFormats()));
+}
+
+// If constraining would remove every supported format (an HD-only camera
+// constrained to VGA), the filter must refuse to apply rather than leave the
+// camera unusable; once a passing format exists, filtering works normally.
+TEST_F(VideoCapturerTest, BlacklistAllFormats) {
+  cricket::VideoFormat vga_format(640, 480,
+                                  cricket::VideoFormat::FpsToInterval(30),
+                                  cricket::FOURCC_I420);
+  std::vector<cricket::VideoFormat> supported_formats;
+  // Mock a device that only supports HD formats.
+  supported_formats.push_back(cricket::VideoFormat(1280, 720,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  supported_formats.push_back(cricket::VideoFormat(1920, 1080,
+      cricket::VideoFormat::FpsToInterval(30), cricket::FOURCC_I420));
+  capturer_->ResetSupportedFormats(supported_formats);
+  EXPECT_EQ(2u, capturer_->GetSupportedFormats()->size());
+  // Now, enable the list, which would exclude both formats. However, since
+  // only HD formats are available, we refuse to filter at all, so we don't
+  // break this camera.
+  capturer_->set_enable_camera_list(true);
+  capturer_->ConstrainSupportedFormats(vga_format);
+  EXPECT_EQ(2u, capturer_->GetSupportedFormats()->size());
+  // To make sure it's not just the camera list being broken, add in VGA and
+  // try again. This time, only the VGA format should be there.
+  supported_formats.push_back(vga_format);
+  capturer_->ResetSupportedFormats(supported_formats);
+  ASSERT_EQ(1u, capturer_->GetSupportedFormats()->size());
+  EXPECT_EQ(vga_format.height, capturer_->GetSupportedFormats()->at(0).height);
+}
diff --git a/third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h b/third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h
new file mode 100644
index 0000000000..219e95bb0d
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocapturerfactory.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOCAPTURERFACTORY_H_
+#define MEDIA_BASE_VIDEOCAPTURERFACTORY_H_
+
+#include <memory>
+
+#include "media/base/device.h"
+
+namespace cricket {
+
+class VideoCapturer;
+
+class VideoDeviceCapturerFactory {
+ public:
+ VideoDeviceCapturerFactory() {}
+ virtual ~VideoDeviceCapturerFactory() {}
+
+ virtual std::unique_ptr<VideoCapturer> Create(const Device& device) = 0;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOCAPTURERFACTORY_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videocommon.cc b/third_party/libwebrtc/webrtc/media/base/videocommon.cc
new file mode 100644
index 0000000000..e5168b55ca
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocommon.cc
@@ -0,0 +1,79 @@
+/*
+ * Copyright (c) 2010 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videocommon.h"
+
+#include <limits.h> // For INT_MAX
+#include <math.h>
+#include <sstream>
+
+#include "rtc_base/arraysize.h"
+
+namespace cricket {
+
// One alias mapping: a vendor/platform-specific fourcc spelling and the
// canonical fourcc it should be treated as.
struct FourCCAliasEntry {
  uint32_t alias;
  uint32_t canonical;
};

// Table of all recognized aliases, scanned linearly by CanonicalFourCC().
// Order does not matter; the table is small.
static const FourCCAliasEntry kFourCCAliases[] = {
  {FOURCC_IYUV, FOURCC_I420},
  {FOURCC_YU16, FOURCC_I422},
  {FOURCC_YU24, FOURCC_I444},
  {FOURCC_YUYV, FOURCC_YUY2},
  {FOURCC_YUVS, FOURCC_YUY2},
  {FOURCC_HDYC, FOURCC_UYVY},
  {FOURCC_2VUY, FOURCC_UYVY},
  {FOURCC_JPEG, FOURCC_MJPG},  // Note: JPEG has DHT while MJPG does not.
  {FOURCC_DMB1, FOURCC_MJPG},
  {FOURCC_BA81, FOURCC_BGGR},
  {FOURCC_RGB3, FOURCC_RAW},
  {FOURCC_BGR3, FOURCC_24BG},
  {FOURCC_CM32, FOURCC_BGRA},
  {FOURCC_CM24, FOURCC_RAW},
};
+
+uint32_t CanonicalFourCC(uint32_t fourcc) {
+ for (uint32_t i = 0; i < arraysize(kFourCCAliases); ++i) {
+ if (kFourCCAliases[i].alias == fourcc) {
+ return kFourCCAliases[i].canonical;
+ }
+ }
+ // Not an alias, so return it as-is.
+ return fourcc;
+}
+
// The C++ standard requires a namespace-scope definition of static const
// integral types even when they are initialized in the declaration (see
// [class.static.data]/4), but MSVC with /Ze is non-conforming and treats that
// as a multiply defined symbol error. See Also:
// http://msdn.microsoft.com/en-us/library/34h23df8.aspx
#ifndef _MSC_EXTENSIONS
// Out-of-line definition only; the value comes from the in-class initializer.
const int64_t VideoFormat::kMinimumInterval;  // Initialized in header.
#endif
+
+std::string VideoFormat::ToString() const {
+ std::string fourcc_name = GetFourccName(fourcc) + " ";
+ for (std::string::const_iterator i = fourcc_name.begin();
+ i < fourcc_name.end(); ++i) {
+ // Test character is printable; Avoid isprint() which asserts on negatives.
+ if (*i < 32 || *i >= 127) {
+ fourcc_name = "";
+ break;
+ }
+ }
+
+ std::ostringstream ss;
+ ss << fourcc_name << width << "x" << height << "x"
+ << IntervalToFpsFloat(interval);
+ return ss.str();
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videocommon.h b/third_party/libwebrtc/webrtc/media/base/videocommon.h
new file mode 100644
index 0000000000..264482b7c8
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocommon.h
@@ -0,0 +1,229 @@
+/*
+ * Copyright (c) 2004 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Common definition for video, including fourcc and VideoFormat.
+
+#ifndef MEDIA_BASE_VIDEOCOMMON_H_
+#define MEDIA_BASE_VIDEOCOMMON_H_
+
+#include <stdint.h>
+
+#include <string>
+
+#include "rtc_base/timeutils.h"
+
+namespace cricket {
+
// Placeholder SSRC used when a real one is not yet known.
// TODO(janahan): For now, a hard-coded ssrc is used as the video ssrc.
// This is because when the video frame is passed to the mediaprocessor for
// processing, it doesn't have the correct ssrc. Since currently only Tx
// Video processing is supported, this is ok. When we switch over to trigger
// from capturer, this should be fixed and this const removed.
const uint32_t kDummyVideoSsrc = 0xFFFFFFFF;

// Converts fps to a frame interval in nanoseconds; 0 fps is capped to the
// minimum interval (10k fps) instead of dividing by zero.
#define FPS_TO_INTERVAL(fps) \
    (fps ? rtc::kNumNanosecsPerSec / fps : \
     rtc::kNumNanosecsPerSec / 10000)

//////////////////////////////////////////////////////////////////////////////
// Definition of FourCC codes
//////////////////////////////////////////////////////////////////////////////
// Convert four characters to a FourCC code, least-significant byte first.
// Needs to be a macro otherwise the OS X compiler complains when the kFormat*
// constants are used in a switch.
#define FOURCC(a, b, c, d) \
  ((static_cast<uint32_t>(a)) | (static_cast<uint32_t>(b) << 8) | \
   (static_cast<uint32_t>(c) << 16) | (static_cast<uint32_t>(d) << 24))
+// Some pages discussing FourCC codes:
+// http://www.fourcc.org/yuv.php
+// http://v4l2spec.bytesex.org/spec/book1.htm
+// http://developer.apple.com/quicktime/icefloe/dispatch020.html
+// http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12
+// http://people.xiph.org/~xiphmont/containers/nut/nut4cc.txt
+
// FourCC codes grouped according to implementation efficiency.
// Primary formats should convert in 1 efficient step.
// Secondary formats are converted in 2 steps.
// Auxiliary formats call primary converters.
enum FourCC {
  // 9 Primary YUV formats: 5 planar, 2 biplanar, 2 packed.
  FOURCC_I420 = FOURCC('I', '4', '2', '0'),
  FOURCC_I422 = FOURCC('I', '4', '2', '2'),
  FOURCC_I444 = FOURCC('I', '4', '4', '4'),
  FOURCC_I411 = FOURCC('I', '4', '1', '1'),
  FOURCC_I400 = FOURCC('I', '4', '0', '0'),
  FOURCC_NV21 = FOURCC('N', 'V', '2', '1'),
  FOURCC_NV12 = FOURCC('N', 'V', '1', '2'),
  FOURCC_YUY2 = FOURCC('Y', 'U', 'Y', '2'),
  FOURCC_UYVY = FOURCC('U', 'Y', 'V', 'Y'),

  // Secondary YUV format: row biplanar.
  FOURCC_M420 = FOURCC('M', '4', '2', '0'),

  // 9 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp.
  FOURCC_ARGB = FOURCC('A', 'R', 'G', 'B'),
  FOURCC_BGRA = FOURCC('B', 'G', 'R', 'A'),
  FOURCC_ABGR = FOURCC('A', 'B', 'G', 'R'),
  FOURCC_24BG = FOURCC('2', '4', 'B', 'G'),
  FOURCC_RAW = FOURCC('r', 'a', 'w', ' '),
  FOURCC_RGBA = FOURCC('R', 'G', 'B', 'A'),
  FOURCC_RGBP = FOURCC('R', 'G', 'B', 'P'),  // bgr565.
  FOURCC_RGBO = FOURCC('R', 'G', 'B', 'O'),  // abgr1555.
  FOURCC_R444 = FOURCC('R', '4', '4', '4'),  // argb4444.

  // 4 Secondary RGB formats: 4 Bayer Patterns.
  FOURCC_RGGB = FOURCC('R', 'G', 'G', 'B'),
  FOURCC_BGGR = FOURCC('B', 'G', 'G', 'R'),
  FOURCC_GRBG = FOURCC('G', 'R', 'B', 'G'),
  FOURCC_GBRG = FOURCC('G', 'B', 'R', 'G'),

  // 1 Primary Compressed YUV format.
  FOURCC_MJPG = FOURCC('M', 'J', 'P', 'G'),

  // Auxiliary YUV variations: U/V planes swapped (YV*), Linux I420 spelling,
  // and JPEG (full) range variants (J*).
  FOURCC_YV12 = FOURCC('Y', 'V', '1', '2'),
  FOURCC_YV16 = FOURCC('Y', 'V', '1', '6'),
  FOURCC_YV24 = FOURCC('Y', 'V', '2', '4'),
  FOURCC_YU12 = FOURCC('Y', 'U', '1', '2'),  // Linux version of I420.
  FOURCC_J420 = FOURCC('J', '4', '2', '0'),
  FOURCC_J400 = FOURCC('J', '4', '0', '0'),

  // 14 Auxiliary aliases. CanonicalFourCC() maps these to canonical fourcc.
  FOURCC_IYUV = FOURCC('I', 'Y', 'U', 'V'),  // Alias for I420.
  FOURCC_YU16 = FOURCC('Y', 'U', '1', '6'),  // Alias for I422.
  FOURCC_YU24 = FOURCC('Y', 'U', '2', '4'),  // Alias for I444.
  FOURCC_YUYV = FOURCC('Y', 'U', 'Y', 'V'),  // Alias for YUY2.
  FOURCC_YUVS = FOURCC('y', 'u', 'v', 's'),  // Alias for YUY2 on Mac.
  FOURCC_HDYC = FOURCC('H', 'D', 'Y', 'C'),  // Alias for UYVY.
  FOURCC_2VUY = FOURCC('2', 'v', 'u', 'y'),  // Alias for UYVY on Mac.
  FOURCC_JPEG = FOURCC('J', 'P', 'E', 'G'),  // Alias for MJPG.
  FOURCC_DMB1 = FOURCC('d', 'm', 'b', '1'),  // Alias for MJPG on Mac.
  FOURCC_BA81 = FOURCC('B', 'A', '8', '1'),  // Alias for BGGR.
  FOURCC_RGB3 = FOURCC('R', 'G', 'B', '3'),  // Alias for RAW.
  FOURCC_BGR3 = FOURCC('B', 'G', 'R', '3'),  // Alias for 24BG.
  FOURCC_CM32 = FOURCC(0, 0, 0, 32),  // Alias for BGRA kCMPixelFormat_32ARGB
  FOURCC_CM24 = FOURCC(0, 0, 0, 24),  // Alias for RAW kCMPixelFormat_24RGB

  // 1 Auxiliary compressed YUV format set aside for capturer.
  FOURCC_H264 = FOURCC('H', '2', '6', '4'),
};
+
// Match any fourcc.

// We move this out of the enum because using it in many places caused
// the compiler to get grumpy, presumably since the above enum is
// backed by an int. Note that 0xFFFFFFFF is deliberately non-printable,
// which VideoFormat::ToString() uses to suppress it.
static const uint32_t FOURCC_ANY = 0xFFFFFFFF;

// Converts fourcc aliases into canonical ones (per the alias table in
// videocommon.cc); non-aliases are returned unchanged.
uint32_t CanonicalFourCC(uint32_t fourcc);
+
// Get FourCC code as a string: decodes the four ASCII characters packed into
// |fourcc|, least-significant byte first (FOURCC('I','4','2','0') -> "I420").
// No validation is performed; non-printable bytes are copied through.
inline std::string GetFourccName(uint32_t fourcc) {
  std::string name;
  for (int shift = 0; shift <= 24; shift += 8) {
    name.push_back(static_cast<char>((fourcc >> shift) & 0xFF));
  }
  return name;
}
+
+//////////////////////////////////////////////////////////////////////////////
+// Definition of VideoFormat.
+//////////////////////////////////////////////////////////////////////////////
+
// VideoFormat with Plain Old Data for global variables: a POD aggregate
// (no constructors) so instances can be statically initialized.
struct VideoFormatPod {
  int width;  // Number of pixels.
  int height;  // Number of pixels.
  int64_t interval;  // Nanoseconds between frames.
  uint32_t fourcc;  // Color space. FOURCC_ANY means that any color space is OK.
};
+
+struct VideoFormat : VideoFormatPod {
+ static const int64_t kMinimumInterval =
+ rtc::kNumNanosecsPerSec / 10000; // 10k fps.
+
+ VideoFormat() {
+ Construct(0, 0, 0, 0);
+ }
+
+ VideoFormat(int w, int h, int64_t interval_ns, uint32_t cc) {
+ Construct(w, h, interval_ns, cc);
+ }
+
+ explicit VideoFormat(const VideoFormatPod& format) {
+ Construct(format.width, format.height, format.interval, format.fourcc);
+ }
+
+ void Construct(int w, int h, int64_t interval_ns, uint32_t cc) {
+ width = w;
+ height = h;
+ interval = interval_ns;
+ fourcc = cc;
+ }
+
+ static int64_t FpsToInterval(int fps) {
+ return fps ? rtc::kNumNanosecsPerSec / fps : kMinimumInterval;
+ }
+
+ static int IntervalToFps(int64_t interval) {
+ if (!interval) {
+ return 0;
+ }
+ return static_cast<int>(rtc::kNumNanosecsPerSec / interval);
+ }
+
+ static float IntervalToFpsFloat(int64_t interval) {
+ if (!interval) {
+ return 0.f;
+ }
+ return static_cast<float>(rtc::kNumNanosecsPerSec) /
+ static_cast<float>(interval);
+ }
+
+ bool operator==(const VideoFormat& format) const {
+ return width == format.width && height == format.height &&
+ interval == format.interval && fourcc == format.fourcc;
+ }
+
+ bool operator!=(const VideoFormat& format) const {
+ return !(*this == format);
+ }
+
+ bool operator<(const VideoFormat& format) const {
+ return (fourcc < format.fourcc) ||
+ (fourcc == format.fourcc && width < format.width) ||
+ (fourcc == format.fourcc && width == format.width &&
+ height < format.height) ||
+ (fourcc == format.fourcc && width == format.width &&
+ height == format.height && interval > format.interval);
+ }
+
+ int framerate() const { return IntervalToFps(interval); }
+
+ // Check if both width and height are 0.
+ bool IsSize0x0() const { return 0 == width && 0 == height; }
+
+ // Check if this format is less than another one by comparing the resolution
+ // and frame rate.
+ bool IsPixelRateLess(const VideoFormat& format) const {
+ return width * height * framerate() <
+ format.width * format.height * format.framerate();
+ }
+
+ // Get a string presentation in the form of "fourcc width x height x fps"
+ std::string ToString() const;
+};
+
+} // namespace cricket
+
+#endif // MEDIA_BASE_VIDEOCOMMON_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc b/third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc
new file mode 100644
index 0000000000..0e29375b2a
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videocommon_unittest.cc
@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2008 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videocommon.h"
+#include "rtc_base/gunit.h"
+
+namespace cricket {
+
// Every alias in the table must map to its canonical fourcc, and any value
// outside the table (including the FOURCC_ANY sentinel) must pass through.
TEST(VideoCommonTest, TestCanonicalFourCC) {
  // Canonical fourccs are not changed.
  EXPECT_EQ(FOURCC_I420, CanonicalFourCC(FOURCC_I420));
  // The special FOURCC_ANY value is not changed.
  EXPECT_EQ(FOURCC_ANY, CanonicalFourCC(FOURCC_ANY));
  // Aliases are translated to the canonical equivalent.
  EXPECT_EQ(FOURCC_I420, CanonicalFourCC(FOURCC_IYUV));
  EXPECT_EQ(FOURCC_I422, CanonicalFourCC(FOURCC_YU16));
  EXPECT_EQ(FOURCC_I444, CanonicalFourCC(FOURCC_YU24));
  EXPECT_EQ(FOURCC_YUY2, CanonicalFourCC(FOURCC_YUYV));
  EXPECT_EQ(FOURCC_YUY2, CanonicalFourCC(FOURCC_YUVS));
  EXPECT_EQ(FOURCC_UYVY, CanonicalFourCC(FOURCC_HDYC));
  EXPECT_EQ(FOURCC_UYVY, CanonicalFourCC(FOURCC_2VUY));
  EXPECT_EQ(FOURCC_MJPG, CanonicalFourCC(FOURCC_JPEG));
  EXPECT_EQ(FOURCC_MJPG, CanonicalFourCC(FOURCC_DMB1));
  EXPECT_EQ(FOURCC_BGGR, CanonicalFourCC(FOURCC_BA81));
  EXPECT_EQ(FOURCC_RAW, CanonicalFourCC(FOURCC_RGB3));
  EXPECT_EQ(FOURCC_24BG, CanonicalFourCC(FOURCC_BGR3));
  EXPECT_EQ(FOURCC_BGRA, CanonicalFourCC(FOURCC_CM32));
  EXPECT_EQ(FOURCC_RAW, CanonicalFourCC(FOURCC_CM24));
}
+
// Test conversion between frame interval (ns) and fps, including the
// 0-fps / 0-interval edge cases.
TEST(VideoCommonTest, TestVideoFormatFps) {
  EXPECT_EQ(VideoFormat::kMinimumInterval, VideoFormat::FpsToInterval(0));
  EXPECT_EQ(rtc::kNumNanosecsPerSec / 20, VideoFormat::FpsToInterval(20));
  EXPECT_EQ(20, VideoFormat::IntervalToFps(rtc::kNumNanosecsPerSec / 20));
  EXPECT_EQ(0, VideoFormat::IntervalToFps(0));
}
+
// IsSize0x0 is true only for the default-constructed format where both
// dimensions are zero.
TEST(VideoCommonTest, TestVideoFormatIsSize0x0) {
  VideoFormat format;
  EXPECT_TRUE(format.IsSize0x0());
  format.width = 320;
  EXPECT_FALSE(format.IsSize0x0());
}
+
// Test ToString: the fourcc prefix is printed only when all four characters
// are printable ASCII; FOURCC_ANY (0xFFFFFFFF) is suppressed.
TEST(VideoCommonTest, TestVideoFormatToString) {
  VideoFormat format;
  EXPECT_EQ("0x0x0", format.ToString());

  format.fourcc = FOURCC_I420;
  format.width = 640;
  format.height = 480;
  format.interval = VideoFormat::FpsToInterval(20);
  EXPECT_EQ("I420 640x480x20", format.ToString());

  // Non-printable fourcc: only the geometry/fps portion is emitted.
  format.fourcc = FOURCC_ANY;
  format.width = 640;
  format.height = 480;
  format.interval = VideoFormat::FpsToInterval(20);
  EXPECT_EQ("640x480x20", format.ToString());
}
+
+// Test comparison.
+TEST(VideoCommonTest, TestVideoFormatCompare) {
+ VideoFormat format(640, 480, VideoFormat::FpsToInterval(20), FOURCC_I420);
+ VideoFormat format2;
+ EXPECT_NE(format, format2);
+
+ // Same pixelrate, different fourcc.
+ format2 = format;
+ format2.fourcc = FOURCC_YUY2;
+ EXPECT_NE(format, format2);
+ EXPECT_FALSE(format.IsPixelRateLess(format2) ||
+ format2.IsPixelRateLess(format2));
+
+ format2 = format;
+ format2.interval /= 2;
+ EXPECT_TRUE(format.IsPixelRateLess(format2));
+
+ format2 = format;
+ format2.width *= 2;
+ EXPECT_TRUE(format.IsPixelRateLess(format2));
+}
+
+} // namespace cricket
diff --git a/third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h b/third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h
new file mode 100644
index 0000000000..d49f73b206
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videoengine_unittest.h
@@ -0,0 +1,951 @@
+/*
+ * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ // NOLINT
+#define MEDIA_BASE_VIDEOENGINE_UNITTEST_H_
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "call/call.h"
+#include "logging/rtc_event_log/rtc_event_log.h"
+#include "media/base/fakenetworkinterface.h"
+#include "media/base/fakevideocapturer.h"
+#include "media/base/fakevideorenderer.h"
+#include "media/base/mediachannel.h"
+#include "media/base/streamparams.h"
+#include "media/engine/fakewebrtccall.h"
+#include "rtc_base/bytebuffer.h"
+#include "rtc_base/gunit.h"
+#include "rtc_base/timeutils.h"
+
+namespace cricket {
+class WebRtcVideoEncoderFactory;
+class WebRtcVideoDecoderFactory;
+} // namespace cricket
+
// Waits until the fixture's default |renderer_| has rendered |c| frames,
// then checks the last frame was |w|x|h| with no render errors. The macro
// body is terminated by the blank line after the trailing backslash.
#define EXPECT_FRAME_WAIT(c, w, h, t) \
  EXPECT_EQ_WAIT((c), renderer_.num_rendered_frames(), (t)); \
  EXPECT_EQ((w), renderer_.width()); \
  EXPECT_EQ((h), renderer_.height()); \
  EXPECT_EQ(0, renderer_.errors()); \

// Same as EXPECT_FRAME_WAIT but against an explicit renderer |r|.
#define EXPECT_FRAME_ON_RENDERER_WAIT(r, c, w, h, t) \
  EXPECT_EQ_WAIT((c), (r).num_rendered_frames(), (t)); \
  EXPECT_EQ((w), (r).width()); \
  EXPECT_EQ((h), (r).height()); \
  EXPECT_EQ(0, (r).errors()); \

// Waits until renderer |r| has rendered at least |c| frames of size |w|x|h|.
#define EXPECT_GT_FRAME_ON_RENDERER_WAIT(r, c, w, h, t) \
  EXPECT_TRUE_WAIT((r).num_rendered_frames() >= (c) && \
                   (w) == (r).width() && \
                   (h) == (r).height(), (t)); \
  EXPECT_EQ(0, (r).errors());
+
// Shared constants for the video channel tests below.
static const uint32_t kTimeout = 5000U;  // ms; max wait for async EXPECTs.
static const uint32_t kDefaultReceiveSsrc = 0;  // Used as the default sink ssrc.
static const uint32_t kSsrc = 1234u;  // Primary send stream.
static const uint32_t kRtxSsrc = 4321u;
static const uint32_t kSsrcs4[] = {1, 2, 3, 4};
static const int kVideoWidth = 640;
static const int kVideoHeight = 360;
static const int kFramerate = 30;
+
+inline bool IsEqualCodec(const cricket::VideoCodec& a,
+ const cricket::VideoCodec& b) {
+ return a.id == b.id && a.name == b.name;
+}
+
+template<class E, class C>
+class VideoMediaChannelTest : public testing::Test,
+ public sigslot::has_slots<> {
+ protected:
+ VideoMediaChannelTest<E, C>()
+ : call_(webrtc::Call::Create(webrtc::Call::Config(&event_log_))),
+ engine_(std::unique_ptr<cricket::WebRtcVideoEncoderFactory>(),
+ std::unique_ptr<cricket::WebRtcVideoDecoderFactory>()) {}
+
+ virtual cricket::VideoCodec DefaultCodec() = 0;
+
+ virtual cricket::StreamParams DefaultSendStreamParams() {
+ return cricket::StreamParams::CreateLegacy(kSsrc);
+ }
+
  // Builds the channel under test: creates a VideoMediaChannel from the
  // engine, wires it to the fake network interface, installs the engine's
  // receive codecs, adds the default send stream, and attaches a running
  // 640x480 fake capturer so tests can push frames immediately.
  virtual void SetUp() {
    cricket::MediaConfig media_config;
    // Disabling cpu overuse detection actually disables quality scaling too; it
    // implies DegradationPreference kMaintainResolution. Automatic scaling
    // needs to be disabled, otherwise, tests which check the size of received
    // frames become flaky.
    media_config.video.enable_cpu_overuse_detection = false;
    channel_.reset(engine_.CreateChannel(call_.get(), media_config,
                                         cricket::VideoOptions()));
    channel_->OnReadyToSend(true);
    EXPECT_TRUE(channel_.get() != NULL);
    // Loop outgoing packets back into the channel via the fake network.
    network_interface_.SetDestination(channel_.get());
    channel_->SetInterface(&network_interface_);
    media_error_ = cricket::VideoMediaChannel::ERROR_NONE;
    cricket::VideoRecvParameters parameters;
    parameters.codecs = engine_.codecs();
    channel_->SetRecvParameters(parameters);
    EXPECT_TRUE(channel_->AddSendStream(DefaultSendStreamParams()));
    video_capturer_.reset(CreateFakeVideoCapturer());
    cricket::VideoFormat format(640, 480,
                                cricket::VideoFormat::FpsToInterval(kFramerate),
                                cricket::FOURCC_I420);
    EXPECT_EQ(cricket::CS_RUNNING, video_capturer_->Start(format));
    EXPECT_TRUE(
        channel_->SetVideoSend(kSsrc, true, nullptr, video_capturer_.get()));
  }
+
+ virtual cricket::FakeVideoCapturer* CreateFakeVideoCapturer() {
+ return new cricket::FakeVideoCapturer();
+ }
+
+ // Utility method to setup an additional stream to send and receive video.
+ // Used to test send and recv between two streams.
+ void SetUpSecondStream() {
+ SetUpSecondStreamWithNoRecv();
+ // Setup recv for second stream.
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc + 2)));
+ // Make the second renderer available for use by a new stream.
+ EXPECT_TRUE(channel_->SetSink(kSsrc + 2, &renderer2_));
+ }
+ // Setup an additional stream just to send video. Defer add recv stream.
+ // This is required if you want to test unsignalled recv of video rtp packets.
+ void SetUpSecondStreamWithNoRecv() {
+ // SetUp() already added kSsrc make sure duplicate SSRCs cant be added.
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
+ EXPECT_FALSE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrc + 2)));
+ // We dont add recv for the second stream.
+
+ // Setup the receive and renderer for second stream after send.
+ video_capturer_2_.reset(CreateFakeVideoCapturer());
+ cricket::VideoFormat format(640, 480,
+ cricket::VideoFormat::FpsToInterval(kFramerate),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_2_->Start(format));
+
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc + 2, true, nullptr,
+ video_capturer_2_.get()));
+ }
+ virtual void TearDown() {
+ channel_.reset();
+ }
+ bool SetDefaultCodec() {
+ return SetOneCodec(DefaultCodec());
+ }
+
+ bool SetOneCodec(int pt, const char* name) {
+ return SetOneCodec(cricket::VideoCodec(pt, name));
+ }
+ bool SetOneCodec(const cricket::VideoCodec& codec) {
+ cricket::VideoFormat capture_format(
+ kVideoWidth, kVideoHeight,
+ cricket::VideoFormat::FpsToInterval(kFramerate), cricket::FOURCC_I420);
+
+ if (video_capturer_) {
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_->Start(capture_format));
+ }
+ if (video_capturer_2_) {
+ EXPECT_EQ(cricket::CS_RUNNING, video_capturer_2_->Start(capture_format));
+ }
+
+ bool sending = channel_->sending();
+ bool success = SetSend(false);
+ if (success) {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(codec);
+ success = channel_->SetSendParameters(parameters);
+ }
+ if (success) {
+ success = SetSend(sending);
+ }
+ return success;
+ }
+ bool SetSend(bool send) {
+ return channel_->SetSend(send);
+ }
+ int DrainOutgoingPackets() {
+ int packets = 0;
+ do {
+ packets = NumRtpPackets();
+ // 100 ms should be long enough.
+ rtc::Thread::Current()->ProcessMessages(100);
+ } while (NumRtpPackets() > packets);
+ return NumRtpPackets();
+ }
+ bool SendFrame() {
+ if (video_capturer_2_) {
+ video_capturer_2_->CaptureFrame();
+ }
+ return video_capturer_.get() &&
+ video_capturer_->CaptureFrame();
+ }
+ bool WaitAndSendFrame(int wait_ms) {
+ bool ret = rtc::Thread::Current()->ProcessMessages(wait_ms);
+ ret &= SendFrame();
+ return ret;
+ }
+ // Sends frames and waits for the decoder to be fully initialized.
+ // Returns the number of frames that were sent.
+ int WaitForDecoder() {
+#if defined(HAVE_OPENMAX)
+ // Send enough frames for the OpenMAX decoder to continue processing, and
+ // return the number of frames sent.
+ // Send frames for a full kTimeout's worth of 15fps video.
+ int frame_count = 0;
+ while (frame_count < static_cast<int>(kTimeout) / 66) {
+ EXPECT_TRUE(WaitAndSendFrame(66));
+ ++frame_count;
+ }
+ return frame_count;
+#else
+ return 0;
+#endif
+ }
+ bool SendCustomVideoFrame(int w, int h) {
+ if (!video_capturer_.get()) return false;
+ return video_capturer_->CaptureCustomFrame(w, h, cricket::FOURCC_I420);
+ }
+ int NumRtpBytes() {
+ return network_interface_.NumRtpBytes();
+ }
+ int NumRtpBytes(uint32_t ssrc) {
+ return network_interface_.NumRtpBytes(ssrc);
+ }
+ int NumRtpPackets() {
+ return network_interface_.NumRtpPackets();
+ }
+ int NumRtpPackets(uint32_t ssrc) {
+ return network_interface_.NumRtpPackets(ssrc);
+ }
+ int NumSentSsrcs() {
+ return network_interface_.NumSentSsrcs();
+ }
+ const rtc::CopyOnWriteBuffer* GetRtpPacket(int index) {
+ return network_interface_.GetRtpPacket(index);
+ }
+ int NumRtcpPackets() {
+ return network_interface_.NumRtcpPackets();
+ }
+ const rtc::CopyOnWriteBuffer* GetRtcpPacket(int index) {
+ return network_interface_.GetRtcpPacket(index);
+ }
  // Returns the RTP payload type of packet |p|, or -1 if parsing fails.
  static int GetPayloadType(const rtc::CopyOnWriteBuffer* p) {
    int pt = -1;
    ParseRtpPacket(p, NULL, &pt, NULL, NULL, NULL, NULL);
    return pt;
  }
  // Minimal RTP header parser for test assertions. Fills any non-null
  // out-param: |x| (extension bit), |pt| (payload type), |seqnum|, |tstamp|,
  // |ssrc|. CSRCs and the header extension (if present) are skipped; when
  // |payload| is non-null the remaining bytes are copied into it. Returns
  // false if |p| is shorter than the header it advertises.
  static bool ParseRtpPacket(const rtc::CopyOnWriteBuffer* p,
                             bool* x,
                             int* pt,
                             int* seqnum,
                             uint32_t* tstamp,
                             uint32_t* ssrc,
                             std::string* payload) {
    rtc::ByteBufferReader buf(p->data<char>(), p->size());
    uint8_t u08 = 0;
    uint16_t u16 = 0;
    uint32_t u32 = 0;

    // Read X and CC fields from the first byte (V/P bits are ignored).
    if (!buf.ReadUInt8(&u08)) return false;
    bool extension = ((u08 & 0x10) != 0);
    uint8_t cc = (u08 & 0x0F);
    if (x) *x = extension;

    // Read PT field (low 7 bits; the marker bit is discarded).
    if (!buf.ReadUInt8(&u08)) return false;
    if (pt) *pt = (u08 & 0x7F);

    // Read Sequence Number field.
    if (!buf.ReadUInt16(&u16)) return false;
    if (seqnum) *seqnum = u16;

    // Read Timestamp field.
    if (!buf.ReadUInt32(&u32)) return false;
    if (tstamp) *tstamp = u32;

    // Read SSRC field.
    if (!buf.ReadUInt32(&u32)) return false;
    if (ssrc) *ssrc = u32;

    // Skip CSRCs (one 32-bit word per CSRC count).
    for (uint8_t i = 0; i < cc; ++i) {
      if (!buf.ReadUInt32(&u32)) return false;
    }

    // Skip extension header.
    if (extension) {
      // Read Profile-specific extension header ID
      if (!buf.ReadUInt16(&u16)) return false;

      // Read Extension header length (counted in 32-bit words).
      if (!buf.ReadUInt16(&u16)) return false;
      uint16_t ext_header_len = u16;

      // Read Extension header
      for (uint16_t i = 0; i < ext_header_len; ++i) {
        if (!buf.ReadUInt32(&u32)) return false;
      }
    }

    if (payload) {
      return buf.ReadString(payload, buf.Length());
    }
    return true;
  }
+
  // Parses all RTCP packets from start_index to stop_index (exclusive) and
  // counts FIR messages: payload-specific feedback with PT=206 and FMT=4
  // (RFC 5104), plus the legacy full INTRA-frame request PT=192. Compound
  // RTCP packets are walked sub-packet by sub-packet. If successful, sets
  // |fir_count| (when non-null) and returns true.
  bool CountRtcpFir(int start_index, int stop_index, int* fir_count) {
    int count = 0;
    for (int i = start_index; i < stop_index; ++i) {
      std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtcpPacket(i));
      rtc::ByteBufferReader buf(p->data<char>(), p->size());
      size_t total_len = 0;
      // The packet may be a compound RTCP packet.
      while (total_len < p->size()) {
        // Read FMT, type and length.
        uint8_t fmt = 0;
        uint8_t type = 0;
        uint16_t length = 0;
        if (!buf.ReadUInt8(&fmt)) return false;
        fmt &= 0x1F;  // Low 5 bits of the first byte carry the FMT/count.
        if (!buf.ReadUInt8(&type)) return false;
        if (!buf.ReadUInt16(&length)) return false;
        buf.Consume(length * 4);  // Skip RTCP data.
        // |length| counts 32-bit words excluding the header word.
        total_len += (length + 1) * 4;
        if ((192 == type) || ((206 == type) && (4 == fmt))) {
          ++count;
        }
      }
    }

    if (fir_count) {
      *fir_count = count;
    }
    return true;
  }
+
+ void OnVideoChannelError(uint32_t ssrc,
+ cricket::VideoMediaChannel::Error error) {
+ media_error_ = error;
+ }
+
+ // Test that SetSend works.
+ void SetSend() {
+ EXPECT_FALSE(channel_->sending());
+ EXPECT_TRUE(
+ channel_->SetVideoSend(kSsrc, true, nullptr, video_capturer_.get()));
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_FALSE(channel_->sending());
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->sending());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ EXPECT_TRUE(SetSend(false));
+ EXPECT_FALSE(channel_->sending());
+ }
+ // Test that SetSend fails without codecs being set.
+ void SetSendWithoutCodecs() {
+ EXPECT_FALSE(channel_->sending());
+ EXPECT_FALSE(SetSend(true));
+ EXPECT_FALSE(channel_->sending());
+ }
+ // Test that we properly set the send and recv buffer sizes by the time
+ // SetSend is called.
+ void SetSendSetsTransportBufferSizes() {
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_EQ(64 * 1024, network_interface_.sendbuf_size());
+ EXPECT_EQ(64 * 1024, network_interface_.recvbuf_size());
+ }
+ // Tests that we can send frames and the right payload type is used.
+ void Send(const cricket::VideoCodec& codec) {
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ }
+ // Tests that we can send and receive frames.
+ void SendAndReceive(const cricket::VideoCodec& codec) {
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ }
+ void SendReceiveManyAndGetStats(const cricket::VideoCodec& codec,
+ int duration_sec, int fps) {
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ for (int i = 0; i < duration_sec; ++i) {
+ for (int frame = 1; frame <= fps; ++frame) {
+ EXPECT_TRUE(WaitAndSendFrame(1000 / fps));
+ EXPECT_FRAME_WAIT(frame + i * fps, kVideoWidth, kVideoHeight, kTimeout);
+ }
+ }
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(codec.id, GetPayloadType(p.get()));
+ }
+
+ // Test that stats work properly for a 1-1 call.
+ void GetStats() {
+ const int kDurationSec = 3;
+ const int kFps = 10;
+ SendReceiveManyAndGetStats(DefaultCodec(), kDurationSec, kFps);
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+
+ ASSERT_EQ(1U, info.senders.size());
+ // TODO(whyuan): bytes_sent and bytes_rcvd are different. Are both payload?
+ // For webrtc, bytes_sent does not include the RTP header length.
+ EXPECT_GT(info.senders[0].bytes_sent, 0);
+ EXPECT_EQ(NumRtpPackets(), info.senders[0].packets_sent);
+ EXPECT_EQ(0.0, info.senders[0].fraction_lost);
+ ASSERT_TRUE(info.senders[0].codec_payload_type);
+ EXPECT_EQ(DefaultCodec().id, *info.senders[0].codec_payload_type);
+ EXPECT_EQ(0, info.senders[0].firs_rcvd);
+ EXPECT_EQ(0, info.senders[0].plis_rcvd);
+ EXPECT_EQ(0, info.senders[0].nacks_rcvd);
+ EXPECT_EQ(kVideoWidth, info.senders[0].send_frame_width);
+ EXPECT_EQ(kVideoHeight, info.senders[0].send_frame_height);
+ EXPECT_GT(info.senders[0].framerate_input, 0);
+ EXPECT_GT(info.senders[0].framerate_sent, 0);
+
+ EXPECT_EQ(1U, info.send_codecs.count(DefaultCodec().id));
+ EXPECT_EQ(DefaultCodec().ToCodecParameters(),
+ info.send_codecs[DefaultCodec().id]);
+
+ ASSERT_EQ(1U, info.receivers.size());
+ EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+ EXPECT_EQ(1U, info.receivers[0].ssrcs().size());
+ EXPECT_EQ(info.senders[0].ssrcs()[0], info.receivers[0].ssrcs()[0]);
+ ASSERT_TRUE(info.receivers[0].codec_payload_type);
+ EXPECT_EQ(DefaultCodec().id, *info.receivers[0].codec_payload_type);
+ EXPECT_EQ(NumRtpBytes(), info.receivers[0].bytes_rcvd);
+ EXPECT_EQ(NumRtpPackets(), info.receivers[0].packets_rcvd);
+ EXPECT_EQ(0.0, info.receivers[0].fraction_lost);
+ EXPECT_EQ(0, info.receivers[0].packets_lost);
+ // TODO(asapersson): Not set for webrtc. Handle missing stats.
+ // EXPECT_EQ(0, info.receivers[0].packets_concealed);
+ EXPECT_EQ(0, info.receivers[0].firs_sent);
+ EXPECT_EQ(0, info.receivers[0].plis_sent);
+ EXPECT_EQ(0, info.receivers[0].nacks_sent);
+ EXPECT_EQ(kVideoWidth, info.receivers[0].frame_width);
+ EXPECT_EQ(kVideoHeight, info.receivers[0].frame_height);
+ EXPECT_GT(info.receivers[0].framerate_rcvd, 0);
+ EXPECT_GT(info.receivers[0].framerate_decoded, 0);
+ EXPECT_GT(info.receivers[0].framerate_output, 0);
+
+ EXPECT_EQ(1U, info.receive_codecs.count(DefaultCodec().id));
+ EXPECT_EQ(DefaultCodec().ToCodecParameters(),
+ info.receive_codecs[DefaultCodec().id]);
+ }
+
+ cricket::VideoSenderInfo GetSenderStats(size_t i) {
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ return info.senders[i];
+ }
+
+ cricket::VideoReceiverInfo GetReceiverStats(size_t i) {
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ return info.receivers[i];
+ }
+
+ // Test that stats work properly for a conf call with multiple recv streams.
+ void GetStatsMultipleRecvStreams() {
+ cricket::FakeVideoRenderer renderer1, renderer2;
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.conference_mode = true;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ EXPECT_TRUE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->SetSink(2, &renderer2));
+ EXPECT_EQ(0, renderer1.num_rendered_frames());
+ EXPECT_EQ(0, renderer2.num_rendered_frames());
+ std::vector<uint32_t> ssrcs;
+ ssrcs.push_back(1);
+ ssrcs.push_back(2);
+ network_interface_.SetConferenceMode(true, ssrcs);
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer1, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+
+ EXPECT_TRUE(channel_->SetSend(false));
+
+ cricket::VideoMediaInfo info;
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(1U, info.senders.size());
+ // TODO(whyuan): bytes_sent and bytes_rcvd are different. Are both payload?
+ // For webrtc, bytes_sent does not include the RTP header length.
+ EXPECT_GT(GetSenderStats(0).bytes_sent, 0);
+ EXPECT_EQ_WAIT(NumRtpPackets(), GetSenderStats(0).packets_sent, kTimeout);
+ EXPECT_EQ(kVideoWidth, GetSenderStats(0).send_frame_width);
+ EXPECT_EQ(kVideoHeight, GetSenderStats(0).send_frame_height);
+
+ ASSERT_EQ(2U, info.receivers.size());
+ for (size_t i = 0; i < info.receivers.size(); ++i) {
+ EXPECT_EQ(1U, GetReceiverStats(i).ssrcs().size());
+ EXPECT_EQ(i + 1, GetReceiverStats(i).ssrcs()[0]);
+ EXPECT_EQ_WAIT(NumRtpBytes(), GetReceiverStats(i).bytes_rcvd, kTimeout);
+ EXPECT_EQ_WAIT(NumRtpPackets(), GetReceiverStats(i).packets_rcvd,
+ kTimeout);
+ EXPECT_EQ_WAIT(kVideoWidth, GetReceiverStats(i).frame_width, kTimeout);
+ EXPECT_EQ_WAIT(kVideoHeight, GetReceiverStats(i).frame_height, kTimeout);
+ }
+ }
+ // Test that stats work properly for a conf call with multiple send streams.
+ void GetStatsMultipleSendStreams() {
+ // Normal setup; note that we set the SSRC explicitly to ensure that
+ // it will come first in the senders map.
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.conference_mode = true;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+
+ // Add an additional capturer, and hook up a renderer to receive it.
+ cricket::FakeVideoRenderer renderer2;
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer(
+ CreateFakeVideoCapturer());
+ const int kTestWidth = 160;
+ const int kTestHeight = 120;
+ cricket::VideoFormat format(kTestWidth, kTestHeight,
+ cricket::VideoFormat::FpsToInterval(5),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(5678)));
+ EXPECT_TRUE(channel_->SetVideoSend(5678, true, nullptr, capturer.get()));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(5678)));
+ EXPECT_TRUE(channel_->SetSink(5678, &renderer2));
+ EXPECT_TRUE(capturer->CaptureCustomFrame(
+ kTestWidth, kTestHeight, cricket::FOURCC_I420));
+ EXPECT_FRAME_ON_RENDERER_WAIT(
+ renderer2, 1, kTestWidth, kTestHeight, kTimeout);
+
+ // Get stats, and make sure they are correct for two senders. We wait until
+ // the number of expected packets have been sent to avoid races where we
+ // check stats before it has been updated.
+ cricket::VideoMediaInfo info;
+ for (uint32_t i = 0; i < kTimeout; ++i) {
+ rtc::Thread::Current()->ProcessMessages(1);
+ EXPECT_TRUE(channel_->GetStats(&info));
+ ASSERT_EQ(2U, info.senders.size());
+ if (info.senders[0].packets_sent + info.senders[1].packets_sent ==
+ NumRtpPackets()) {
+ // Stats have been updated for both sent frames, expectations can be
+ // checked now.
+ break;
+ }
+ }
+ EXPECT_EQ(NumRtpPackets(),
+ info.senders[0].packets_sent + info.senders[1].packets_sent)
+ << "Timed out while waiting for packet counts for all sent packets.";
+ EXPECT_EQ(1U, info.senders[0].ssrcs().size());
+ EXPECT_EQ(1234U, info.senders[0].ssrcs()[0]);
+ EXPECT_EQ(kVideoWidth, info.senders[0].send_frame_width);
+ EXPECT_EQ(kVideoHeight, info.senders[0].send_frame_height);
+ EXPECT_EQ(1U, info.senders[1].ssrcs().size());
+ EXPECT_EQ(5678U, info.senders[1].ssrcs()[0]);
+ EXPECT_EQ(kTestWidth, info.senders[1].send_frame_width);
+ EXPECT_EQ(kTestHeight, info.senders[1].send_frame_height);
+ // The capturer must be unregistered here as it runs out of it's scope next.
+ channel_->SetVideoSend(5678, true, nullptr, nullptr);
+ }
+
+ // Test that we can set the bandwidth.
+ void SetSendBandwidth() {
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.max_bandwidth_bps = -1; // <= 0 means unlimited.
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ parameters.max_bandwidth_bps = 128 * 1024;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ }
+ // Test that we can set the SSRC for the default send source.
+ void SetSendSsrc() {
+ EXPECT_TRUE(SetDefaultCodec());
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ uint32_t ssrc = 0;
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(kSsrc, ssrc);
+ // Packets are being paced out, so these can mismatch between the first and
+ // second call to NumRtpPackets until pending packets are paced out.
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(ssrc), kTimeout);
+ EXPECT_EQ(1, NumSentSsrcs());
+ EXPECT_EQ(0, NumRtpPackets(kSsrc - 1));
+ EXPECT_EQ(0, NumRtpBytes(kSsrc - 1));
+ }
+ // Test that we can set the SSRC even after codecs are set.
+ void SetSendSsrcAfterSetCodecs() {
+ // Remove stream added in Setup.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+ EXPECT_TRUE(SetDefaultCodec());
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(999)));
+ EXPECT_TRUE(
+ channel_->SetVideoSend(999u, true, nullptr, video_capturer_.get()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(WaitAndSendFrame(0));
+ EXPECT_TRUE_WAIT(NumRtpPackets() > 0, kTimeout);
+ uint32_t ssrc = 0;
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(999u, ssrc);
+ // Packets are being paced out, so these can mismatch between the first and
+ // second call to NumRtpPackets until pending packets are paced out.
+ EXPECT_EQ_WAIT(NumRtpPackets(), NumRtpPackets(ssrc), kTimeout);
+ EXPECT_EQ_WAIT(NumRtpBytes(), NumRtpBytes(ssrc), kTimeout);
+ EXPECT_EQ(1, NumSentSsrcs());
+ EXPECT_EQ(0, NumRtpPackets(kSsrc));
+ EXPECT_EQ(0, NumRtpBytes(kSsrc));
+ }
+ // Test that we can set the default video renderer before and after
+ // media is received.
+ void SetSink() {
+ uint8_t data1[] = {
+ 0x80, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00};
+
+ rtc::CopyOnWriteBuffer packet1(data1, sizeof(data1));
+ rtc::SetBE32(packet1.data() + 8, kSsrc);
+ channel_->SetSink(kDefaultReceiveSsrc, NULL);
+ EXPECT_TRUE(SetDefaultCodec());
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ channel_->OnPacketReceived(&packet1, rtc::PacketTime());
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ }
+
+ // Tests empty StreamParams is rejected.
+ void RejectEmptyStreamParams() {
+ // Remove the send stream that was added during Setup.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+
+ cricket::StreamParams empty;
+ EXPECT_FALSE(channel_->AddSendStream(empty));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(789u)));
+ }
+
+ // Tests setting up and configuring a send stream.
+ void AddRemoveSendStreams() {
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ EXPECT_GT(NumRtpPackets(), 0);
+ uint32_t ssrc = 0;
+ size_t last_packet = NumRtpPackets() - 1;
+ std::unique_ptr<const rtc::CopyOnWriteBuffer>
+ p(GetRtpPacket(static_cast<int>(last_packet)));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(kSsrc, ssrc);
+
+ // Remove the send stream that was added during Setup.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+ int rtp_packets = NumRtpPackets();
+
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(789u)));
+ EXPECT_TRUE(
+ channel_->SetVideoSend(789u, true, nullptr, video_capturer_.get()));
+ EXPECT_EQ(rtp_packets, NumRtpPackets());
+ // Wait 30ms to guarantee the engine does not drop the frame.
+ EXPECT_TRUE(WaitAndSendFrame(30));
+ EXPECT_TRUE_WAIT(NumRtpPackets() > rtp_packets, kTimeout);
+
+ last_packet = NumRtpPackets() - 1;
+ p.reset(GetRtpPacket(static_cast<int>(last_packet)));
+ ParseRtpPacket(p.get(), NULL, NULL, NULL, NULL, &ssrc, NULL);
+ EXPECT_EQ(789u, ssrc);
+ }
+
+ // Tests the behavior of incoming streams in a conference scenario.
+ void SimulateConference() {
+ cricket::FakeVideoRenderer renderer1, renderer2;
+ EXPECT_TRUE(SetDefaultCodec());
+ cricket::VideoSendParameters parameters;
+ parameters.codecs.push_back(DefaultCodec());
+ parameters.conference_mode = true;
+ EXPECT_TRUE(channel_->SetSendParameters(parameters));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ EXPECT_TRUE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->SetSink(2, &renderer2));
+ EXPECT_EQ(0, renderer1.num_rendered_frames());
+ EXPECT_EQ(0, renderer2.num_rendered_frames());
+ std::vector<uint32_t> ssrcs;
+ ssrcs.push_back(1);
+ ssrcs.push_back(2);
+ network_interface_.SetConferenceMode(true, ssrcs);
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer1, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+ EXPECT_FRAME_ON_RENDERER_WAIT(renderer2, 1, kVideoWidth, kVideoHeight,
+ kTimeout);
+
+ std::unique_ptr<const rtc::CopyOnWriteBuffer> p(GetRtpPacket(0));
+ EXPECT_EQ(DefaultCodec().id, GetPayloadType(p.get()));
+ EXPECT_EQ(kVideoWidth, renderer1.width());
+ EXPECT_EQ(kVideoHeight, renderer1.height());
+ EXPECT_EQ(kVideoWidth, renderer2.width());
+ EXPECT_EQ(kVideoHeight, renderer2.height());
+ EXPECT_TRUE(channel_->RemoveRecvStream(2));
+ EXPECT_TRUE(channel_->RemoveRecvStream(1));
+ }
+
+ // Tests that we can add and remove capturers and frames are sent out properly
+ void AddRemoveCapturer() {
+ cricket::VideoCodec codec = DefaultCodec();
+ const int time_between_send_ms =
+ cricket::VideoFormat::FpsToInterval(kFramerate);
+ EXPECT_TRUE(SetOneCodec(codec));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer(
+ CreateFakeVideoCapturer());
+
+ // TODO(nisse): This testcase fails if we don't configure
+ // screencast. It's unclear why, I see nothing obvious in this
+ // test which is related to screencast logic.
+ cricket::VideoOptions video_options;
+ video_options.is_screencast = true;
+ channel_->SetVideoSend(kSsrc, true, &video_options, nullptr);
+
+ cricket::VideoFormat format(480, 360,
+ cricket::VideoFormat::FpsToInterval(30),
+ cricket::FOURCC_I420);
+ EXPECT_EQ(cricket::CS_RUNNING, capturer->Start(format));
+ // All capturers start generating frames with the same timestamp. ViE does
+ // not allow the same timestamp to be used. Capture one frame before
+ // associating the capturer with the channel.
+ EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
+ cricket::FOURCC_I420));
+
+ int captured_frames = 1;
+ for (int iterations = 0; iterations < 2; ++iterations) {
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, capturer.get()));
+ rtc::Thread::Current()->ProcessMessages(time_between_send_ms);
+ EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
+ cricket::FOURCC_I420));
+ ++captured_frames;
+ // Wait until frame of right size is captured.
+ EXPECT_TRUE_WAIT(renderer_.num_rendered_frames() >= captured_frames &&
+ format.width == renderer_.width() &&
+ format.height == renderer_.height() &&
+ !renderer_.black_frame(), kTimeout);
+ EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
+ EXPECT_EQ(format.width, renderer_.width());
+ EXPECT_EQ(format.height, renderer_.height());
+ captured_frames = renderer_.num_rendered_frames() + 1;
+ EXPECT_FALSE(renderer_.black_frame());
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+ // Make sure a black frame is generated within the specified timeout.
+ // The black frame should be the resolution of the previous frame to
+ // prevent expensive encoder reconfigurations.
+ EXPECT_TRUE_WAIT(renderer_.num_rendered_frames() >= captured_frames &&
+ format.width == renderer_.width() &&
+ format.height == renderer_.height() &&
+ renderer_.black_frame(), kTimeout);
+ EXPECT_GE(renderer_.num_rendered_frames(), captured_frames);
+ EXPECT_EQ(format.width, renderer_.width());
+ EXPECT_EQ(format.height, renderer_.height());
+ EXPECT_TRUE(renderer_.black_frame());
+
+ // The black frame has the same timestamp as the next frame since it's
+ // timestamp is set to the last frame's timestamp + interval. WebRTC will
+ // not render a frame with the same timestamp so capture another frame
+ // with the frame capturer to increment the next frame's timestamp.
+ EXPECT_TRUE(capturer->CaptureCustomFrame(format.width, format.height,
+ cricket::FOURCC_I420));
+ }
+ }
+
+ // Tests that if SetVideoSend is called with a NULL capturer after the
+ // capturer was already removed, the application doesn't crash (and no black
+ // frame is sent).
+ void RemoveCapturerWithoutAdd() {
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ EXPECT_TRUE(SetSend(true));
+ EXPECT_TRUE(channel_->SetSink(kDefaultReceiveSsrc, &renderer_));
+ EXPECT_EQ(0, renderer_.num_rendered_frames());
+ EXPECT_TRUE(SendFrame());
+ EXPECT_FRAME_WAIT(1, kVideoWidth, kVideoHeight, kTimeout);
+ // Wait for one frame so they don't get dropped because we send frames too
+ // tightly.
+ rtc::Thread::Current()->ProcessMessages(30);
+ // Remove the capturer.
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+
+ // No capturer was added, so this SetVideoSend shouldn't do anything.
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+ rtc::Thread::Current()->ProcessMessages(300);
+ // Verify no more frames were sent.
+ EXPECT_EQ(1, renderer_.num_rendered_frames());
+ }
+
+ // Tests that we can add and remove capturer as unique sources.
+ void AddRemoveCapturerMultipleSources() {
+ // WebRTC implementation will drop frames if pushed to quickly. Wait the
+ // interval time to avoid that.
+ // WebRTC implementation will drop frames if pushed to quickly. Wait the
+ // interval time to avoid that.
+ // Set up the stream associated with the engine.
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(kSsrc)));
+ EXPECT_TRUE(channel_->SetSink(kSsrc, &renderer_));
+ cricket::VideoFormat capture_format; // default format
+ capture_format.interval = cricket::VideoFormat::FpsToInterval(kFramerate);
+ // Set up additional stream 1.
+ cricket::FakeVideoRenderer renderer1;
+ EXPECT_FALSE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ EXPECT_TRUE(channel_->SetSink(1, &renderer1));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(1)));
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer1(
+ CreateFakeVideoCapturer());
+ EXPECT_EQ(cricket::CS_RUNNING, capturer1->Start(capture_format));
+ // Set up additional stream 2.
+ cricket::FakeVideoRenderer renderer2;
+ EXPECT_FALSE(channel_->SetSink(2, &renderer2));
+ EXPECT_TRUE(channel_->AddRecvStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ EXPECT_TRUE(channel_->SetSink(2, &renderer2));
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(2)));
+ std::unique_ptr<cricket::FakeVideoCapturer> capturer2(
+ CreateFakeVideoCapturer());
+ EXPECT_EQ(cricket::CS_RUNNING, capturer2->Start(capture_format));
+ // State for all the streams.
+ EXPECT_TRUE(SetOneCodec(DefaultCodec()));
+ // A limitation in the lmi implementation requires that SetVideoSend() is
+ // called after SetOneCodec().
+ // TODO(hellner): this seems like an unnecessary constraint, fix it.
+ EXPECT_TRUE(channel_->SetVideoSend(1, true, nullptr, capturer1.get()));
+ EXPECT_TRUE(channel_->SetVideoSend(2, true, nullptr, capturer2.get()));
+ EXPECT_TRUE(SetSend(true));
+ // Test capturer associated with engine.
+ const int kTestWidth = 160;
+ const int kTestHeight = 120;
+ EXPECT_TRUE(capturer1->CaptureCustomFrame(
+ kTestWidth, kTestHeight, cricket::FOURCC_I420));
+ EXPECT_FRAME_ON_RENDERER_WAIT(
+ renderer1, 1, kTestWidth, kTestHeight, kTimeout);
+ // Capture a frame with additional capturer2, frames should be received
+ EXPECT_TRUE(capturer2->CaptureCustomFrame(
+ kTestWidth, kTestHeight, cricket::FOURCC_I420));
+ EXPECT_FRAME_ON_RENDERER_WAIT(
+ renderer2, 1, kTestWidth, kTestHeight, kTimeout);
+ // Successfully remove the capturer.
+ EXPECT_TRUE(channel_->SetVideoSend(kSsrc, true, nullptr, nullptr));
+ // The capturers must be unregistered here as it runs out of it's scope
+ // next.
+ EXPECT_TRUE(channel_->SetVideoSend(1, true, nullptr, nullptr));
+ EXPECT_TRUE(channel_->SetVideoSend(2, true, nullptr, nullptr));
+ }
+
+ // Test that multiple send streams can be created and deleted properly.
+ void MultipleSendStreams() {
+ // Remove stream added in Setup. I.e. remove stream corresponding to default
+ // channel.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrc));
+ const unsigned int kSsrcsSize = sizeof(kSsrcs4)/sizeof(kSsrcs4[0]);
+ for (unsigned int i = 0; i < kSsrcsSize; ++i) {
+ EXPECT_TRUE(channel_->AddSendStream(
+ cricket::StreamParams::CreateLegacy(kSsrcs4[i])));
+ }
+ // Delete one of the non default channel streams, let the destructor delete
+ // the remaining ones.
+ EXPECT_TRUE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1]));
+ // Stream should already be deleted.
+ EXPECT_FALSE(channel_->RemoveSendStream(kSsrcs4[kSsrcsSize - 1]));
+ }
+
+ // Two streams one channel tests.
+
+ // Tests that we can send and receive frames.
+ void TwoStreamsSendAndReceive(const cricket::VideoCodec& codec) {
+ SetUpSecondStream();
+ // Test sending and receiving on first stream.
+ SendAndReceive(codec);
+ // Test sending and receiving on second stream.
+ EXPECT_EQ_WAIT(1, renderer2_.num_rendered_frames(), kTimeout);
+ EXPECT_GT(NumRtpPackets(), 0);
+ EXPECT_EQ(1, renderer2_.num_rendered_frames());
+ }
+
+ webrtc::RtcEventLogNullImpl event_log_;
+ const std::unique_ptr<webrtc::Call> call_;
+ E engine_;
+ std::unique_ptr<cricket::FakeVideoCapturer> video_capturer_;
+ std::unique_ptr<cricket::FakeVideoCapturer> video_capturer_2_;
+ std::unique_ptr<C> channel_;
+ cricket::FakeNetworkInterface network_interface_;
+ cricket::FakeVideoRenderer renderer_;
+ cricket::VideoMediaChannel::Error media_error_;
+
+ // Used by test cases where 2 streams are run on the same channel.
+ cricket::FakeVideoRenderer renderer2_;
+};
+
+#endif // MEDIA_BASE_VIDEOENGINE_UNITTEST_H_ NOLINT
diff --git a/third_party/libwebrtc/webrtc/media/base/videosinkinterface.h b/third_party/libwebrtc/webrtc/media/base/videosinkinterface.h
new file mode 100644
index 0000000000..900e786f24
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosinkinterface.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOSINKINTERFACE_H_
+#define MEDIA_BASE_VIDEOSINKINTERFACE_H_
+
+#include <rtc_base/checks.h>
+// TODO(nisse): Consider moving this interface (and possibly
+// VideoSourceInterface too) from media/base to common_video, to
+// reduce dependency cycles.
+namespace rtc {
+
+template <typename VideoFrameT>
+class VideoSinkInterface {
+ public:
+ virtual ~VideoSinkInterface() {}
+
+ virtual void OnFrame(const VideoFrameT& frame) = 0;
+
+ // Should be called by the source when it discards the frame due to rate
+ // limiting.
+ virtual void OnDiscardedFrame() {}
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_VIDEOSINKINTERFACE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc b/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc
new file mode 100644
index 0000000000..299795fca6
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourcebase.cc
@@ -0,0 +1,58 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videosourcebase.h"
+
+#include "rtc_base/checks.h"
+
+namespace rtc {
+
+VideoSourceBase::VideoSourceBase() {
+ thread_checker_.DetachFromThread();
+}
+
+void VideoSourceBase::AddOrUpdateSink(
+ VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+
+ SinkPair* sink_pair = FindSinkPair(sink);
+ if (!sink_pair) {
+ sinks_.push_back(SinkPair(sink, wants));
+ } else {
+ sink_pair->wants = wants;
+ }
+}
+
+void VideoSourceBase::RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ RTC_DCHECK(sink != nullptr);
+ RTC_DCHECK(FindSinkPair(sink));
+ sinks_.erase(std::remove_if(sinks_.begin(), sinks_.end(),
+ [sink](const SinkPair& sink_pair) {
+ return sink_pair.sink == sink;
+ }),
+ sinks_.end());
+}
+
+VideoSourceBase::SinkPair* VideoSourceBase::FindSinkPair(
+ const VideoSinkInterface<webrtc::VideoFrame>* sink) {
+ RTC_DCHECK(thread_checker_.CalledOnValidThread());
+ auto sink_pair_it = std::find_if(
+ sinks_.begin(), sinks_.end(),
+ [sink](const SinkPair& sink_pair) { return sink_pair.sink == sink; });
+ if (sink_pair_it != sinks_.end()) {
+ return &*sink_pair_it;
+ }
+ return nullptr;
+}
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourcebase.h b/third_party/libwebrtc/webrtc/media/base/videosourcebase.h
new file mode 100644
index 0000000000..f19d8fb587
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourcebase.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOSOURCEBASE_H_
+#define MEDIA_BASE_VIDEOSOURCEBASE_H_
+
+#include <vector>
+
+#include "api/video/video_frame.h"
+#include "media/base/videosourceinterface.h"
+#include "rtc_base/thread_checker.h"
+
+namespace rtc {
+
+// VideoSourceBase is not thread safe.
+class VideoSourceBase : public VideoSourceInterface<webrtc::VideoFrame> {
+ public:
+ VideoSourceBase();
+ void AddOrUpdateSink(VideoSinkInterface<webrtc::VideoFrame>* sink,
+ const VideoSinkWants& wants) override;
+ void RemoveSink(VideoSinkInterface<webrtc::VideoFrame>* sink) override;
+
+ protected:
+ struct SinkPair {
+ SinkPair(VideoSinkInterface<webrtc::VideoFrame>* sink, VideoSinkWants wants)
+ : sink(sink), wants(wants) {}
+ VideoSinkInterface<webrtc::VideoFrame>* sink;
+ VideoSinkWants wants;
+ };
+ SinkPair* FindSinkPair(const VideoSinkInterface<webrtc::VideoFrame>* sink);
+
+ const std::vector<SinkPair>& sink_pairs() const { return sinks_; }
+ ThreadChecker thread_checker_;
+
+ private:
+ std::vector<SinkPair> sinks_;
+};
+
+} // namespace rtc
+
+#endif // MEDIA_BASE_VIDEOSOURCEBASE_H_
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc
new file mode 100644
index 0000000000..42ea04ea62
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.cc
@@ -0,0 +1,17 @@
+/*
+ * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media/base/videosourceinterface.h"
+namespace rtc {
+
+VideoSinkWants::VideoSinkWants() = default;
+VideoSinkWants::~VideoSinkWants() = default;
+
+} // namespace rtc
diff --git a/third_party/libwebrtc/webrtc/media/base/videosourceinterface.h b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.h
new file mode 100644
index 0000000000..701ddec492
--- /dev/null
+++ b/third_party/libwebrtc/webrtc/media/base/videosourceinterface.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
+#define MEDIA_BASE_VIDEOSOURCEINTERFACE_H_
+
+#include <limits>
+
+#include "api/optional.h"
+#include "media/base/videosinkinterface.h"
+
+namespace rtc {
+
+// VideoSinkWants is used for notifying the source of properties a video frame
+// should have when it is delivered to a certain sink.
+struct VideoSinkWants {
+ VideoSinkWants();
+ ~VideoSinkWants();
+ // Tells the source whether the sink wants frames with rotation applied.
+ // By default, any rotation must be applied by the sink.
+ bool rotation_applied = false;
+
+ // Tells the source that the sink only wants black frames.
+ bool black_frames = false;
+
+ // Tells the source the maximum number of pixels the sink wants.
+ int max_pixel_count = std::numeric_limits<int>::max();
+ // Tells the source the desired number of pixels the sinks wants. This will
+ // typically be used when stepping the resolution up again when conditions
+ // have improved after an earlier downgrade. The source should select the
+ // closest resolution to this pixel count, but if max_pixel_count is set, it
+ // still sets the absolute upper bound.
+ rtc::Optional<int> target_pixel_count;
+ // Tells the source the maximum framerate the sink wants.
+ int max_framerate_fps = std::numeric_limits<int>::max();
+};
+
+template <typename VideoFrameT>
+class VideoSourceInterface {
+ public:
+ virtual void AddOrUpdateSink(VideoSinkInterface<VideoFrameT>* sink,
+ const VideoSinkWants& wants) = 0;
+ // RemoveSink must guarantee that at the time the method returns,
+ // there is no current and no future calls to VideoSinkInterface::OnFrame.
+ virtual void RemoveSink(VideoSinkInterface<VideoFrameT>* sink) = 0;
+
+ protected:
+ virtual ~VideoSourceInterface() {}
+};
+
+} // namespace rtc
+#endif // MEDIA_BASE_VIDEOSOURCEINTERFACE_H_