Diffstat (limited to 'third_party/libwebrtc/modules/video_coding/codecs')
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build | 5
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc | 193
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h | 75
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc | 127
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc | 278
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h | 62
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc | 148
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc | 888
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc | 437
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h | 45
-rw-r--r--  third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc | 205
11 files changed, 327 insertions, 2136 deletions
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build b/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build
index e67bb6616d..bfe37b935d 100644
--- a/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build
+++ b/third_party/libwebrtc/modules/video_coding/codecs/av1/av1_svc_config_gn/moz.build
@@ -195,7 +195,6 @@ if CONFIG["MOZ_X11"] == "1" and CONFIG["OS_TARGET"] == "Linux":
if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "arm":
OS_LIBS += [
- "android_support",
"unwind"
]
@@ -205,10 +204,6 @@ if CONFIG["OS_TARGET"] == "Android" and CONFIG["TARGET_CPU"] == "x86":
"-msse2"
]
- OS_LIBS += [
- "android_support"
- ]
-
if CONFIG["OS_TARGET"] == "Linux" and CONFIG["TARGET_CPU"] == "aarch64":
DEFINES["_GNU_SOURCE"] = True
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc
deleted file mode 100644
index 772c15734a..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.cc
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
-
-#include <memory>
-
-#include "api/task_queue/default_task_queue_factory.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_codec_constants.h"
-#include "api/video/video_frame.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/event.h"
-#include "rtc_base/time_utils.h"
-#include "third_party/libyuv/include/libyuv/compare.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using Psnr = VideoCodecStats::Frame::Psnr;
-
-Psnr CalcPsnr(const I420BufferInterface& ref_buffer,
- const I420BufferInterface& dec_buffer) {
- RTC_CHECK_EQ(ref_buffer.width(), dec_buffer.width());
- RTC_CHECK_EQ(ref_buffer.height(), dec_buffer.height());
-
- uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane(
- dec_buffer.DataY(), dec_buffer.StrideY(), ref_buffer.DataY(),
- ref_buffer.StrideY(), dec_buffer.width(), dec_buffer.height());
-
- uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane(
- dec_buffer.DataU(), dec_buffer.StrideU(), ref_buffer.DataU(),
- ref_buffer.StrideU(), dec_buffer.width() / 2, dec_buffer.height() / 2);
-
- uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane(
- dec_buffer.DataV(), dec_buffer.StrideV(), ref_buffer.DataV(),
- ref_buffer.StrideV(), dec_buffer.width() / 2, dec_buffer.height() / 2);
-
- int num_y_samples = dec_buffer.width() * dec_buffer.height();
- Psnr psnr;
- psnr.y = libyuv::SumSquareErrorToPsnr(sse_y, num_y_samples);
- psnr.u = libyuv::SumSquareErrorToPsnr(sse_u, num_y_samples / 4);
- psnr.v = libyuv::SumSquareErrorToPsnr(sse_v, num_y_samples / 4);
-
- return psnr;
-}
-
-} // namespace
-
-VideoCodecAnalyzer::VideoCodecAnalyzer(
- ReferenceVideoSource* reference_video_source)
- : reference_video_source_(reference_video_source), num_frames_(0) {
- sequence_checker_.Detach();
-}
-
-void VideoCodecAnalyzer::StartEncode(const VideoFrame& input_frame) {
- int64_t encode_start_us = rtc::TimeMicros();
- task_queue_.PostTask(
- [this, timestamp_rtp = input_frame.timestamp(), encode_start_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
-
- RTC_CHECK(frame_num_.find(timestamp_rtp) == frame_num_.end());
- frame_num_[timestamp_rtp] = num_frames_++;
-
- stats_.AddFrame({.frame_num = frame_num_[timestamp_rtp],
- .timestamp_rtp = timestamp_rtp,
- .encode_start = Timestamp::Micros(encode_start_us)});
- });
-}
-
-void VideoCodecAnalyzer::FinishEncode(const EncodedImage& frame) {
- int64_t encode_finished_us = rtc::TimeMicros();
-
- task_queue_.PostTask([this, timestamp_rtp = frame.RtpTimestamp(),
- spatial_idx = frame.SpatialIndex().value_or(0),
- temporal_idx = frame.TemporalIndex().value_or(0),
- width = frame._encodedWidth,
- height = frame._encodedHeight,
- frame_type = frame._frameType,
- frame_size_bytes = frame.size(), qp = frame.qp_,
- encode_finished_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
-
- if (spatial_idx > 0) {
- VideoCodecStats::Frame* base_frame =
- stats_.GetFrame(timestamp_rtp, /*spatial_idx=*/0);
-
- stats_.AddFrame({.frame_num = base_frame->frame_num,
- .timestamp_rtp = timestamp_rtp,
- .spatial_idx = spatial_idx,
- .encode_start = base_frame->encode_start});
- }
-
- VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- fs->spatial_idx = spatial_idx;
- fs->temporal_idx = temporal_idx;
- fs->width = width;
- fs->height = height;
- fs->frame_size = DataSize::Bytes(frame_size_bytes);
- fs->qp = qp;
- fs->keyframe = frame_type == VideoFrameType::kVideoFrameKey;
- fs->encode_time = Timestamp::Micros(encode_finished_us) - fs->encode_start;
- fs->encoded = true;
- });
-}
-
-void VideoCodecAnalyzer::StartDecode(const EncodedImage& frame) {
- int64_t decode_start_us = rtc::TimeMicros();
- task_queue_.PostTask([this, timestamp_rtp = frame.RtpTimestamp(),
- spatial_idx = frame.SpatialIndex().value_or(0),
- frame_size_bytes = frame.size(), decode_start_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
-
- VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- if (fs == nullptr) {
- if (frame_num_.find(timestamp_rtp) == frame_num_.end()) {
- frame_num_[timestamp_rtp] = num_frames_++;
- }
- stats_.AddFrame({.frame_num = frame_num_[timestamp_rtp],
- .timestamp_rtp = timestamp_rtp,
- .spatial_idx = spatial_idx,
- .frame_size = DataSize::Bytes(frame_size_bytes)});
- fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- }
-
- fs->decode_start = Timestamp::Micros(decode_start_us);
- });
-}
-
-void VideoCodecAnalyzer::FinishDecode(const VideoFrame& frame,
- int spatial_idx) {
- int64_t decode_finished_us = rtc::TimeMicros();
- task_queue_.PostTask([this, timestamp_rtp = frame.timestamp(), spatial_idx,
- width = frame.width(), height = frame.height(),
- decode_finished_us]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- VideoCodecStats::Frame* fs = stats_.GetFrame(timestamp_rtp, spatial_idx);
- fs->decode_time = Timestamp::Micros(decode_finished_us) - fs->decode_start;
-
- if (!fs->encoded) {
- fs->width = width;
- fs->height = height;
- }
-
- fs->decoded = true;
- });
-
- if (reference_video_source_ != nullptr) {
- // Copy hardware-backed frame into main memory to release output buffers,
- // whose number may be limited in hardware decoders.
- rtc::scoped_refptr<I420BufferInterface> decoded_buffer =
- frame.video_frame_buffer()->ToI420();
-
- task_queue_.PostTask([this, decoded_buffer,
- timestamp_rtp = frame.timestamp(), spatial_idx]() {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- VideoFrame ref_frame = reference_video_source_->GetFrame(
- timestamp_rtp, {.width = decoded_buffer->width(),
- .height = decoded_buffer->height()});
- rtc::scoped_refptr<I420BufferInterface> ref_buffer =
- ref_frame.video_frame_buffer()->ToI420();
-
- Psnr psnr = CalcPsnr(*decoded_buffer, *ref_buffer);
-
- VideoCodecStats::Frame* fs =
- this->stats_.GetFrame(timestamp_rtp, spatial_idx);
- fs->psnr = psnr;
- });
- }
-}
-
-std::unique_ptr<VideoCodecStats> VideoCodecAnalyzer::GetStats() {
- std::unique_ptr<VideoCodecStats> stats;
- rtc::Event ready;
- task_queue_.PostTask([this, &stats, &ready]() mutable {
- RTC_DCHECK_RUN_ON(&sequence_checker_);
- stats.reset(new VideoCodecStatsImpl(stats_));
- ready.Set();
- });
- ready.Wait(rtc::Event::kForever);
- return stats;
-}
-
-} // namespace test
-} // namespace webrtc
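
For reference, the CalcPsnr() helper in the deleted analyzer above only accumulates per-plane sum-of-squared-errors and defers the dB conversion to libyuv::SumSquareErrorToPsnr(). A minimal standalone sketch of that SSE-to-PSNR mapping, assuming the standard 8-bit PSNR definition (libyuv additionally caps the result for identical planes; the cap value used here is an assumption, not the libyuv source):

#include <cmath>
#include <cstdint>

// Sketch only, not the libyuv implementation:
// PSNR = 10 * log10(MAX^2 * N / SSE), MAX = 255 for 8-bit samples,
// N = number of samples in the plane.
double SumSquareErrorToPsnrSketch(uint64_t sse, uint64_t num_samples) {
  if (sse == 0) {
    return 128.0;  // Identical planes: return a large finite cap (assumed value).
  }
  return 10.0 * std::log10(255.0 * 255.0 * static_cast<double>(num_samples) /
                           static_cast<double>(sse));
}
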
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h
deleted file mode 100644
index 29ca8ee2ff..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
-#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
-
-#include <map>
-#include <memory>
-
-#include "absl/types/optional.h"
-#include "api/sequence_checker.h"
-#include "api/test/video_codec_tester.h"
-#include "api/video/encoded_image.h"
-#include "api/video/resolution.h"
-#include "api/video/video_frame.h"
-#include "modules/video_coding/codecs/test/video_codec_stats_impl.h"
-#include "rtc_base/system/no_unique_address.h"
-#include "rtc_base/task_queue_for_test.h"
-
-namespace webrtc {
-namespace test {
-
-// Analyzer measures and collects metrics necessary for evaluation of video
-// codec quality and performance. This class is thread-safe.
-class VideoCodecAnalyzer {
- public:
- // An interface that provides reference frames for spatial quality analysis.
- class ReferenceVideoSource {
- public:
- virtual ~ReferenceVideoSource() = default;
-
- virtual VideoFrame GetFrame(uint32_t timestamp_rtp,
- Resolution resolution) = 0;
- };
-
- explicit VideoCodecAnalyzer(
- ReferenceVideoSource* reference_video_source = nullptr);
-
- void StartEncode(const VideoFrame& frame);
-
- void FinishEncode(const EncodedImage& frame);
-
- void StartDecode(const EncodedImage& frame);
-
- void FinishDecode(const VideoFrame& frame, int spatial_idx);
-
- std::unique_ptr<VideoCodecStats> GetStats();
-
- protected:
- TaskQueueForTest task_queue_;
-
- ReferenceVideoSource* const reference_video_source_;
-
- VideoCodecStatsImpl stats_ RTC_GUARDED_BY(sequence_checker_);
-
- // Map from RTP timestamp to frame number.
- std::map<uint32_t, int> frame_num_ RTC_GUARDED_BY(sequence_checker_);
-
- // Processed frames counter.
- int num_frames_ RTC_GUARDED_BY(sequence_checker_);
-
- RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_ANALYZER_H_
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc
deleted file mode 100644
index 03146417da..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_analyzer_unittest.cc
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
-
-#include "absl/types/optional.h"
-#include "api/video/i420_buffer.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-#include "third_party/libyuv/include/libyuv/planar_functions.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using ::testing::Return;
-using ::testing::Values;
-using Psnr = VideoCodecStats::Frame::Psnr;
-
-const uint32_t kTimestamp = 3000;
-const int kSpatialIdx = 2;
-
-class MockReferenceVideoSource
- : public VideoCodecAnalyzer::ReferenceVideoSource {
- public:
- MOCK_METHOD(VideoFrame, GetFrame, (uint32_t, Resolution), (override));
-};
-
-VideoFrame CreateVideoFrame(uint32_t timestamp_rtp,
- uint8_t y = 0,
- uint8_t u = 0,
- uint8_t v = 0) {
- rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(2, 2));
-
- libyuv::I420Rect(buffer->MutableDataY(), buffer->StrideY(),
- buffer->MutableDataU(), buffer->StrideU(),
- buffer->MutableDataV(), buffer->StrideV(), 0, 0,
- buffer->width(), buffer->height(), y, u, v);
-
- return VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp)
- .build();
-}
-
-EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) {
- EncodedImage encoded_image;
- encoded_image.SetRtpTimestamp(timestamp_rtp);
- encoded_image.SetSpatialIndex(spatial_idx);
- return encoded_image;
-}
-} // namespace
-
-TEST(VideoCodecAnalyzerTest, StartEncode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartEncode(CreateVideoFrame(kTimestamp));
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_EQ(fs[0].timestamp_rtp, kTimestamp);
-}
-
-TEST(VideoCodecAnalyzerTest, FinishEncode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartEncode(CreateVideoFrame(kTimestamp));
-
- EncodedImage encoded_frame = CreateEncodedImage(kTimestamp, kSpatialIdx);
- analyzer.FinishEncode(encoded_frame);
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(2u, fs.size());
- EXPECT_EQ(kSpatialIdx, fs[1].spatial_idx);
-}
-
-TEST(VideoCodecAnalyzerTest, StartDecode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_EQ(kTimestamp, fs[0].timestamp_rtp);
-}
-
-TEST(VideoCodecAnalyzerTest, FinishDecode) {
- VideoCodecAnalyzer analyzer;
- analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
- VideoFrame decoded_frame = CreateVideoFrame(kTimestamp);
- analyzer.FinishDecode(decoded_frame, kSpatialIdx);
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_EQ(decoded_frame.width(), fs[0].width);
- EXPECT_EQ(decoded_frame.height(), fs[0].height);
-}
-
-TEST(VideoCodecAnalyzerTest, ReferenceVideoSource) {
- MockReferenceVideoSource reference_video_source;
- VideoCodecAnalyzer analyzer(&reference_video_source);
- analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
-
- EXPECT_CALL(reference_video_source, GetFrame)
- .WillOnce(Return(CreateVideoFrame(kTimestamp, /*y=*/0,
- /*u=*/0, /*v=*/0)));
-
- analyzer.FinishDecode(
- CreateVideoFrame(kTimestamp, /*value_y=*/1, /*value_u=*/2, /*value_v=*/3),
- kSpatialIdx);
-
- auto fs = analyzer.GetStats()->Slice();
- EXPECT_EQ(1u, fs.size());
- EXPECT_TRUE(fs[0].psnr.has_value());
-
- const Psnr& psnr = *fs[0].psnr;
- EXPECT_NEAR(psnr.y, 48, 1);
- EXPECT_NEAR(psnr.u, 42, 1);
- EXPECT_NEAR(psnr.v, 38, 1);
-}
-
-} // namespace test
-} // namespace webrtc
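
As a sanity check on the EXPECT_NEAR values in the ReferenceVideoSource test above: the decoded frame has constant planes (Y=1, U=2, V=3) and the reference frame is all zeros, so every sample in a plane differs by a constant d and the 8-bit PSNR reduces to 20 * log10(255 / d):

  Y (d = 1): 20 * log10(255 / 1) ~= 48.1 dB
  U (d = 2): 20 * log10(255 / 2) ~= 42.1 dB
  V (d = 3): 20 * log10(255 / 3) ~= 38.6 dB

which matches the expected 48, 42 and 38 within the 1 dB tolerance.
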
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc
deleted file mode 100644
index 9808e2a601..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.cc
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_stats_impl.h"
-
-#include <algorithm>
-
-#include "api/numerics/samples_stats_counter.h"
-#include "api/test/metrics/metrics_logger.h"
-#include "rtc_base/checks.h"
-#include "rtc_base/time_utils.h"
-
-namespace webrtc {
-namespace test {
-namespace {
-using Frame = VideoCodecStats::Frame;
-using Stream = VideoCodecStats::Stream;
-
-constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
-class LeakyBucket {
- public:
- LeakyBucket() : level_bits_(0) {}
-
- // Updates the bucket level and returns its current level in bits. Data is
- // removed from the bucket at a rate equal to the target bitrate of the
- // previous frame. The bucket level is tracked with floating-point precision.
- // The returned bucket level is rounded up.
- int Update(const Frame& frame) {
- RTC_CHECK(frame.target_bitrate) << "Bitrate must be specified.";
-
- if (prev_frame_) {
- RTC_CHECK_GT(frame.timestamp_rtp, prev_frame_->timestamp_rtp)
- << "Timestamp must increase.";
- TimeDelta passed =
- (frame.timestamp_rtp - prev_frame_->timestamp_rtp) / k90kHz;
- level_bits_ -=
- prev_frame_->target_bitrate->bps() * passed.us() / 1000000.0;
- level_bits_ = std::max(level_bits_, 0.0);
- }
-
- prev_frame_ = frame;
-
- level_bits_ += frame.frame_size.bytes() * 8;
- return static_cast<int>(std::ceil(level_bits_));
- }
-
- private:
- absl::optional<Frame> prev_frame_;
- double level_bits_;
-};
-
-// Merges spatial layer frames into superframes.
-std::vector<Frame> Merge(const std::vector<Frame>& frames) {
- std::vector<Frame> superframes;
- // Map from frame timestamp to index in `superframes` vector.
- std::map<uint32_t, int> index;
-
- for (const auto& f : frames) {
- if (index.find(f.timestamp_rtp) == index.end()) {
- index[f.timestamp_rtp] = static_cast<int>(superframes.size());
- superframes.push_back(f);
- continue;
- }
-
- Frame& sf = superframes[index[f.timestamp_rtp]];
-
- sf.width = std::max(sf.width, f.width);
- sf.height = std::max(sf.height, f.height);
- sf.frame_size += f.frame_size;
- sf.keyframe |= f.keyframe;
-
- sf.encode_time = std::max(sf.encode_time, f.encode_time);
- sf.decode_time = std::max(sf.decode_time, f.decode_time);
-
- if (f.spatial_idx > sf.spatial_idx) {
- if (f.qp) {
- sf.qp = f.qp;
- }
- if (f.psnr) {
- sf.psnr = f.psnr;
- }
- }
-
- sf.spatial_idx = std::max(sf.spatial_idx, f.spatial_idx);
- sf.temporal_idx = std::max(sf.temporal_idx, f.temporal_idx);
-
- sf.encoded |= f.encoded;
- sf.decoded |= f.decoded;
- }
-
- return superframes;
-}
-
-Timestamp RtpToTime(uint32_t timestamp_rtp) {
- return Timestamp::Micros((timestamp_rtp / k90kHz).us());
-}
-
-SamplesStatsCounter::StatsSample StatsSample(double value, Timestamp time) {
- return SamplesStatsCounter::StatsSample{value, time};
-}
-
-TimeDelta CalcTotalDuration(const std::vector<Frame>& frames) {
- RTC_CHECK(!frames.empty());
- TimeDelta duration = TimeDelta::Zero();
- if (frames.size() > 1) {
- duration +=
- (frames.rbegin()->timestamp_rtp - frames.begin()->timestamp_rtp) /
- k90kHz;
- }
-
- // Add last frame duration. If target frame rate is provided, calculate frame
- // duration from it. Otherwise, assume duration of last frame is the same as
- // duration of preceding frame.
- if (frames.rbegin()->target_framerate) {
- duration += 1 / *frames.rbegin()->target_framerate;
- } else {
- RTC_CHECK_GT(frames.size(), 1u);
- duration += (frames.rbegin()->timestamp_rtp -
- std::next(frames.rbegin())->timestamp_rtp) /
- k90kHz;
- }
-
- return duration;
-}
-} // namespace
-
-std::vector<Frame> VideoCodecStatsImpl::Slice(
- absl::optional<Filter> filter) const {
- std::vector<Frame> frames;
- for (const auto& [frame_id, f] : frames_) {
- if (filter.has_value()) {
- if (filter->first_frame.has_value() &&
- f.frame_num < *filter->first_frame) {
- continue;
- }
- if (filter->last_frame.has_value() && f.frame_num > *filter->last_frame) {
- continue;
- }
- if (filter->spatial_idx.has_value() &&
- f.spatial_idx != *filter->spatial_idx) {
- continue;
- }
- if (filter->temporal_idx.has_value() &&
- f.temporal_idx > *filter->temporal_idx) {
- continue;
- }
- }
- frames.push_back(f);
- }
- return frames;
-}
-
-Stream VideoCodecStatsImpl::Aggregate(const std::vector<Frame>& frames) const {
- std::vector<Frame> superframes = Merge(frames);
- RTC_CHECK(!superframes.empty());
-
- LeakyBucket leacky_bucket;
- Stream stream;
- for (size_t i = 0; i < superframes.size(); ++i) {
- Frame& f = superframes[i];
- Timestamp time = RtpToTime(f.timestamp_rtp);
-
- if (!f.frame_size.IsZero()) {
- stream.width.AddSample(StatsSample(f.width, time));
- stream.height.AddSample(StatsSample(f.height, time));
- stream.frame_size_bytes.AddSample(
- StatsSample(f.frame_size.bytes(), time));
- stream.keyframe.AddSample(StatsSample(f.keyframe, time));
- if (f.qp) {
- stream.qp.AddSample(StatsSample(*f.qp, time));
- }
- }
-
- if (f.encoded) {
- stream.encode_time_ms.AddSample(StatsSample(f.encode_time.ms(), time));
- }
-
- if (f.decoded) {
- stream.decode_time_ms.AddSample(StatsSample(f.decode_time.ms(), time));
- }
-
- if (f.psnr) {
- stream.psnr.y.AddSample(StatsSample(f.psnr->y, time));
- stream.psnr.u.AddSample(StatsSample(f.psnr->u, time));
- stream.psnr.v.AddSample(StatsSample(f.psnr->v, time));
- }
-
- if (f.target_framerate) {
- stream.target_framerate_fps.AddSample(
- StatsSample(f.target_framerate->millihertz() / 1000.0, time));
- }
-
- if (f.target_bitrate) {
- stream.target_bitrate_kbps.AddSample(
- StatsSample(f.target_bitrate->bps() / 1000.0, time));
-
- int buffer_level_bits = leacky_bucket.Update(f);
- stream.transmission_time_ms.AddSample(
- StatsSample(buffer_level_bits * rtc::kNumMillisecsPerSec /
- f.target_bitrate->bps(),
- RtpToTime(f.timestamp_rtp)));
- }
- }
-
- TimeDelta duration = CalcTotalDuration(superframes);
- DataRate encoded_bitrate =
- DataSize::Bytes(stream.frame_size_bytes.GetSum()) / duration;
-
- int num_encoded_frames = stream.frame_size_bytes.NumSamples();
- Frequency encoded_framerate = num_encoded_frames / duration;
-
- absl::optional<double> bitrate_mismatch_pct;
- if (auto target_bitrate = superframes.begin()->target_bitrate;
- target_bitrate) {
- bitrate_mismatch_pct = 100.0 *
- (encoded_bitrate.bps() - target_bitrate->bps()) /
- target_bitrate->bps();
- }
-
- absl::optional<double> framerate_mismatch_pct;
- if (auto target_framerate = superframes.begin()->target_framerate;
- target_framerate) {
- framerate_mismatch_pct =
- 100.0 *
- (encoded_framerate.millihertz() - target_framerate->millihertz()) /
- target_framerate->millihertz();
- }
-
- for (auto& f : superframes) {
- Timestamp time = RtpToTime(f.timestamp_rtp);
- stream.encoded_bitrate_kbps.AddSample(
- StatsSample(encoded_bitrate.bps() / 1000.0, time));
-
- stream.encoded_framerate_fps.AddSample(
- StatsSample(encoded_framerate.millihertz() / 1000.0, time));
-
- if (bitrate_mismatch_pct) {
- stream.bitrate_mismatch_pct.AddSample(
- StatsSample(*bitrate_mismatch_pct, time));
- }
-
- if (framerate_mismatch_pct) {
- stream.framerate_mismatch_pct.AddSample(
- StatsSample(*framerate_mismatch_pct, time));
- }
- }
-
- return stream;
-}
-
-void VideoCodecStatsImpl::AddFrame(const Frame& frame) {
- FrameId frame_id{.timestamp_rtp = frame.timestamp_rtp,
- .spatial_idx = frame.spatial_idx};
- RTC_CHECK(frames_.find(frame_id) == frames_.end())
- << "Frame with timestamp_rtp=" << frame.timestamp_rtp
- << " and spatial_idx=" << frame.spatial_idx << " already exists";
-
- frames_[frame_id] = frame;
-}
-
-Frame* VideoCodecStatsImpl::GetFrame(uint32_t timestamp_rtp, int spatial_idx) {
- FrameId frame_id{.timestamp_rtp = timestamp_rtp, .spatial_idx = spatial_idx};
- if (frames_.find(frame_id) == frames_.end()) {
- return nullptr;
- }
- return &frames_.find(frame_id)->second;
-}
-
-} // namespace test
-} // namespace webrtc
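
Worked example of the duration and bitrate aggregation in the deleted Aggregate() above (the same numbers are exercised by the AggregateBitrate unit test removed further down): for two superframes at RTP timestamps 0 and 90000 with no target framerate on the last frame, the last frame's duration is assumed equal to the preceding gap, so

  total duration   = (90000 - 0) / 90 kHz + 1 s = 2 s
  encoded bitrate  = (1000 + 2000) bytes * 8 / 2 s = 12 kbps
  bitrate mismatch = 100 * (12000 bps - 8000 bps) / 8000 bps = +50%

against the target bitrate of 1000 bytes/s (8 kbps).
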
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h
deleted file mode 100644
index 77471d2ecd..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_
-#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_
-
-#include <map>
-#include <string>
-#include <vector>
-
-#include "absl/types/optional.h"
-#include "api/test/video_codec_stats.h"
-
-namespace webrtc {
-namespace test {
-
-// Implementation of `VideoCodecStats`. This class is not thread-safe.
-class VideoCodecStatsImpl : public VideoCodecStats {
- public:
- std::vector<Frame> Slice(
- absl::optional<Filter> filter = absl::nullopt) const override;
-
- Stream Aggregate(const std::vector<Frame>& frames) const override;
-
- void AddFrame(const Frame& frame);
-
- // Returns a raw pointer to a previously added frame. If the frame does not
- // exist, returns `nullptr`.
- Frame* GetFrame(uint32_t timestamp_rtp, int spatial_idx);
-
- private:
- struct FrameId {
- uint32_t timestamp_rtp;
- int spatial_idx;
-
- bool operator==(const FrameId& o) const {
- return timestamp_rtp == o.timestamp_rtp && spatial_idx == o.spatial_idx;
- }
-
- bool operator<(const FrameId& o) const {
- if (timestamp_rtp < o.timestamp_rtp)
- return true;
- if (timestamp_rtp == o.timestamp_rtp && spatial_idx < o.spatial_idx)
- return true;
- return false;
- }
- };
-
- std::map<FrameId, Frame> frames_;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_STATS_IMPL_H_
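
The FrameId comparator in the deleted header above implements the usual lexicographic (timestamp_rtp, spatial_idx) ordering that std::map requires. An equivalent, more compact formulation (a sketch with a hypothetical struct name, not the checked-in code) would be:

#include <cstdint>
#include <tuple>

struct FrameIdSketch {
  uint32_t timestamp_rtp;
  int spatial_idx;

  // Lexicographic (timestamp_rtp, spatial_idx) order via std::tie.
  bool operator<(const FrameIdSketch& o) const {
    return std::tie(timestamp_rtp, spatial_idx) <
           std::tie(o.timestamp_rtp, o.spatial_idx);
  }
};
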
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc
deleted file mode 100644
index ce11d5abe6..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_stats_impl_unittest.cc
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Copyright (c) 2023 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_stats_impl.h"
-
-#include <tuple>
-
-#include "absl/types/optional.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using ::testing::Return;
-using ::testing::Values;
-using Filter = VideoCodecStats::Filter;
-using Frame = VideoCodecStatsImpl::Frame;
-using Stream = VideoCodecStats::Stream;
-} // namespace
-
-TEST(VideoCodecStatsImpl, AddAndGetFrame) {
- VideoCodecStatsImpl stats;
- stats.AddFrame({.timestamp_rtp = 0, .spatial_idx = 0});
- stats.AddFrame({.timestamp_rtp = 0, .spatial_idx = 1});
- stats.AddFrame({.timestamp_rtp = 1, .spatial_idx = 0});
-
- Frame* fs = stats.GetFrame(/*timestamp_rtp=*/0, /*spatial_idx=*/0);
- ASSERT_NE(fs, nullptr);
- EXPECT_EQ(fs->timestamp_rtp, 0u);
- EXPECT_EQ(fs->spatial_idx, 0);
-
- fs = stats.GetFrame(/*timestamp_rtp=*/0, /*spatial_idx=*/1);
- ASSERT_NE(fs, nullptr);
- EXPECT_EQ(fs->timestamp_rtp, 0u);
- EXPECT_EQ(fs->spatial_idx, 1);
-
- fs = stats.GetFrame(/*timestamp_rtp=*/1, /*spatial_idx=*/0);
- ASSERT_NE(fs, nullptr);
- EXPECT_EQ(fs->timestamp_rtp, 1u);
- EXPECT_EQ(fs->spatial_idx, 0);
-
- fs = stats.GetFrame(/*timestamp_rtp=*/1, /*spatial_idx=*/1);
- EXPECT_EQ(fs, nullptr);
-}
-
-class VideoCodecStatsImplSlicingTest
- : public ::testing::TestWithParam<std::tuple<Filter, std::vector<int>>> {};
-
-TEST_P(VideoCodecStatsImplSlicingTest, Slice) {
- Filter filter = std::get<0>(GetParam());
- std::vector<int> expected_frames = std::get<1>(GetParam());
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0, .timestamp_rtp = 0, .spatial_idx = 0, .temporal_idx = 0},
- {.frame_num = 0, .timestamp_rtp = 0, .spatial_idx = 1, .temporal_idx = 0},
- {.frame_num = 1, .timestamp_rtp = 1, .spatial_idx = 0, .temporal_idx = 1},
- {.frame_num = 1,
- .timestamp_rtp = 1,
- .spatial_idx = 1,
- .temporal_idx = 1}};
-
- VideoCodecStatsImpl stats;
- stats.AddFrame(frames[0]);
- stats.AddFrame(frames[1]);
- stats.AddFrame(frames[2]);
- stats.AddFrame(frames[3]);
-
- std::vector<VideoCodecStats::Frame> slice = stats.Slice(filter);
- ASSERT_EQ(slice.size(), expected_frames.size());
- for (size_t i = 0; i < expected_frames.size(); ++i) {
- Frame& expected = frames[expected_frames[i]];
- EXPECT_EQ(slice[i].frame_num, expected.frame_num);
- EXPECT_EQ(slice[i].timestamp_rtp, expected.timestamp_rtp);
- EXPECT_EQ(slice[i].spatial_idx, expected.spatial_idx);
- EXPECT_EQ(slice[i].temporal_idx, expected.temporal_idx);
- }
-}
-
-INSTANTIATE_TEST_SUITE_P(
- All,
- VideoCodecStatsImplSlicingTest,
- ::testing::Values(
- std::make_tuple(Filter{}, std::vector<int>{0, 1, 2, 3}),
- std::make_tuple(Filter{.first_frame = 1}, std::vector<int>{2, 3}),
- std::make_tuple(Filter{.last_frame = 0}, std::vector<int>{0, 1}),
- std::make_tuple(Filter{.spatial_idx = 0}, std::vector<int>{0, 2}),
- std::make_tuple(Filter{.temporal_idx = 1},
- std::vector<int>{0, 1, 2, 3})));
-
-TEST(VideoCodecStatsImpl, AggregateBitrate) {
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0,
- .timestamp_rtp = 0,
- .frame_size = DataSize::Bytes(1000),
- .target_bitrate = DataRate::BytesPerSec(1000)},
- {.frame_num = 1,
- .timestamp_rtp = 90000,
- .frame_size = DataSize::Bytes(2000),
- .target_bitrate = DataRate::BytesPerSec(1000)}};
-
- Stream stream = VideoCodecStatsImpl().Aggregate(frames);
- EXPECT_EQ(stream.encoded_bitrate_kbps.GetAverage(), 12.0);
- EXPECT_EQ(stream.bitrate_mismatch_pct.GetAverage(), 50.0);
-}
-
-TEST(VideoCodecStatsImpl, AggregateFramerate) {
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0,
- .timestamp_rtp = 0,
- .frame_size = DataSize::Bytes(1),
- .target_framerate = Frequency::Hertz(1)},
- {.frame_num = 1,
- .timestamp_rtp = 90000,
- .frame_size = DataSize::Zero(),
- .target_framerate = Frequency::Hertz(1)}};
-
- Stream stream = VideoCodecStatsImpl().Aggregate(frames);
- EXPECT_EQ(stream.encoded_framerate_fps.GetAverage(), 0.5);
- EXPECT_EQ(stream.framerate_mismatch_pct.GetAverage(), -50.0);
-}
-
-TEST(VideoCodecStatsImpl, AggregateTransmissionTime) {
- std::vector<VideoCodecStats::Frame> frames = {
- {.frame_num = 0,
- .timestamp_rtp = 0,
- .frame_size = DataSize::Bytes(2),
- .target_bitrate = DataRate::BytesPerSec(1)},
- {.frame_num = 1,
- .timestamp_rtp = 90000,
- .frame_size = DataSize::Bytes(3),
- .target_bitrate = DataRate::BytesPerSec(1)}};
-
- Stream stream = VideoCodecStatsImpl().Aggregate(frames);
- ASSERT_EQ(stream.transmission_time_ms.NumSamples(), 2);
- ASSERT_EQ(stream.transmission_time_ms.GetSamples()[0], 2000);
- ASSERT_EQ(stream.transmission_time_ms.GetSamples()[1], 4000);
-}
-
-} // namespace test
-} // namespace webrtc
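
To see where the 2000 ms and 4000 ms samples expected by AggregateTransmissionTime above come from: the target bitrate is 1 byte/s = 8 bps. Frame 0 (2 bytes) fills the leaky bucket to 16 bits, giving a transmission time of 16 / 8 = 2 s. One second later (90000 RTP ticks at 90 kHz) 8 bits have drained, leaving 8 bits; frame 1 adds 24 bits for a level of 32 bits and a transmission time of 32 / 8 = 4 s.
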
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc
index 1c8fe97e84..60c2fcbb6e 100644
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc
+++ b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_test.cc
@@ -8,41 +8,62 @@
* be found in the AUTHORS file in the root of the source tree.
*/
-#include "api/video_codecs/video_codec.h"
-
-#include <cstddef>
#include <memory>
#include <string>
#include <vector>
#include "absl/flags/flag.h"
#include "absl/functional/any_invocable.h"
-#include "api/test/create_video_codec_tester.h"
#include "api/test/metrics/global_metrics_logger_and_exporter.h"
-#include "api/test/video_codec_tester.h"
-#include "api/test/videocodec_test_stats.h"
#include "api/units/data_rate.h"
#include "api/units/frequency.h"
-#include "api/video/encoded_image.h"
-#include "api/video/i420_buffer.h"
#include "api/video/resolution.h"
-#include "api/video/video_frame.h"
-#include "api/video_codecs/scalability_mode.h"
-#include "api/video_codecs/video_decoder.h"
-#include "api/video_codecs/video_encoder.h"
-#include "media/engine/internal_decoder_factory.h"
-#include "media/engine/internal_encoder_factory.h"
-#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
-#include "modules/video_coding/include/video_error_codes.h"
-#include "modules/video_coding/svc/scalability_mode_util.h"
+#include "api/video_codecs/builtin_video_decoder_factory.h"
+#include "api/video_codecs/builtin_video_encoder_factory.h"
#if defined(WEBRTC_ANDROID)
#include "modules/video_coding/codecs/test/android_codec_factory_helper.h"
#endif
+#include "modules/video_coding/svc/scalability_mode_util.h"
#include "rtc_base/logging.h"
+#include "rtc_base/strings/string_builder.h"
#include "test/gtest.h"
#include "test/test_flags.h"
#include "test/testsupport/file_utils.h"
-#include "test/testsupport/frame_reader.h"
+#include "test/video_codec_tester.h"
+
+ABSL_FLAG(std::string,
+ video_name,
+ "FourPeople_1280x720_30",
+ "Name of input video sequence.");
+ABSL_FLAG(std::string,
+ encoder,
+ "libaom-av1",
+ "Encoder: libaom-av1, libvpx-vp9, libvpx-vp8, openh264, hw-vp8, "
+ "hw-vp9, hw-av1, hw-h264, hw-h265");
+ABSL_FLAG(std::string,
+ decoder,
+ "dav1d",
+ "Decoder: dav1d, libvpx-vp9, libvpx-vp8, ffmpeg-h264, hw-vp8, "
+ "hw-vp9, hw-av1, hw-h264, hw-h265");
+ABSL_FLAG(std::string, scalability_mode, "L1T1", "Scalability mode.");
+ABSL_FLAG(int, width, 1280, "Width.");
+ABSL_FLAG(int, height, 720, "Height.");
+ABSL_FLAG(std::vector<std::string>,
+ bitrate_kbps,
+ {"1024"},
+ "Encode target bitrate per layer (l0t0,l0t1,...l1t0,l1t1 and so on) "
+ "in kbps.");
+ABSL_FLAG(double,
+ framerate_fps,
+ 30.0,
+ "Encode target frame rate of the top temporal layer in fps.");
+ABSL_FLAG(int, num_frames, 300, "Number of frames to encode and/or decode.");
+ABSL_FLAG(std::string, test_name, "", "Test name.");
+ABSL_FLAG(bool, dump_decoder_input, false, "Dump decoder input.");
+ABSL_FLAG(bool, dump_decoder_output, false, "Dump decoder output.");
+ABSL_FLAG(bool, dump_encoder_input, false, "Dump encoder input.");
+ABSL_FLAG(bool, dump_encoder_output, false, "Dump encoder output.");
+ABSL_FLAG(bool, write_csv, false, "Write metrics to a CSV file.");
namespace webrtc {
namespace test {
@@ -50,6 +71,10 @@ namespace test {
namespace {
using ::testing::Combine;
using ::testing::Values;
+using VideoSourceSettings = VideoCodecTester::VideoSourceSettings;
+using EncodingSettings = VideoCodecTester::EncodingSettings;
+using VideoCodecStats = VideoCodecTester::VideoCodecStats;
+using Filter = VideoCodecStats::Filter;
using PacingMode = VideoCodecTester::PacingSettings::PacingMode;
struct VideoInfo {
@@ -58,405 +83,93 @@ struct VideoInfo {
Frequency framerate;
};
-struct LayerId {
- int spatial_idx;
- int temporal_idx;
-
- bool operator==(const LayerId& o) const {
- return spatial_idx == o.spatial_idx && temporal_idx == o.temporal_idx;
- }
-
- bool operator<(const LayerId& o) const {
- if (spatial_idx < o.spatial_idx)
- return true;
- if (spatial_idx == o.spatial_idx && temporal_idx < o.temporal_idx)
- return true;
- return false;
- }
-};
-
-struct EncodingSettings {
- ScalabilityMode scalability_mode;
- struct LayerSettings {
- Resolution resolution;
- Frequency framerate;
- DataRate bitrate;
- };
- std::map<LayerId, LayerSettings> layer_settings;
-
- bool IsSameSettings(const EncodingSettings& other) const {
- if (scalability_mode != other.scalability_mode) {
- return false;
- }
-
- for (auto [layer_id, layer] : layer_settings) {
- const auto& other_layer = other.layer_settings.at(layer_id);
- if (layer.resolution != other_layer.resolution) {
- return false;
- }
- }
-
- return true;
- }
-
- bool IsSameRate(const EncodingSettings& other) const {
- for (auto [layer_id, layer] : layer_settings) {
- const auto& other_layer = other.layer_settings.at(layer_id);
- if (layer.bitrate != other_layer.bitrate ||
- layer.framerate != other_layer.framerate) {
- return false;
- }
- }
-
- return true;
- }
-};
-
-const VideoInfo kFourPeople_1280x720_30 = {
- .name = "FourPeople_1280x720_30",
- .resolution = {.width = 1280, .height = 720},
- .framerate = Frequency::Hertz(30)};
-
-class TestRawVideoSource : public VideoCodecTester::RawVideoSource {
- public:
- static constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
- TestRawVideoSource(VideoInfo video_info,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames)
- : video_info_(video_info),
- frame_settings_(frame_settings),
- num_frames_(num_frames),
- frame_num_(0),
- // Start with non-zero timestamp to force using frame RTP timestamps in
- // IvfFrameWriter.
- timestamp_rtp_(90000) {
- // Ensure settings for the first frame are provided.
- RTC_CHECK_GT(frame_settings_.size(), 0u);
- RTC_CHECK_EQ(frame_settings_.begin()->first, 0);
-
- frame_reader_ = CreateYuvFrameReader(
- ResourcePath(video_info_.name, "yuv"), video_info_.resolution,
- YuvFrameReaderImpl::RepeatMode::kPingPong);
- RTC_CHECK(frame_reader_);
- }
-
- // Pulls the next frame. The frame RTP timestamp is set according to
- // `EncodingSettings::framerate`.
- absl::optional<VideoFrame> PullFrame() override {
- if (frame_num_ >= num_frames_) {
- return absl::nullopt; // End of stream.
- }
-
- const EncodingSettings& encoding_settings =
- std::prev(frame_settings_.upper_bound(frame_num_))->second;
-
- Resolution resolution =
- encoding_settings.layer_settings.begin()->second.resolution;
- Frequency framerate =
- encoding_settings.layer_settings.begin()->second.framerate;
-
- int pulled_frame;
- auto buffer = frame_reader_->PullFrame(
- &pulled_frame, resolution,
- {.num = static_cast<int>(framerate.millihertz()),
- .den = static_cast<int>(video_info_.framerate.millihertz())});
- RTC_CHECK(buffer) << "Cannot pull frame " << frame_num_;
-
- auto frame = VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp_)
- .set_timestamp_us((timestamp_rtp_ / k90kHz).us())
- .build();
-
- pulled_frames_[timestamp_rtp_] = pulled_frame;
- timestamp_rtp_ += k90kHz / framerate;
- ++frame_num_;
-
- return frame;
- }
-
- // Reads the frame specified by `timestamp_rtp`, scales it to `resolution`
- // and returns it. A frame with the given `timestamp_rtp` is expected to
- // have been pulled before.
- VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override {
- RTC_CHECK(pulled_frames_.find(timestamp_rtp) != pulled_frames_.end())
- << "Frame with RTP timestamp " << timestamp_rtp
- << " was not pulled before";
- auto buffer =
- frame_reader_->ReadFrame(pulled_frames_[timestamp_rtp], resolution);
- return VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp)
- .build();
- }
-
- protected:
- VideoInfo video_info_;
- std::unique_ptr<FrameReader> frame_reader_;
- const std::map<int, EncodingSettings>& frame_settings_;
- int num_frames_;
- int frame_num_;
- uint32_t timestamp_rtp_;
- std::map<uint32_t, int> pulled_frames_;
-};
-
-class TestEncoder : public VideoCodecTester::Encoder,
- public EncodedImageCallback {
- public:
- TestEncoder(std::unique_ptr<VideoEncoder> encoder,
- const std::string codec_type,
- const std::map<int, EncodingSettings>& frame_settings)
- : encoder_(std::move(encoder)),
- codec_type_(codec_type),
- frame_settings_(frame_settings),
- frame_num_(0) {
- // Ensure settings for the first frame are provided.
- RTC_CHECK_GT(frame_settings_.size(), 0u);
- RTC_CHECK_EQ(frame_settings_.begin()->first, 0);
-
- encoder_->RegisterEncodeCompleteCallback(this);
- }
-
- void Initialize() override {
- const EncodingSettings& first_frame_settings = frame_settings_.at(0);
- Configure(first_frame_settings);
- SetRates(first_frame_settings);
- }
-
- void Encode(const VideoFrame& frame, EncodeCallback callback) override {
- {
- MutexLock lock(&mutex_);
- callbacks_[frame.timestamp()] = std::move(callback);
- }
-
- if (auto fs = frame_settings_.find(frame_num_);
- fs != frame_settings_.begin() && fs != frame_settings_.end()) {
- if (!fs->second.IsSameSettings(std::prev(fs)->second)) {
- Configure(fs->second);
- } else if (!fs->second.IsSameRate(std::prev(fs)->second)) {
- SetRates(fs->second);
- }
- }
-
- encoder_->Encode(frame, nullptr);
- ++frame_num_;
- }
-
- void Flush() override {
- // TODO(webrtc:14852): For codecs which buffer frames we need to flush
- // them to get the last frames. Add such functionality to VideoEncoder
- // API. On Android it will map directly to `MediaCodec.flush()`.
- encoder_->Release();
- }
-
- VideoEncoder* encoder() { return encoder_.get(); }
-
- protected:
- Result OnEncodedImage(const EncodedImage& encoded_image,
- const CodecSpecificInfo* codec_specific_info) override {
- MutexLock lock(&mutex_);
- auto cb = callbacks_.find(encoded_image.RtpTimestamp());
- RTC_CHECK(cb != callbacks_.end());
- cb->second(encoded_image);
-
- callbacks_.erase(callbacks_.begin(), cb);
- return Result(Result::Error::OK);
- }
-
- void Configure(const EncodingSettings& es) {
- VideoCodec vc;
- const EncodingSettings::LayerSettings& layer_settings =
- es.layer_settings.begin()->second;
- vc.width = layer_settings.resolution.width;
- vc.height = layer_settings.resolution.height;
- const DataRate& bitrate = layer_settings.bitrate;
- vc.startBitrate = bitrate.kbps();
- vc.maxBitrate = bitrate.kbps();
- vc.minBitrate = 0;
- vc.maxFramerate = static_cast<uint32_t>(layer_settings.framerate.hertz());
- vc.active = true;
- vc.qpMax = 63;
- vc.numberOfSimulcastStreams = 0;
- vc.mode = webrtc::VideoCodecMode::kRealtimeVideo;
- vc.SetFrameDropEnabled(true);
- vc.SetScalabilityMode(es.scalability_mode);
-
- vc.codecType = PayloadStringToCodecType(codec_type_);
- if (vc.codecType == kVideoCodecVP8) {
- *(vc.VP8()) = VideoEncoder::GetDefaultVp8Settings();
- } else if (vc.codecType == kVideoCodecVP9) {
- *(vc.VP9()) = VideoEncoder::GetDefaultVp9Settings();
- } else if (vc.codecType == kVideoCodecH264) {
- *(vc.H264()) = VideoEncoder::GetDefaultH264Settings();
- }
-
- VideoEncoder::Settings ves(
- VideoEncoder::Capabilities(/*loss_notification=*/false),
- /*number_of_cores=*/1,
- /*max_payload_size=*/1440);
-
- int result = encoder_->InitEncode(&vc, ves);
- ASSERT_EQ(result, WEBRTC_VIDEO_CODEC_OK);
-
- SetRates(es);
- }
-
- void SetRates(const EncodingSettings& es) {
- VideoEncoder::RateControlParameters rc;
- int num_spatial_layers =
- ScalabilityModeToNumSpatialLayers(es.scalability_mode);
- int num_temporal_layers =
- ScalabilityModeToNumSpatialLayers(es.scalability_mode);
- for (int sidx = 0; sidx < num_spatial_layers; ++sidx) {
- for (int tidx = 0; tidx < num_temporal_layers; ++tidx) {
- auto layer_settings =
- es.layer_settings.find({.spatial_idx = sidx, .temporal_idx = tidx});
- RTC_CHECK(layer_settings != es.layer_settings.end())
- << "Bitrate for layer S=" << sidx << " T=" << tidx << " is not set";
- rc.bitrate.SetBitrate(sidx, tidx, layer_settings->second.bitrate.bps());
- }
- }
-
- rc.framerate_fps =
- es.layer_settings.begin()->second.framerate.millihertz() / 1000.0;
- encoder_->SetRates(rc);
- }
-
- std::unique_ptr<VideoEncoder> encoder_;
- const std::string codec_type_;
- const std::map<int, EncodingSettings>& frame_settings_;
- int frame_num_;
- std::map<uint32_t, EncodeCallback> callbacks_ RTC_GUARDED_BY(mutex_);
- Mutex mutex_;
-};
-
-class TestDecoder : public VideoCodecTester::Decoder,
- public DecodedImageCallback {
- public:
- TestDecoder(std::unique_ptr<VideoDecoder> decoder,
- const std::string codec_type)
- : decoder_(std::move(decoder)), codec_type_(codec_type) {
- decoder_->RegisterDecodeCompleteCallback(this);
- }
-
- void Initialize() override {
- VideoDecoder::Settings ds;
- ds.set_codec_type(PayloadStringToCodecType(codec_type_));
- ds.set_number_of_cores(1);
- ds.set_max_render_resolution({1280, 720});
-
- bool result = decoder_->Configure(ds);
- ASSERT_TRUE(result);
- }
-
- void Decode(const EncodedImage& frame, DecodeCallback callback) override {
- {
- MutexLock lock(&mutex_);
- callbacks_[frame.RtpTimestamp()] = std::move(callback);
- }
-
- decoder_->Decode(frame, /*render_time_ms=*/0);
- }
-
- void Flush() override {
- // TODO(webrtc:14852): For codecs which buffer frames we need to flush
- // them to get the last frames. Add such functionality to VideoDecoder
- // API. On Android it will map directly to `MediaCodec.flush()`.
- decoder_->Release();
- }
-
- VideoDecoder* decoder() { return decoder_.get(); }
-
- protected:
- int Decoded(VideoFrame& decoded_frame) override {
- MutexLock lock(&mutex_);
- auto cb = callbacks_.find(decoded_frame.timestamp());
- RTC_CHECK(cb != callbacks_.end());
- cb->second(decoded_frame);
+const std::map<std::string, VideoInfo> kRawVideos = {
+ {"FourPeople_1280x720_30",
+ {.name = "FourPeople_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"vidyo1_1280x720_30",
+ {.name = "vidyo1_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"vidyo4_1280x720_30",
+ {.name = "vidyo4_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"KristenAndSara_1280x720_30",
+ {.name = "KristenAndSara_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}},
+ {"Johnny_1280x720_30",
+ {.name = "Johnny_1280x720_30",
+ .resolution = {.width = 1280, .height = 720},
+ .framerate = Frequency::Hertz(30)}}};
+
+static constexpr Frequency k90kHz = Frequency::Hertz(90000);
+
+std::string CodecNameToCodecType(std::string name) {
+ if (name.find("av1") != std::string::npos) {
+ return "AV1";
+ }
+ if (name.find("vp9") != std::string::npos) {
+ return "VP9";
+ }
+ if (name.find("vp8") != std::string::npos) {
+ return "VP8";
+ }
+ if (name.find("h264") != std::string::npos) {
+ return "H264";
+ }
+ if (name.find("h265") != std::string::npos) {
+ return "H265";
+ }
+ RTC_CHECK_NOTREACHED();
+}
- callbacks_.erase(callbacks_.begin(), cb);
- return WEBRTC_VIDEO_CODEC_OK;
+// TODO(webrtc:14852): Make Create[Encoder,Decoder]Factory work with the codec
+// name directly.
+std::string CodecNameToCodecImpl(std::string name) {
+ if (name.find("hw") != std::string::npos) {
+ return "mediacodec";
}
-
- std::unique_ptr<VideoDecoder> decoder_;
- const std::string codec_type_;
- std::map<uint32_t, DecodeCallback> callbacks_ RTC_GUARDED_BY(mutex_);
- Mutex mutex_;
-};
-
-std::unique_ptr<TestRawVideoSource> CreateVideoSource(
- const VideoInfo& video,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames) {
- return std::make_unique<TestRawVideoSource>(video, frame_settings,
- num_frames);
+ return "builtin";
}
-std::unique_ptr<TestEncoder> CreateEncoder(
- std::string type,
- std::string impl,
- const std::map<int, EncodingSettings>& frame_settings) {
- std::unique_ptr<VideoEncoderFactory> factory;
+std::unique_ptr<VideoEncoderFactory> CreateEncoderFactory(std::string impl) {
if (impl == "builtin") {
- factory = std::make_unique<InternalEncoderFactory>();
- } else if (impl == "mediacodec") {
+ return CreateBuiltinVideoEncoderFactory();
+ }
#if defined(WEBRTC_ANDROID)
- InitializeAndroidObjects();
- factory = CreateAndroidEncoderFactory();
+ InitializeAndroidObjects();
+ return CreateAndroidEncoderFactory();
+#else
+ return nullptr;
#endif
- }
- std::unique_ptr<VideoEncoder> encoder =
- factory->CreateVideoEncoder(SdpVideoFormat(type));
- if (encoder == nullptr) {
- return nullptr;
- }
- return std::make_unique<TestEncoder>(std::move(encoder), type,
- frame_settings);
}
-std::unique_ptr<TestDecoder> CreateDecoder(std::string type, std::string impl) {
- std::unique_ptr<VideoDecoderFactory> factory;
+std::unique_ptr<VideoDecoderFactory> CreateDecoderFactory(std::string impl) {
if (impl == "builtin") {
- factory = std::make_unique<InternalDecoderFactory>();
- } else if (impl == "mediacodec") {
+ return CreateBuiltinVideoDecoderFactory();
+ }
#if defined(WEBRTC_ANDROID)
- InitializeAndroidObjects();
- factory = CreateAndroidDecoderFactory();
+ InitializeAndroidObjects();
+ return CreateAndroidDecoderFactory();
+#else
+ return nullptr;
#endif
- }
- std::unique_ptr<VideoDecoder> decoder =
- factory->CreateVideoDecoder(SdpVideoFormat(type));
- if (decoder == nullptr) {
- return nullptr;
- }
- return std::make_unique<TestDecoder>(std::move(decoder), type);
}
-void SetTargetRates(const std::map<int, EncodingSettings>& frame_settings,
- std::vector<VideoCodecStats::Frame>& frames) {
- for (VideoCodecStats::Frame& f : frames) {
- const EncodingSettings& encoding_settings =
- std::prev(frame_settings.upper_bound(f.frame_num))->second;
- LayerId layer_id = {.spatial_idx = f.spatial_idx,
- .temporal_idx = f.temporal_idx};
- RTC_CHECK(encoding_settings.layer_settings.find(layer_id) !=
- encoding_settings.layer_settings.end())
- << "Frame frame_num=" << f.frame_num
- << " belongs to spatial_idx=" << f.spatial_idx
- << " temporal_idx=" << f.temporal_idx
- << " but settings for this layer are not provided.";
- const EncodingSettings::LayerSettings& layer_settings =
- encoding_settings.layer_settings.at(layer_id);
- f.target_bitrate = layer_settings.bitrate;
- f.target_framerate = layer_settings.framerate;
+std::string TestName() {
+ std::string test_name = absl::GetFlag(FLAGS_test_name);
+ if (!test_name.empty()) {
+ return test_name;
}
+ return ::testing::UnitTest::GetInstance()->current_test_info()->name();
}
std::string TestOutputPath() {
std::string output_path =
- OutputPath() +
- ::testing::UnitTest::GetInstance()->current_test_info()->name();
+ (rtc::StringBuilder() << OutputPath() << TestName()).str();
std::string output_dir = DirName(output_path);
bool result = CreateDir(output_dir);
RTC_CHECK(result) << "Cannot create " << output_dir;
@@ -465,116 +178,120 @@ std::string TestOutputPath() {
} // namespace
std::unique_ptr<VideoCodecStats> RunEncodeDecodeTest(
- std::string codec_type,
std::string codec_impl,
const VideoInfo& video_info,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames,
- bool save_codec_input,
- bool save_codec_output) {
- std::unique_ptr<TestRawVideoSource> video_source =
- CreateVideoSource(video_info, frame_settings, num_frames);
-
- std::unique_ptr<TestEncoder> encoder =
- CreateEncoder(codec_type, codec_impl, frame_settings);
- if (encoder == nullptr) {
+ const std::map<uint32_t, EncodingSettings>& encoding_settings) {
+ VideoSourceSettings source_settings{
+ .file_path = ResourcePath(video_info.name, "yuv"),
+ .resolution = video_info.resolution,
+ .framerate = video_info.framerate};
+
+ const SdpVideoFormat& sdp_video_format =
+ encoding_settings.begin()->second.sdp_video_format;
+
+ std::unique_ptr<VideoEncoderFactory> encoder_factory =
+ CreateEncoderFactory(codec_impl);
+ if (!encoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*scalability_mode=*/absl::nullopt)
+ .is_supported) {
+ RTC_LOG(LS_WARNING) << "No encoder for video format "
+ << sdp_video_format.ToString();
return nullptr;
}
- std::unique_ptr<TestDecoder> decoder = CreateDecoder(codec_type, codec_impl);
- if (decoder == nullptr) {
- // If platform decoder is not available try built-in one.
- if (codec_impl == "builtin") {
- return nullptr;
- }
-
- decoder = CreateDecoder(codec_type, "builtin");
- if (decoder == nullptr) {
+ std::unique_ptr<VideoDecoderFactory> decoder_factory =
+ CreateDecoderFactory(codec_impl);
+ if (!decoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*reference_scaling=*/false)
+ .is_supported) {
+ decoder_factory = CreateDecoderFactory("builtin");
+ if (!decoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*reference_scaling=*/false)
+ .is_supported) {
+ RTC_LOG(LS_WARNING) << "No decoder for video format "
+ << sdp_video_format.ToString();
return nullptr;
}
}
- RTC_LOG(LS_INFO) << "Encoder implementation: "
- << encoder->encoder()->GetEncoderInfo().implementation_name;
- RTC_LOG(LS_INFO) << "Decoder implementation: "
- << decoder->decoder()->GetDecoderInfo().implementation_name;
+ std::string output_path = TestOutputPath();
VideoCodecTester::EncoderSettings encoder_settings;
- encoder_settings.pacing.mode =
- encoder->encoder()->GetEncoderInfo().is_hardware_accelerated
- ? PacingMode::kRealTime
- : PacingMode::kNoPacing;
+ encoder_settings.pacing_settings.mode =
+ codec_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime;
+ if (absl::GetFlag(FLAGS_dump_encoder_input)) {
+ encoder_settings.encoder_input_base_path = output_path + "_enc_input";
+ }
+ if (absl::GetFlag(FLAGS_dump_encoder_output)) {
+ encoder_settings.encoder_output_base_path = output_path + "_enc_output";
+ }
VideoCodecTester::DecoderSettings decoder_settings;
- decoder_settings.pacing.mode =
- decoder->decoder()->GetDecoderInfo().is_hardware_accelerated
- ? PacingMode::kRealTime
- : PacingMode::kNoPacing;
-
- std::string output_path = TestOutputPath();
- if (save_codec_input) {
- encoder_settings.encoder_input_base_path = output_path + "_enc_input";
+ decoder_settings.pacing_settings.mode =
+ codec_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime;
+ if (absl::GetFlag(FLAGS_dump_decoder_input)) {
decoder_settings.decoder_input_base_path = output_path + "_dec_input";
}
- if (save_codec_output) {
- encoder_settings.encoder_output_base_path = output_path + "_enc_output";
+ if (absl::GetFlag(FLAGS_dump_decoder_output)) {
decoder_settings.decoder_output_base_path = output_path + "_dec_output";
}
- std::unique_ptr<VideoCodecTester> tester = CreateVideoCodecTester();
- return tester->RunEncodeDecodeTest(video_source.get(), encoder.get(),
- decoder.get(), encoder_settings,
- decoder_settings);
+ return VideoCodecTester::RunEncodeDecodeTest(
+ source_settings, encoder_factory.get(), decoder_factory.get(),
+ encoder_settings, decoder_settings, encoding_settings);
}
std::unique_ptr<VideoCodecStats> RunEncodeTest(
std::string codec_type,
std::string codec_impl,
const VideoInfo& video_info,
- const std::map<int, EncodingSettings>& frame_settings,
- int num_frames,
- bool save_codec_input,
- bool save_codec_output) {
- std::unique_ptr<TestRawVideoSource> video_source =
- CreateVideoSource(video_info, frame_settings, num_frames);
-
- std::unique_ptr<TestEncoder> encoder =
- CreateEncoder(codec_type, codec_impl, frame_settings);
- if (encoder == nullptr) {
+ const std::map<uint32_t, EncodingSettings>& encoding_settings) {
+ VideoSourceSettings source_settings{
+ .file_path = ResourcePath(video_info.name, "yuv"),
+ .resolution = video_info.resolution,
+ .framerate = video_info.framerate};
+
+ const SdpVideoFormat& sdp_video_format =
+ encoding_settings.begin()->second.sdp_video_format;
+
+ std::unique_ptr<VideoEncoderFactory> encoder_factory =
+ CreateEncoderFactory(codec_impl);
+ if (!encoder_factory
+ ->QueryCodecSupport(sdp_video_format,
+ /*scalability_mode=*/absl::nullopt)
+ .is_supported) {
+ RTC_LOG(LS_WARNING) << "No encoder for video format "
+ << sdp_video_format.ToString();
return nullptr;
}
- RTC_LOG(LS_INFO) << "Encoder implementation: "
- << encoder->encoder()->GetEncoderInfo().implementation_name;
-
- VideoCodecTester::EncoderSettings encoder_settings;
- encoder_settings.pacing.mode =
- encoder->encoder()->GetEncoderInfo().is_hardware_accelerated
- ? PacingMode::kRealTime
- : PacingMode::kNoPacing;
-
std::string output_path = TestOutputPath();
- if (save_codec_input) {
+ VideoCodecTester::EncoderSettings encoder_settings;
+ encoder_settings.pacing_settings.mode =
+ codec_impl == "builtin" ? PacingMode::kNoPacing : PacingMode::kRealTime;
+ if (absl::GetFlag(FLAGS_dump_encoder_input)) {
encoder_settings.encoder_input_base_path = output_path + "_enc_input";
}
- if (save_codec_output) {
+ if (absl::GetFlag(FLAGS_dump_encoder_output)) {
encoder_settings.encoder_output_base_path = output_path + "_enc_output";
}
- std::unique_ptr<VideoCodecTester> tester = CreateVideoCodecTester();
- return tester->RunEncodeTest(video_source.get(), encoder.get(),
- encoder_settings);
+ return VideoCodecTester::RunEncodeTest(source_settings, encoder_factory.get(),
+ encoder_settings, encoding_settings);
}
-class SpatialQualityTest : public ::testing::TestWithParam<
- std::tuple</*codec_type=*/std::string,
- /*codec_impl=*/std::string,
- VideoInfo,
- std::tuple</*width=*/int,
- /*height=*/int,
- /*framerate_fps=*/double,
- /*bitrate_kbps=*/int,
- /*min_psnr=*/double>>> {
+class SpatialQualityTest : public ::testing::TestWithParam<std::tuple<
+ /*codec_type=*/std::string,
+ /*codec_impl=*/std::string,
+ VideoInfo,
+ std::tuple</*width=*/int,
+ /*height=*/int,
+ /*framerate_fps=*/double,
+ /*bitrate_kbps=*/int,
+ /*expected_min_psnr=*/double>>> {
public:
static std::string TestParamsToString(
const ::testing::TestParamInfo<SpatialQualityTest::ParamType>& info) {
@@ -590,41 +307,35 @@ class SpatialQualityTest : public ::testing::TestWithParam<
TEST_P(SpatialQualityTest, SpatialQuality) {
auto [codec_type, codec_impl, video_info, coding_settings] = GetParam();
- auto [width, height, framerate_fps, bitrate_kbps, psnr] = coding_settings;
-
- std::map<int, EncodingSettings> frame_settings = {
- {0,
- {.scalability_mode = ScalabilityMode::kL1T1,
- .layer_settings = {
- {LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = width, .height = height},
- .framerate = Frequency::MilliHertz(1000 * framerate_fps),
- .bitrate = DataRate::KilobitsPerSec(bitrate_kbps)}}}}}};
-
+ auto [width, height, framerate_fps, bitrate_kbps, expected_min_psnr] =
+ coding_settings;
int duration_s = 10;
int num_frames = duration_s * framerate_fps;
- std::unique_ptr<VideoCodecStats> stats = RunEncodeDecodeTest(
- codec_type, codec_impl, video_info, frame_settings, num_frames,
- /*save_codec_input=*/false, /*save_codec_output=*/false);
+ std::map<uint32_t, EncodingSettings> frames_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1", width, height,
+ {bitrate_kbps}, framerate_fps, num_frames);
+
+ std::unique_ptr<VideoCodecStats> stats =
+ RunEncodeDecodeTest(codec_impl, video_info, frames_settings);
VideoCodecStats::Stream stream;
if (stats != nullptr) {
- std::vector<VideoCodecStats::Frame> frames = stats->Slice();
- SetTargetRates(frame_settings, frames);
- stream = stats->Aggregate(frames);
+ stream = stats->Aggregate(Filter{});
if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) {
- EXPECT_GE(stream.psnr.y.GetAverage(), psnr);
+ EXPECT_GE(stream.psnr.y.GetAverage(), expected_min_psnr);
}
}
stream.LogMetrics(
GetGlobalMetricsLogger(),
::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ /*prefix=*/"",
/*metadata=*/
- {{"codec_type", codec_type},
- {"codec_impl", codec_impl},
- {"video_name", video_info.name}});
+ {{"video_name", video_info.name},
+ {"codec_type", codec_type},
+ {"codec_impl", codec_impl}});
}
INSTANTIATE_TEST_SUITE_P(
@@ -636,7 +347,7 @@ INSTANTIATE_TEST_SUITE_P(
#else
Values("builtin"),
#endif
- Values(kFourPeople_1280x720_30),
+ Values(kRawVideos.at("FourPeople_1280x720_30")),
Values(std::make_tuple(320, 180, 30, 32, 28),
std::make_tuple(320, 180, 30, 64, 30),
std::make_tuple(320, 180, 30, 128, 33),
@@ -671,33 +382,32 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) {
auto [codec_type, codec_impl, video_info, bitrate_kbps] = GetParam();
int duration_s = 10; // Duration of fixed rate interval.
- int first_frame = duration_s * video_info.framerate.millihertz() / 1000;
- int num_frames = 2 * duration_s * video_info.framerate.millihertz() / 1000;
-
- std::map<int, EncodingSettings> frame_settings = {
- {0,
- {.layer_settings = {{LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = video_info.framerate,
- .bitrate = DataRate::KilobitsPerSec(
- bitrate_kbps.first)}}}}},
- {first_frame,
- {.layer_settings = {
- {LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = video_info.framerate,
- .bitrate = DataRate::KilobitsPerSec(bitrate_kbps.second)}}}}}};
-
- std::unique_ptr<VideoCodecStats> stats = RunEncodeTest(
- codec_type, codec_impl, video_info, frame_settings, num_frames,
- /*save_codec_input=*/false, /*save_codec_output=*/false);
+ int num_frames =
+ static_cast<int>(duration_s * video_info.framerate.hertz<double>());
+
+ std::map<uint32_t, EncodingSettings> encoding_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1",
+ /*width=*/640, /*height=*/360, {bitrate_kbps.first},
+ /*framerate_fps=*/30, num_frames);
+
+ uint32_t initial_timestamp_rtp =
+ encoding_settings.rbegin()->first + k90kHz / Frequency::Hertz(30);
+
+ std::map<uint32_t, EncodingSettings> encoding_settings2 =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1",
+ /*width=*/640, /*height=*/360, {bitrate_kbps.second},
+ /*framerate_fps=*/30, num_frames, initial_timestamp_rtp);
+
+ encoding_settings.merge(encoding_settings2);
+
+ std::unique_ptr<VideoCodecStats> stats =
+ RunEncodeTest(codec_type, codec_impl, video_info, encoding_settings);
VideoCodecStats::Stream stream;
if (stats != nullptr) {
- std::vector<VideoCodecStats::Frame> frames =
- stats->Slice(VideoCodecStats::Filter{.first_frame = first_frame});
- SetTargetRates(frame_settings, frames);
- stream = stats->Aggregate(frames);
+ stream = stats->Aggregate({.min_timestamp_rtp = initial_timestamp_rtp});
if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) {
EXPECT_NEAR(stream.bitrate_mismatch_pct.GetAverage(), 0, 10);
EXPECT_NEAR(stream.framerate_mismatch_pct.GetAverage(), 0, 10);
@@ -707,6 +417,7 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) {
stream.LogMetrics(
GetGlobalMetricsLogger(),
::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ /*prefix=*/"",
/*metadata=*/
{{"codec_type", codec_type},
{"codec_impl", codec_impl},
@@ -715,18 +426,18 @@ TEST_P(BitrateAdaptationTest, BitrateAdaptation) {
std::to_string(bitrate_kbps.second)}});
}
-INSTANTIATE_TEST_SUITE_P(All,
- BitrateAdaptationTest,
- Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ BitrateAdaptationTest,
+ Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
#if defined(WEBRTC_ANDROID)
- Values("builtin", "mediacodec"),
+ Values("builtin", "mediacodec"),
#else
- Values("builtin"),
+ Values("builtin"),
#endif
- Values(kFourPeople_1280x720_30),
- Values(std::pair(1024, 512),
- std::pair(512, 1024))),
- BitrateAdaptationTest::TestParamsToString);
+ Values(kRawVideos.at("FourPeople_1280x720_30")),
+ Values(std::pair(1024, 512), std::pair(512, 1024))),
+ BitrateAdaptationTest::TestParamsToString);
class FramerateAdaptationTest
: public ::testing::TestWithParam<std::tuple</*codec_type=*/std::string,
@@ -749,34 +460,34 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) {
auto [codec_type, codec_impl, video_info, framerate_fps] = GetParam();
int duration_s = 10; // Duration of fixed rate interval.
- int first_frame = static_cast<int>(duration_s * framerate_fps.first);
- int num_frames = static_cast<int>(
- duration_s * (framerate_fps.first + framerate_fps.second));
-
- std::map<int, EncodingSettings> frame_settings = {
- {0,
- {.layer_settings = {{LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = Frequency::MilliHertz(
- 1000 * framerate_fps.first),
- .bitrate = DataRate::KilobitsPerSec(512)}}}}},
- {first_frame,
- {.layer_settings = {
- {LayerId{.spatial_idx = 0, .temporal_idx = 0},
- {.resolution = {.width = 640, .height = 360},
- .framerate = Frequency::MilliHertz(1000 * framerate_fps.second),
- .bitrate = DataRate::KilobitsPerSec(512)}}}}}};
-
- std::unique_ptr<VideoCodecStats> stats = RunEncodeTest(
- codec_type, codec_impl, video_info, frame_settings, num_frames,
- /*save_codec_input=*/false, /*save_codec_output=*/false);
+
+ std::map<uint32_t, EncodingSettings> encoding_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1",
+ /*width=*/640, /*height=*/360,
+ /*layer_bitrates_kbps=*/{512}, framerate_fps.first,
+ static_cast<int>(duration_s * framerate_fps.first));
+
+ uint32_t initial_timestamp_rtp =
+ encoding_settings.rbegin()->first +
+ k90kHz / Frequency::Hertz(framerate_fps.first);
+
+ std::map<uint32_t, EncodingSettings> encoding_settings2 =
+ VideoCodecTester::CreateEncodingSettings(
+ codec_type, /*scalability_mode=*/"L1T1", /*width=*/640,
+ /*height=*/360,
+ /*layer_bitrates_kbps=*/{512}, framerate_fps.second,
+ static_cast<int>(duration_s * framerate_fps.second),
+ initial_timestamp_rtp);
+
+ encoding_settings.merge(encoding_settings2);
+
+ std::unique_ptr<VideoCodecStats> stats =
+ RunEncodeTest(codec_type, codec_impl, video_info, encoding_settings);
VideoCodecStats::Stream stream;
if (stats != nullptr) {
- std::vector<VideoCodecStats::Frame> frames =
- stats->Slice(VideoCodecStats::Filter{.first_frame = first_frame});
- SetTargetRates(frame_settings, frames);
- stream = stats->Aggregate(frames);
+ stream = stats->Aggregate({.min_timestamp_rtp = initial_timestamp_rtp});
if (absl::GetFlag(FLAGS_webrtc_quick_perf_test)) {
EXPECT_NEAR(stream.bitrate_mismatch_pct.GetAverage(), 0, 10);
EXPECT_NEAR(stream.framerate_mismatch_pct.GetAverage(), 0, 10);
@@ -786,6 +497,7 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) {
stream.LogMetrics(
GetGlobalMetricsLogger(),
::testing::UnitTest::GetInstance()->current_test_info()->name(),
+ /*prefix=*/"",
/*metadata=*/
{{"codec_type", codec_type},
{"codec_impl", codec_impl},
@@ -794,17 +506,71 @@ TEST_P(FramerateAdaptationTest, FramerateAdaptation) {
std::to_string(framerate_fps.second)}});
}
-INSTANTIATE_TEST_SUITE_P(All,
- FramerateAdaptationTest,
- Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
+INSTANTIATE_TEST_SUITE_P(
+ All,
+ FramerateAdaptationTest,
+ Combine(Values("AV1", "VP9", "VP8", "H264", "H265"),
#if defined(WEBRTC_ANDROID)
- Values("builtin", "mediacodec"),
+ Values("builtin", "mediacodec"),
#else
- Values("builtin"),
+ Values("builtin"),
#endif
- Values(kFourPeople_1280x720_30),
- Values(std::pair(30, 15), std::pair(15, 30))),
- FramerateAdaptationTest::TestParamsToString);
+ Values(kRawVideos.at("FourPeople_1280x720_30")),
+ Values(std::pair(30, 15), std::pair(15, 30))),
+ FramerateAdaptationTest::TestParamsToString);
+
+TEST(VideoCodecTest, DISABLED_EncodeDecode) {
+ std::vector<std::string> bitrate_str = absl::GetFlag(FLAGS_bitrate_kbps);
+ std::vector<int> bitrate_kbps;
+ std::transform(bitrate_str.begin(), bitrate_str.end(),
+ std::back_inserter(bitrate_kbps),
+ [](const std::string& str) { return std::stoi(str); });
+
+ std::map<uint32_t, EncodingSettings> frames_settings =
+ VideoCodecTester::CreateEncodingSettings(
+ CodecNameToCodecType(absl::GetFlag(FLAGS_encoder)),
+ absl::GetFlag(FLAGS_scalability_mode), absl::GetFlag(FLAGS_width),
+ absl::GetFlag(FLAGS_height), {bitrate_kbps},
+ absl::GetFlag(FLAGS_framerate_fps), absl::GetFlag(FLAGS_num_frames));
+
+ // TODO(webrtc:14852): Pass encoder and decoder names directly, and update
+  // logged test name (implies losing history in the chromeperf dashboard).
+ // Sync with changes in Stream::LogMetrics (see TODOs there).
+ std::unique_ptr<VideoCodecStats> stats = RunEncodeDecodeTest(
+ CodecNameToCodecImpl(absl::GetFlag(FLAGS_encoder)),
+ kRawVideos.at(absl::GetFlag(FLAGS_video_name)), frames_settings);
+ ASSERT_NE(nullptr, stats);
+
+ // Log unsliced metrics.
+ VideoCodecStats::Stream stream = stats->Aggregate(Filter{});
+ stream.LogMetrics(GetGlobalMetricsLogger(), TestName(), /*prefix=*/"",
+ /*metadata=*/{});
+
+ // Log metrics sliced on spatial and temporal layer.
+ ScalabilityMode scalability_mode =
+ *ScalabilityModeFromString(absl::GetFlag(FLAGS_scalability_mode));
+ int num_spatial_layers = ScalabilityModeToNumSpatialLayers(scalability_mode);
+ int num_temporal_layers =
+ ScalabilityModeToNumTemporalLayers(scalability_mode);
+ for (int sidx = 0; sidx < num_spatial_layers; ++sidx) {
+ for (int tidx = 0; tidx < num_temporal_layers; ++tidx) {
+ std::string metric_name_prefix =
+ (rtc::StringBuilder() << "s" << sidx << "t" << tidx << "_").str();
+ stream = stats->Aggregate(
+ {.layer_id = {{.spatial_idx = sidx, .temporal_idx = tidx}}});
+ stream.LogMetrics(GetGlobalMetricsLogger(), TestName(),
+ metric_name_prefix,
+ /*metadata=*/{});
+ }
+ }
+
+ if (absl::GetFlag(FLAGS_write_csv)) {
+ stats->LogMetrics(
+ (rtc::StringBuilder() << TestOutputPath() << ".csv").str(),
+ stats->Slice(Filter{}, /*merge=*/false), /*metadata=*/
+ {{"test_name", TestName()}});
+ }
+}
} // namespace test
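Note on the reworked tests above: rates are now configured by keying EncodingSettings on RTP timestamps and splicing fixed-rate segments together, as BitrateAdaptationTest and FramerateAdaptationTest do. Below is a minimal standalone sketch of that timestamp arithmetic; it is plain C++17 with no WebRTC headers, and Settings is a hypothetical stand-in for the real EncodingSettings.

// Standalone sketch (hypothetical `Settings` struct, no WebRTC headers) of
// how the adaptation tests above key per-frame settings by RTP timestamp and
// splice a second rate segment one frame interval after the first segment.
#include <cstdint>
#include <iostream>
#include <map>

struct Settings {
  int bitrate_kbps;  // Stand-in for the real EncodingSettings payload.
};

// Creates `num_frames` entries spaced at the 90 kHz RTP clock rate.
std::map<uint32_t, Settings> CreateSegment(int bitrate_kbps,
                                           int framerate_fps,
                                           int num_frames,
                                           uint32_t first_timestamp_rtp) {
  constexpr uint32_t kRtpClockRateHz = 90000;
  std::map<uint32_t, Settings> segment;
  uint32_t timestamp_rtp = first_timestamp_rtp;
  for (int i = 0; i < num_frames; ++i) {
    segment[timestamp_rtp] = Settings{bitrate_kbps};
    timestamp_rtp += kRtpClockRateHz / framerate_fps;
  }
  return segment;
}

int main() {
  // 10 s at 1024 kbps, 30 fps, starting at RTP timestamp 0.
  std::map<uint32_t, Settings> settings = CreateSegment(1024, 30, 300, 0);
  // The second segment starts one frame interval after the last key, the same
  // way BitrateAdaptationTest computes initial_timestamp_rtp above.
  uint32_t initial_timestamp_rtp = settings.rbegin()->first + 90000 / 30;
  settings.merge(CreateSegment(512, 30, 300, initial_timestamp_rtp));
  std::cout << "frames: " << settings.size() << ", second segment starts at "
            << initial_timestamp_rtp << "\n";  // frames: 600, ... 900000
}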
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc
deleted file mode 100644
index f15b1b35f3..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.cc
+++ /dev/null
@@ -1,437 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_tester_impl.h"
-
-#include <map>
-#include <memory>
-#include <string>
-#include <utility>
-
-#include "api/task_queue/default_task_queue_factory.h"
-#include "api/units/frequency.h"
-#include "api/units/time_delta.h"
-#include "api/units/timestamp.h"
-#include "api/video/encoded_image.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_codec_type.h"
-#include "api/video/video_frame.h"
-#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
-#include "modules/video_coding/utility/ivf_file_writer.h"
-#include "rtc_base/event.h"
-#include "rtc_base/time_utils.h"
-#include "system_wrappers/include/sleep.h"
-#include "test/testsupport/video_frame_writer.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using RawVideoSource = VideoCodecTester::RawVideoSource;
-using CodedVideoSource = VideoCodecTester::CodedVideoSource;
-using Decoder = VideoCodecTester::Decoder;
-using Encoder = VideoCodecTester::Encoder;
-using EncoderSettings = VideoCodecTester::EncoderSettings;
-using DecoderSettings = VideoCodecTester::DecoderSettings;
-using PacingSettings = VideoCodecTester::PacingSettings;
-using PacingMode = PacingSettings::PacingMode;
-
-constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
-// A thread-safe wrapper for video source to be shared with the quality analyzer
-// that reads reference frames from a separate thread.
-class SyncRawVideoSource : public VideoCodecAnalyzer::ReferenceVideoSource {
- public:
- explicit SyncRawVideoSource(RawVideoSource* video_source)
- : video_source_(video_source) {}
-
- absl::optional<VideoFrame> PullFrame() {
- MutexLock lock(&mutex_);
- return video_source_->PullFrame();
- }
-
- VideoFrame GetFrame(uint32_t timestamp_rtp, Resolution resolution) override {
- MutexLock lock(&mutex_);
- return video_source_->GetFrame(timestamp_rtp, resolution);
- }
-
- protected:
- RawVideoSource* const video_source_ RTC_GUARDED_BY(mutex_);
- Mutex mutex_;
-};
-
-// Pacer calculates the delay necessary to keep frame encode or decode calls
-// spaced from the previous calls by the pacing time. `Schedule` is expected to
-// be called as close as possible to posting the frame encode or decode task.
-// This class is not thread safe.
-class Pacer {
- public:
- explicit Pacer(PacingSettings settings)
- : settings_(settings), delay_(TimeDelta::Zero()) {}
- Timestamp Schedule(Timestamp timestamp) {
- Timestamp now = Timestamp::Micros(rtc::TimeMicros());
- if (settings_.mode == PacingMode::kNoPacing) {
- return now;
- }
-
- Timestamp scheduled = now;
- if (prev_scheduled_) {
- scheduled = *prev_scheduled_ + PacingTime(timestamp);
- if (scheduled < now) {
- scheduled = now;
- }
- }
-
- prev_timestamp_ = timestamp;
- prev_scheduled_ = scheduled;
- return scheduled;
- }
-
- private:
- TimeDelta PacingTime(Timestamp timestamp) {
- if (settings_.mode == PacingMode::kRealTime) {
- return timestamp - *prev_timestamp_;
- }
- RTC_CHECK_EQ(PacingMode::kConstantRate, settings_.mode);
- return 1 / settings_.constant_rate;
- }
-
- PacingSettings settings_;
- absl::optional<Timestamp> prev_timestamp_;
- absl::optional<Timestamp> prev_scheduled_;
- TimeDelta delay_;
-};
-
-// Task queue that keeps the number of queued tasks below a certain limit. If
-// the limit is reached, posting of a next task is blocked until execution of a
-// previously posted task starts. This class is not thread-safe.
-class LimitedTaskQueue {
- public:
-  // The codec tester reads frames from the video source in the main thread.
-  // Encoding and decoding are done in separate threads. If encoding or
-  // decoding is slow, the reading may run far ahead and buffer too many
-  // frames in memory. To prevent this, we limit the encoding/decoding queue
-  // size. When the queue is full, the main thread (and hence the reading of
-  // frames from the video source) is blocked until a previously posted
-  // encoding/decoding task starts.
- static constexpr int kMaxTaskQueueSize = 3;
-
- LimitedTaskQueue() : queue_size_(0) {}
-
- void PostScheduledTask(absl::AnyInvocable<void() &&> task, Timestamp start) {
- ++queue_size_;
- task_queue_.PostTask([this, task = std::move(task), start]() mutable {
- int wait_ms = static_cast<int>(start.ms() - rtc::TimeMillis());
- if (wait_ms > 0) {
- SleepMs(wait_ms);
- }
-
- std::move(task)();
- --queue_size_;
- task_executed_.Set();
- });
-
- task_executed_.Reset();
- if (queue_size_ > kMaxTaskQueueSize) {
- task_executed_.Wait(rtc::Event::kForever);
- }
- RTC_CHECK(queue_size_ <= kMaxTaskQueueSize);
- }
-
- void WaitForPreviouslyPostedTasks() {
- task_queue_.SendTask([] {});
- }
-
- TaskQueueForTest task_queue_;
- std::atomic_int queue_size_;
- rtc::Event task_executed_;
-};
-
-class TesterY4mWriter {
- public:
- explicit TesterY4mWriter(absl::string_view base_path)
- : base_path_(base_path) {}
-
- ~TesterY4mWriter() {
- task_queue_.SendTask([] {});
- }
-
- void Write(const VideoFrame& frame, int spatial_idx) {
- task_queue_.PostTask([this, frame, spatial_idx] {
- if (y4m_writers_.find(spatial_idx) == y4m_writers_.end()) {
- std::string file_path =
- base_path_ + "_s" + std::to_string(spatial_idx) + ".y4m";
-
- Y4mVideoFrameWriterImpl* y4m_writer = new Y4mVideoFrameWriterImpl(
- file_path, frame.width(), frame.height(), /*fps=*/30);
- RTC_CHECK(y4m_writer);
-
- y4m_writers_[spatial_idx] =
- std::unique_ptr<VideoFrameWriter>(y4m_writer);
- }
-
- y4m_writers_.at(spatial_idx)->WriteFrame(frame);
- });
- }
-
- protected:
- std::string base_path_;
- std::map<int, std::unique_ptr<VideoFrameWriter>> y4m_writers_;
- TaskQueueForTest task_queue_;
-};
-
-class TesterIvfWriter {
- public:
- explicit TesterIvfWriter(absl::string_view base_path)
- : base_path_(base_path) {}
-
- ~TesterIvfWriter() {
- task_queue_.SendTask([] {});
- }
-
- void Write(const EncodedImage& encoded_frame) {
- task_queue_.PostTask([this, encoded_frame] {
- int spatial_idx = encoded_frame.SpatialIndex().value_or(0);
- if (ivf_file_writers_.find(spatial_idx) == ivf_file_writers_.end()) {
- std::string ivf_path =
- base_path_ + "_s" + std::to_string(spatial_idx) + ".ivf";
-
- FileWrapper ivf_file = FileWrapper::OpenWriteOnly(ivf_path);
- RTC_CHECK(ivf_file.is_open());
-
- std::unique_ptr<IvfFileWriter> ivf_writer =
- IvfFileWriter::Wrap(std::move(ivf_file), /*byte_limit=*/0);
- RTC_CHECK(ivf_writer);
-
- ivf_file_writers_[spatial_idx] = std::move(ivf_writer);
- }
-
- // To play: ffplay -vcodec vp8|vp9|av1|hevc|h264 filename
- ivf_file_writers_.at(spatial_idx)
- ->WriteFrame(encoded_frame, VideoCodecType::kVideoCodecGeneric);
- });
- }
-
- protected:
- std::string base_path_;
- std::map<int, std::unique_ptr<IvfFileWriter>> ivf_file_writers_;
- TaskQueueForTest task_queue_;
-};
-
-class TesterDecoder {
- public:
- TesterDecoder(Decoder* decoder,
- VideoCodecAnalyzer* analyzer,
- const DecoderSettings& settings)
- : decoder_(decoder),
- analyzer_(analyzer),
- settings_(settings),
- pacer_(settings.pacing) {
- RTC_CHECK(analyzer_) << "Analyzer must be provided";
-
- if (settings.decoder_input_base_path) {
- input_writer_ =
- std::make_unique<TesterIvfWriter>(*settings.decoder_input_base_path);
- }
-
- if (settings.decoder_output_base_path) {
- output_writer_ =
- std::make_unique<TesterY4mWriter>(*settings.decoder_output_base_path);
- }
- }
-
- void Initialize() {
- task_queue_.PostScheduledTask([this] { decoder_->Initialize(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- void Decode(const EncodedImage& input_frame) {
- Timestamp timestamp =
- Timestamp::Micros((input_frame.RtpTimestamp() / k90kHz).us());
-
- task_queue_.PostScheduledTask(
- [this, input_frame] {
- analyzer_->StartDecode(input_frame);
-
- decoder_->Decode(
- input_frame,
- [this, spatial_idx = input_frame.SpatialIndex().value_or(0)](
- const VideoFrame& output_frame) {
- analyzer_->FinishDecode(output_frame, spatial_idx);
-
- if (output_writer_) {
- output_writer_->Write(output_frame, spatial_idx);
- }
- });
-
- if (input_writer_) {
- input_writer_->Write(input_frame);
- }
- },
- pacer_.Schedule(timestamp));
- }
-
- void Flush() {
- task_queue_.PostScheduledTask([this] { decoder_->Flush(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- protected:
- Decoder* const decoder_;
- VideoCodecAnalyzer* const analyzer_;
- const DecoderSettings& settings_;
- Pacer pacer_;
- LimitedTaskQueue task_queue_;
- std::unique_ptr<TesterIvfWriter> input_writer_;
- std::unique_ptr<TesterY4mWriter> output_writer_;
-};
-
-class TesterEncoder {
- public:
- TesterEncoder(Encoder* encoder,
- TesterDecoder* decoder,
- VideoCodecAnalyzer* analyzer,
- const EncoderSettings& settings)
- : encoder_(encoder),
- decoder_(decoder),
- analyzer_(analyzer),
- settings_(settings),
- pacer_(settings.pacing) {
- RTC_CHECK(analyzer_) << "Analyzer must be provided";
- if (settings.encoder_input_base_path) {
- input_writer_ =
- std::make_unique<TesterY4mWriter>(*settings.encoder_input_base_path);
- }
-
- if (settings.encoder_output_base_path) {
- output_writer_ =
- std::make_unique<TesterIvfWriter>(*settings.encoder_output_base_path);
- }
- }
-
- void Initialize() {
- task_queue_.PostScheduledTask([this] { encoder_->Initialize(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- void Encode(const VideoFrame& input_frame) {
- Timestamp timestamp =
- Timestamp::Micros((input_frame.timestamp() / k90kHz).us());
-
- task_queue_.PostScheduledTask(
- [this, input_frame] {
- analyzer_->StartEncode(input_frame);
- encoder_->Encode(input_frame,
- [this](const EncodedImage& encoded_frame) {
- analyzer_->FinishEncode(encoded_frame);
-
- if (decoder_ != nullptr) {
- decoder_->Decode(encoded_frame);
- }
-
- if (output_writer_ != nullptr) {
- output_writer_->Write(encoded_frame);
- }
- });
-
- if (input_writer_) {
- input_writer_->Write(input_frame, /*spatial_idx=*/0);
- }
- },
- pacer_.Schedule(timestamp));
- }
-
- void Flush() {
- task_queue_.PostScheduledTask([this] { encoder_->Flush(); },
- Timestamp::Zero());
- task_queue_.WaitForPreviouslyPostedTasks();
- }
-
- protected:
- Encoder* const encoder_;
- TesterDecoder* const decoder_;
- VideoCodecAnalyzer* const analyzer_;
- const EncoderSettings& settings_;
- std::unique_ptr<TesterY4mWriter> input_writer_;
- std::unique_ptr<TesterIvfWriter> output_writer_;
- Pacer pacer_;
- LimitedTaskQueue task_queue_;
-};
-
-} // namespace
-
-std::unique_ptr<VideoCodecStats> VideoCodecTesterImpl::RunDecodeTest(
- CodedVideoSource* video_source,
- Decoder* decoder,
- const DecoderSettings& decoder_settings) {
- VideoCodecAnalyzer perf_analyzer;
- TesterDecoder tester_decoder(decoder, &perf_analyzer, decoder_settings);
-
- tester_decoder.Initialize();
-
- while (auto frame = video_source->PullFrame()) {
- tester_decoder.Decode(*frame);
- }
-
- tester_decoder.Flush();
-
- return perf_analyzer.GetStats();
-}
-
-std::unique_ptr<VideoCodecStats> VideoCodecTesterImpl::RunEncodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- const EncoderSettings& encoder_settings) {
- SyncRawVideoSource sync_source(video_source);
- VideoCodecAnalyzer perf_analyzer;
- TesterEncoder tester_encoder(encoder, /*decoder=*/nullptr, &perf_analyzer,
- encoder_settings);
-
- tester_encoder.Initialize();
-
- while (auto frame = sync_source.PullFrame()) {
- tester_encoder.Encode(*frame);
- }
-
- tester_encoder.Flush();
-
- return perf_analyzer.GetStats();
-}
-
-std::unique_ptr<VideoCodecStats> VideoCodecTesterImpl::RunEncodeDecodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- Decoder* decoder,
- const EncoderSettings& encoder_settings,
- const DecoderSettings& decoder_settings) {
- SyncRawVideoSource sync_source(video_source);
- VideoCodecAnalyzer perf_analyzer(&sync_source);
- TesterDecoder tester_decoder(decoder, &perf_analyzer, decoder_settings);
- TesterEncoder tester_encoder(encoder, &tester_decoder, &perf_analyzer,
- encoder_settings);
-
- tester_encoder.Initialize();
- tester_decoder.Initialize();
-
- while (auto frame = sync_source.PullFrame()) {
- tester_encoder.Encode(*frame);
- }
-
- tester_encoder.Flush();
- tester_decoder.Flush();
-
- return perf_analyzer.GetStats();
-}
-
-} // namespace test
-} // namespace webrtc
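The removed Pacer above spaces encode/decode calls so that each task runs at the previously scheduled time plus the pacing interval, clamped to the current time. A standalone restatement of that rule is sketched below; std::chrono stands in for webrtc::Timestamp/TimeDelta, and a fixed interval replaces the per-frame interval that the removed class derives from RTP timestamp deltas (kRealTime) or from constant_rate (kConstantRate), or skips entirely (kNoPacing).

// Standalone sketch of the scheduling rule in the removed Pacer class:
// schedule the next task at prev_scheduled + pacing interval, never in the
// past. A fixed interval is used here for brevity.
#include <algorithm>
#include <chrono>
#include <optional>

class PacerSketch {
 public:
  using Clock = std::chrono::steady_clock;

  explicit PacerSketch(Clock::duration pacing_interval)
      : pacing_interval_(pacing_interval) {}

  // Returns the time point at which the next frame task should run.
  Clock::time_point Schedule() {
    const Clock::time_point now = Clock::now();
    Clock::time_point scheduled = now;
    if (prev_scheduled_) {
      scheduled = std::max(*prev_scheduled_ + pacing_interval_, now);
    }
    prev_scheduled_ = scheduled;
    return scheduled;
  }

 private:
  const Clock::duration pacing_interval_;
  std::optional<Clock::time_point> prev_scheduled_;
};

A caller would construct it with the frame interval (for example 33 ms at 30 fps) and post each encode or decode task at the returned time point, mirroring how TesterEncoder and TesterDecoder used Pacer::Schedule above.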
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h
deleted file mode 100644
index 32191b5a98..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl.h
+++ /dev/null
@@ -1,45 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
-#define MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
-
-#include <memory>
-
-#include "api/test/video_codec_tester.h"
-
-namespace webrtc {
-namespace test {
-
-// A stateless implementation of `VideoCodecTester`. This class is thread safe.
-class VideoCodecTesterImpl : public VideoCodecTester {
- public:
- std::unique_ptr<VideoCodecStats> RunDecodeTest(
- CodedVideoSource* video_source,
- Decoder* decoder,
- const DecoderSettings& decoder_settings) override;
-
- std::unique_ptr<VideoCodecStats> RunEncodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- const EncoderSettings& encoder_settings) override;
-
- std::unique_ptr<VideoCodecStats> RunEncodeDecodeTest(
- RawVideoSource* video_source,
- Encoder* encoder,
- Decoder* decoder,
- const EncoderSettings& encoder_settings,
- const DecoderSettings& decoder_settings) override;
-};
-
-} // namespace test
-} // namespace webrtc
-
-#endif // MODULES_VIDEO_CODING_CODECS_TEST_VIDEO_CODEC_TESTER_IMPL_H_
diff --git a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc b/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc
deleted file mode 100644
index a8c118ef20..0000000000
--- a/third_party/libwebrtc/modules/video_coding/codecs/test/video_codec_tester_impl_unittest.cc
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
- *
- * Use of this source code is governed by a BSD-style license
- * that can be found in the LICENSE file in the root of the source
- * tree. An additional intellectual property rights grant can be found
- * in the file PATENTS. All contributing project authors may
- * be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "modules/video_coding/codecs/test/video_codec_tester_impl.h"
-
-#include <memory>
-#include <tuple>
-#include <utility>
-#include <vector>
-
-#include "api/units/frequency.h"
-#include "api/units/time_delta.h"
-#include "api/video/encoded_image.h"
-#include "api/video/i420_buffer.h"
-#include "api/video/video_frame.h"
-#include "rtc_base/fake_clock.h"
-#include "rtc_base/gunit.h"
-#include "rtc_base/task_queue_for_test.h"
-#include "rtc_base/time_utils.h"
-#include "test/gmock.h"
-#include "test/gtest.h"
-
-namespace webrtc {
-namespace test {
-
-namespace {
-using ::testing::_;
-using ::testing::Invoke;
-using ::testing::InvokeWithoutArgs;
-using ::testing::Return;
-
-using Decoder = VideoCodecTester::Decoder;
-using Encoder = VideoCodecTester::Encoder;
-using CodedVideoSource = VideoCodecTester::CodedVideoSource;
-using RawVideoSource = VideoCodecTester::RawVideoSource;
-using DecoderSettings = VideoCodecTester::DecoderSettings;
-using EncoderSettings = VideoCodecTester::EncoderSettings;
-using PacingSettings = VideoCodecTester::PacingSettings;
-using PacingMode = PacingSettings::PacingMode;
-
-constexpr Frequency k90kHz = Frequency::Hertz(90000);
-
-struct PacingTestParams {
- PacingSettings pacing_settings;
- Frequency framerate;
- int num_frames;
- std::vector<int> expected_delta_ms;
-};
-
-VideoFrame CreateVideoFrame(uint32_t timestamp_rtp) {
- rtc::scoped_refptr<I420Buffer> buffer(I420Buffer::Create(2, 2));
- return VideoFrame::Builder()
- .set_video_frame_buffer(buffer)
- .set_timestamp_rtp(timestamp_rtp)
- .build();
-}
-
-EncodedImage CreateEncodedImage(uint32_t timestamp_rtp) {
- EncodedImage encoded_image;
- encoded_image.SetRtpTimestamp(timestamp_rtp);
- return encoded_image;
-}
-
-class MockRawVideoSource : public RawVideoSource {
- public:
- MockRawVideoSource(int num_frames, Frequency framerate)
- : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {}
-
- absl::optional<VideoFrame> PullFrame() override {
- if (frame_num_ >= num_frames_) {
- return absl::nullopt;
- }
- uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_;
- ++frame_num_;
- return CreateVideoFrame(timestamp_rtp);
- }
-
- MOCK_METHOD(VideoFrame,
- GetFrame,
- (uint32_t timestamp_rtp, Resolution),
- (override));
-
- private:
- int num_frames_;
- int frame_num_;
- Frequency framerate_;
-};
-
-class MockCodedVideoSource : public CodedVideoSource {
- public:
- MockCodedVideoSource(int num_frames, Frequency framerate)
- : num_frames_(num_frames), frame_num_(0), framerate_(framerate) {}
-
- absl::optional<EncodedImage> PullFrame() override {
- if (frame_num_ >= num_frames_) {
- return absl::nullopt;
- }
- uint32_t timestamp_rtp = frame_num_ * k90kHz / framerate_;
- ++frame_num_;
- return CreateEncodedImage(timestamp_rtp);
- }
-
- private:
- int num_frames_;
- int frame_num_;
- Frequency framerate_;
-};
-
-class MockDecoder : public Decoder {
- public:
- MOCK_METHOD(void, Initialize, (), (override));
- MOCK_METHOD(void,
- Decode,
- (const EncodedImage& frame, DecodeCallback callback),
- (override));
- MOCK_METHOD(void, Flush, (), (override));
-};
-
-class MockEncoder : public Encoder {
- public:
- MOCK_METHOD(void, Initialize, (), (override));
- MOCK_METHOD(void,
- Encode,
- (const VideoFrame& frame, EncodeCallback callback),
- (override));
- MOCK_METHOD(void, Flush, (), (override));
-};
-
-} // namespace
-
-class VideoCodecTesterImplPacingTest
- : public ::testing::TestWithParam<PacingTestParams> {
- public:
- VideoCodecTesterImplPacingTest() : test_params_(GetParam()) {}
-
- protected:
- PacingTestParams test_params_;
-};
-
-TEST_P(VideoCodecTesterImplPacingTest, PaceEncode) {
- MockRawVideoSource video_source(test_params_.num_frames,
- test_params_.framerate);
- MockEncoder encoder;
- EncoderSettings encoder_settings;
- encoder_settings.pacing = test_params_.pacing_settings;
-
- VideoCodecTesterImpl tester;
- auto fs =
- tester.RunEncodeTest(&video_source, &encoder, encoder_settings)->Slice();
- ASSERT_EQ(static_cast<int>(fs.size()), test_params_.num_frames);
-
- for (size_t i = 1; i < fs.size(); ++i) {
- int delta_ms = (fs[i].encode_start - fs[i - 1].encode_start).ms();
- EXPECT_NEAR(delta_ms, test_params_.expected_delta_ms[i - 1], 10);
- }
-}
-
-TEST_P(VideoCodecTesterImplPacingTest, PaceDecode) {
- MockCodedVideoSource video_source(test_params_.num_frames,
- test_params_.framerate);
- MockDecoder decoder;
- DecoderSettings decoder_settings;
- decoder_settings.pacing = test_params_.pacing_settings;
-
- VideoCodecTesterImpl tester;
- auto fs =
- tester.RunDecodeTest(&video_source, &decoder, decoder_settings)->Slice();
- ASSERT_EQ(static_cast<int>(fs.size()), test_params_.num_frames);
-
- for (size_t i = 1; i < fs.size(); ++i) {
- int delta_ms = (fs[i].decode_start - fs[i - 1].decode_start).ms();
- EXPECT_NEAR(delta_ms, test_params_.expected_delta_ms[i - 1], 20);
- }
-}
-
-INSTANTIATE_TEST_SUITE_P(
- DISABLED_All,
- VideoCodecTesterImplPacingTest,
- ::testing::ValuesIn(
- {// No pacing.
- PacingTestParams({.pacing_settings = {.mode = PacingMode::kNoPacing},
- .framerate = Frequency::Hertz(10),
- .num_frames = 3,
- .expected_delta_ms = {0, 0}}),
- // Real-time pacing.
- PacingTestParams({.pacing_settings = {.mode = PacingMode::kRealTime},
- .framerate = Frequency::Hertz(10),
- .num_frames = 3,
- .expected_delta_ms = {100, 100}}),
- // Pace with specified constant rate.
- PacingTestParams(
- {.pacing_settings = {.mode = PacingMode::kConstantRate,
- .constant_rate = Frequency::Hertz(20)},
- .framerate = Frequency::Hertz(10),
- .num_frames = 3,
- .expected_delta_ms = {50, 50}})}));
-} // namespace test
-} // namespace webrtc
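As a quick cross-check of the expected_delta_ms values in the removed pacing test parameters above: with no pacing frames are posted back to back, real-time pacing follows the 10 Hz source frame interval, and constant-rate pacing at 20 Hz follows the configured rate regardless of the source framerate. A minimal sketch:

// Back-of-the-envelope check of the expected inter-call deltas used in the
// removed VideoCodecTesterImplPacingTest parameters.
constexpr int kSourceFramerateHz = 10;
constexpr int kConstantRateHz = 20;
constexpr int kRealTimeDeltaMs = 1000 / kSourceFramerateHz;      // 100 ms
constexpr int kConstantRateDeltaMs = 1000 / kConstantRateHz;     // 50 ms
static_assert(kRealTimeDeltaMs == 100, "matches {100, 100} above");
static_assert(kConstantRateDeltaMs == 50, "matches {50, 50} above");
// kNoPacing: frames are posted immediately, matching {0, 0} above.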