| author | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 09:22:09 +0000 |
|---|---|---|
| committer | Daniel Baumann <daniel.baumann@progress-linux.org> | 2024-04-07 09:22:09 +0000 |
| commit | 43a97878ce14b72f0981164f87f2e35e14151312 (patch) | |
| tree | 620249daf56c0258faa40cbdcf9cfba06de2a846 | /third_party/libwebrtc/common_video/libyuv |
| parent | Initial commit. (diff) | |
| download | firefox-43a97878ce14b72f0981164f87f2e35e14151312.tar.xz, firefox-43a97878ce14b72f0981164f87f2e35e14151312.zip | |
Adding upstream version 110.0.1.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'third_party/libwebrtc/common_video/libyuv')

| file | insertions |
|---|---|
| third_party/libwebrtc/common_video/libyuv/include/webrtc_libyuv.h | 168 |
| third_party/libwebrtc/common_video/libyuv/libyuv_unittest.cc | 386 |
| third_party/libwebrtc/common_video/libyuv/webrtc_libyuv.cc | 472 |

3 files changed, 1026 insertions, 0 deletions
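The patch vendors WebRTC's thin wrapper around libyuv. As a minimal usage sketch of the conversion helpers declared in webrtc_libyuv.h (in the diff below) — the `ToRgb24` wrapper itself is hypothetical, only `CalcBufferSize` and `ConvertFromI420` come from the header:

```cpp
// Hypothetical usage sketch (not part of the patch): convert a VideoFrame's
// I420 buffer to RGB24 using the helpers from webrtc_libyuv.h.
#include <vector>

#include "api/video/video_frame.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"

std::vector<uint8_t> ToRgb24(const webrtc::VideoFrame& frame) {
  const size_t size = webrtc::CalcBufferSize(webrtc::VideoType::kRGB24,
                                             frame.width(), frame.height());
  std::vector<uint8_t> rgb(size);
  // ConvertFromI420 returns 0 on success, < 0 otherwise; dst_sample_size is
  // only meaningful for MJPG and is unused for RGB24.
  if (webrtc::ConvertFromI420(frame, webrtc::VideoType::kRGB24,
                              /*dst_sample_size=*/0, rgb.data()) != 0) {
    rgb.clear();  // Signal failure with an empty buffer.
  }
  return rgb;
}
```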
diff --git a/third_party/libwebrtc/common_video/libyuv/include/webrtc_libyuv.h b/third_party/libwebrtc/common_video/libyuv/include/webrtc_libyuv.h new file mode 100644 index 0000000000..3c186929a7 --- /dev/null +++ b/third_party/libwebrtc/common_video/libyuv/include/webrtc_libyuv.h @@ -0,0 +1,168 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +/* + * WebRTC's wrapper to libyuv. + */ + +#ifndef COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_ +#define COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_ + +#include <stdint.h> +#include <stdio.h> + +#include <vector> + +#include "api/scoped_refptr.h" +#include "api/video/video_frame.h" +#include "api/video/video_frame_buffer.h" +#include "rtc_base/system/rtc_export.h" + +namespace webrtc { + +enum class VideoType { + kUnknown, + kI420, + kIYUV, + kRGB24, + kABGR, + kARGB, + kARGB4444, + kRGB565, + kARGB1555, + kYUY2, + kYV12, + kUYVY, + kMJPEG, + kNV21, + kNV12, + kBGRA, +}; + +// This is the max PSNR value our algorithms can return. +const double kPerfectPSNR = 48.0f; + +// Calculate the required buffer size. +// Input: +// - type :The type of the designated video frame. +// - width :frame width in pixels. +// - height :frame height in pixels. +// Return value: :The required size in bytes to accommodate the specified +// video frame. +size_t CalcBufferSize(VideoType type, int width, int height); + +// Extract buffer from VideoFrame or I420BufferInterface (consecutive +// planes, no stride) +// Input: +// - frame : Reference to video frame. +// - size : pointer to the size of the allocated buffer. If size is +// insufficient, an error will be returned. +// - buffer : Pointer to buffer +// Return value: length of buffer if OK, < 0 otherwise. +int ExtractBuffer(const rtc::scoped_refptr<I420BufferInterface>& input_frame, + size_t size, + uint8_t* buffer); +int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer); +// Convert From I420 +// Input: +// - src_frame : Reference to a source frame. +// - dst_video_type : Type of output video. +// - dst_sample_size : Required only for the parsing of MJPG. +// - dst_frame : Pointer to a destination frame. +// Return value: 0 if OK, < 0 otherwise. +// It is assumed that source and destination have equal height. +int ConvertFromI420(const VideoFrame& src_frame, + VideoType dst_video_type, + int dst_sample_size, + uint8_t* dst_frame); + +rtc::scoped_refptr<I420BufferInterface> ScaleVideoFrameBuffer( + const I420BufferInterface& source, + int dst_width, + int dst_height); + +double I420SSE(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer); + +// Compute PSNR for an I420 frame (all planes). +// Returns the PSNR in decibel, to a maximum of kPerfectPSNR. +double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame); +double I420PSNR(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer); + +// Computes the weighted PSNR-YUV for an I420 buffer. +// +// For the definition and motivation, see +// J. Ohm, G. J. Sullivan, H. Schwarz, T. K. Tan and T. 
Wiegand, +// "Comparison of the Coding Efficiency of Video Coding Standards—Including +// High Efficiency Video Coding (HEVC)," in IEEE Transactions on Circuits and +// Systems for Video Technology, vol. 22, no. 12, pp. 1669-1684, Dec. 2012 +// doi: 10.1109/TCSVT.2012.2221192. +// +// Returns the PSNR-YUV in decibel, to a maximum of kPerfectPSNR. +double I420WeightedPSNR(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer); + +// Compute SSIM for an I420 frame (all planes). +double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame); +double I420SSIM(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer); + +// Helper function for scaling NV12 to NV12. +// If the `src_width` and `src_height` matches the `dst_width` and `dst_height`, +// then `tmp_buffer` is not used. In other cases, the minimum size of +// `tmp_buffer` should be: +// (src_width/2) * (src_height/2) * 2 + (dst_width/2) * (dst_height/2) * 2 +void NV12Scale(uint8_t* tmp_buffer, + const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height); + +// Helper class for directly converting and scaling NV12 to I420. The Y-plane +// will be scaled directly to the I420 destination, which makes this faster +// than separate NV12->I420 + I420->I420 scaling. +class RTC_EXPORT NV12ToI420Scaler { + public: + NV12ToI420Scaler(); + ~NV12ToI420Scaler(); + void NV12ToI420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height); + + private: + std::vector<uint8_t> tmp_uv_planes_; +}; + +// Convert VideoType to libyuv FourCC type +int ConvertVideoType(VideoType video_type); + +} // namespace webrtc + +#endif // COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_ diff --git a/third_party/libwebrtc/common_video/libyuv/libyuv_unittest.cc b/third_party/libwebrtc/common_video/libyuv/libyuv_unittest.cc new file mode 100644 index 0000000000..f9c82f6284 --- /dev/null +++ b/third_party/libwebrtc/common_video/libyuv/libyuv_unittest.cc @@ -0,0 +1,386 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "third_party/libyuv/include/libyuv.h" + +#include <math.h> +#include <string.h> + +#include <memory> + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "test/frame_utils.h" +#include "test/gmock.h" +#include "test/gtest.h" +#include "test/testsupport/file_utils.h" + +namespace webrtc { + +namespace { +void Calc16ByteAlignedStride(int width, int* stride_y, int* stride_uv) { + *stride_y = 16 * ((width + 15) / 16); + *stride_uv = 16 * ((width + 31) / 32); +} + +int PrintPlane(const uint8_t* buf, + int width, + int height, + int stride, + FILE* file) { + for (int i = 0; i < height; i++, buf += stride) { + if (fwrite(buf, 1, width, file) != static_cast<unsigned int>(width)) + return -1; + } + return 0; +} + +int PrintVideoFrame(const I420BufferInterface& frame, FILE* file) { + int width = frame.width(); + int height = frame.height(); + int chroma_width = frame.ChromaWidth(); + int chroma_height = frame.ChromaHeight(); + + if (PrintPlane(frame.DataY(), width, height, frame.StrideY(), file) < 0) { + return -1; + } + if (PrintPlane(frame.DataU(), chroma_width, chroma_height, frame.StrideU(), + file) < 0) { + return -1; + } + if (PrintPlane(frame.DataV(), chroma_width, chroma_height, frame.StrideV(), + file) < 0) { + return -1; + } + return 0; +} + +} // Anonymous namespace + +class TestLibYuv : public ::testing::Test { + protected: + TestLibYuv(); + void SetUp() override; + void TearDown() override; + + FILE* source_file_; + std::unique_ptr<VideoFrame> orig_frame_; + const int width_; + const int height_; + const int size_y_; + const int size_uv_; + const size_t frame_length_; +}; + +TestLibYuv::TestLibYuv() + : source_file_(NULL), + orig_frame_(), + width_(352), + height_(288), + size_y_(width_ * height_), + size_uv_(((width_ + 1) / 2) * ((height_ + 1) / 2)), + frame_length_(CalcBufferSize(VideoType::kI420, 352, 288)) {} + +void TestLibYuv::SetUp() { + const std::string input_file_name = + webrtc::test::ResourcePath("foreman_cif", "yuv"); + source_file_ = fopen(input_file_name.c_str(), "rb"); + ASSERT_TRUE(source_file_ != NULL) + << "Cannot read file: " << input_file_name << "\n"; + + rtc::scoped_refptr<I420BufferInterface> buffer( + test::ReadI420Buffer(width_, height_, source_file_)); + + orig_frame_ = + std::make_unique<VideoFrame>(VideoFrame::Builder() + .set_video_frame_buffer(buffer) + .set_rotation(webrtc::kVideoRotation_0) + .set_timestamp_us(0) + .build()); +} + +void TestLibYuv::TearDown() { + if (source_file_ != NULL) { + ASSERT_EQ(0, fclose(source_file_)); + } + source_file_ = NULL; +} + +TEST_F(TestLibYuv, ConvertTest) { + // Reading YUV frame - testing on the first frame of the foreman sequence + int j = 0; + std::string output_file_name = + webrtc::test::OutputPath() + "LibYuvTest_conversion.yuv"; + FILE* output_file = fopen(output_file_name.c_str(), "wb"); + ASSERT_TRUE(output_file != NULL); + + double psnr = 0.0; + + rtc::scoped_refptr<I420Buffer> res_i420_buffer = + I420Buffer::Create(width_, height_); + + printf("\nConvert #%d I420 <-> I420 \n", j); + std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0, + out_i420_buffer.get())); + int y_size = width_ * height_; + int u_size = res_i420_buffer->ChromaWidth() * res_i420_buffer->ChromaHeight(); + int ret = libyuv::I420Copy( + out_i420_buffer.get(), width_, out_i420_buffer.get() + y_size, + width_ >> 1, out_i420_buffer.get() + y_size + u_size, 
width_ >> 1, + res_i420_buffer.get()->MutableDataY(), res_i420_buffer.get()->StrideY(), + res_i420_buffer.get()->MutableDataU(), res_i420_buffer.get()->StrideU(), + res_i420_buffer.get()->MutableDataV(), res_i420_buffer.get()->StrideV(), + width_, height_); + EXPECT_EQ(0, ret); + + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + EXPECT_EQ(48.0, psnr); + j++; + + printf("\nConvert #%d I420 <-> RGB24\n", j); + std::unique_ptr<uint8_t[]> res_rgb_buffer2(new uint8_t[width_ * height_ * 3]); + // Align the stride values for the output frame. + int stride_y = 0; + int stride_uv = 0; + Calc16ByteAlignedStride(width_, &stride_y, &stride_uv); + res_i420_buffer = + I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kRGB24, 0, + res_rgb_buffer2.get())); + + ret = libyuv::ConvertToI420( + res_rgb_buffer2.get(), 0, res_i420_buffer.get()->MutableDataY(), + res_i420_buffer.get()->StrideY(), res_i420_buffer.get()->MutableDataU(), + res_i420_buffer.get()->StrideU(), res_i420_buffer.get()->MutableDataV(), + res_i420_buffer.get()->StrideV(), 0, 0, width_, height_, + res_i420_buffer->width(), res_i420_buffer->height(), libyuv::kRotate0, + ConvertVideoType(VideoType::kRGB24)); + + EXPECT_EQ(0, ret); + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + + // Optimization Speed- quality trade-off => 45 dB only (platform dependant). + EXPECT_GT(ceil(psnr), 44); + j++; + + printf("\nConvert #%d I420 <-> UYVY\n", j); + std::unique_ptr<uint8_t[]> out_uyvy_buffer(new uint8_t[width_ * height_ * 2]); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kUYVY, 0, + out_uyvy_buffer.get())); + + ret = libyuv::ConvertToI420( + out_uyvy_buffer.get(), 0, res_i420_buffer.get()->MutableDataY(), + res_i420_buffer.get()->StrideY(), res_i420_buffer.get()->MutableDataU(), + res_i420_buffer.get()->StrideU(), res_i420_buffer.get()->MutableDataV(), + res_i420_buffer.get()->StrideV(), 0, 0, width_, height_, + res_i420_buffer->width(), res_i420_buffer->height(), libyuv::kRotate0, + ConvertVideoType(VideoType::kUYVY)); + + EXPECT_EQ(0, ret); + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + EXPECT_EQ(48.0, psnr); + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + j++; + + printf("\nConvert #%d I420 <-> YUY2\n", j); + std::unique_ptr<uint8_t[]> out_yuy2_buffer(new uint8_t[width_ * height_ * 2]); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kYUY2, 0, + out_yuy2_buffer.get())); + + ret = libyuv::ConvertToI420( + out_yuy2_buffer.get(), 0, res_i420_buffer.get()->MutableDataY(), + res_i420_buffer.get()->StrideY(), res_i420_buffer.get()->MutableDataU(), + res_i420_buffer.get()->StrideU(), res_i420_buffer.get()->MutableDataV(), + res_i420_buffer.get()->StrideV(), 0, 0, width_, height_, + res_i420_buffer->width(), res_i420_buffer->height(), libyuv::kRotate0, + ConvertVideoType(VideoType::kYUY2)); + + EXPECT_EQ(0, ret); + + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + EXPECT_EQ(48.0, psnr); + + printf("\nConvert #%d I420 <-> RGB565\n", j); + std::unique_ptr<uint8_t[]> out_rgb565_buffer( + new uint8_t[width_ * height_ * 2]); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, 
VideoType::kRGB565, 0, + out_rgb565_buffer.get())); + + ret = libyuv::ConvertToI420( + out_rgb565_buffer.get(), 0, res_i420_buffer.get()->MutableDataY(), + res_i420_buffer.get()->StrideY(), res_i420_buffer.get()->MutableDataU(), + res_i420_buffer.get()->StrideU(), res_i420_buffer.get()->MutableDataV(), + res_i420_buffer.get()->StrideV(), 0, 0, width_, height_, + res_i420_buffer->width(), res_i420_buffer->height(), libyuv::kRotate0, + ConvertVideoType(VideoType::kRGB565)); + + EXPECT_EQ(0, ret); + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + j++; + + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565, + // Another example is I420ToRGB24, the psnr is 44 + // TODO(mikhal): Add psnr for RGB565, 1555, 4444, convert to ARGB. + EXPECT_GT(ceil(psnr), 40); + + printf("\nConvert #%d I420 <-> ARGB8888\n", j); + std::unique_ptr<uint8_t[]> out_argb8888_buffer( + new uint8_t[width_ * height_ * 4]); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kARGB, 0, + out_argb8888_buffer.get())); + + ret = libyuv::ConvertToI420( + out_argb8888_buffer.get(), 0, res_i420_buffer.get()->MutableDataY(), + res_i420_buffer.get()->StrideY(), res_i420_buffer.get()->MutableDataU(), + res_i420_buffer.get()->StrideU(), res_i420_buffer.get()->MutableDataV(), + res_i420_buffer.get()->StrideV(), 0, 0, width_, height_, + res_i420_buffer->width(), res_i420_buffer->height(), libyuv::kRotate0, + ConvertVideoType(VideoType::kARGB)); + + EXPECT_EQ(0, ret); + + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + // TODO(leozwang) Investigate the right psnr should be set for + // I420ToARGB8888, + EXPECT_GT(ceil(psnr), 42); + + ASSERT_EQ(0, fclose(output_file)); +} + +TEST_F(TestLibYuv, ConvertAlignedFrame) { + // Reading YUV frame - testing on the first frame of the foreman sequence + std::string output_file_name = + webrtc::test::OutputPath() + "LibYuvTest_conversion.yuv"; + FILE* output_file = fopen(output_file_name.c_str(), "wb"); + ASSERT_TRUE(output_file != NULL); + + double psnr = 0.0; + + int stride_y = 0; + int stride_uv = 0; + Calc16ByteAlignedStride(width_, &stride_y, &stride_uv); + + rtc::scoped_refptr<I420Buffer> res_i420_buffer = + I420Buffer::Create(width_, height_, stride_y, stride_uv, stride_uv); + std::unique_ptr<uint8_t[]> out_i420_buffer(new uint8_t[frame_length_]); + EXPECT_EQ(0, ConvertFromI420(*orig_frame_, VideoType::kI420, 0, + out_i420_buffer.get())); + int y_size = width_ * height_; + int u_size = res_i420_buffer->ChromaWidth() * res_i420_buffer->ChromaHeight(); + int ret = libyuv::I420Copy( + out_i420_buffer.get(), width_, out_i420_buffer.get() + y_size, + width_ >> 1, out_i420_buffer.get() + y_size + u_size, width_ >> 1, + res_i420_buffer.get()->MutableDataY(), res_i420_buffer.get()->StrideY(), + res_i420_buffer.get()->MutableDataU(), res_i420_buffer.get()->StrideU(), + res_i420_buffer.get()->MutableDataV(), res_i420_buffer.get()->StrideV(), + width_, height_); + + EXPECT_EQ(0, ret); + + if (PrintVideoFrame(*res_i420_buffer, output_file) < 0) { + return; + } + psnr = + I420PSNR(*orig_frame_->video_frame_buffer()->GetI420(), *res_i420_buffer); + EXPECT_EQ(48.0, psnr); +} + +static uint8_t Average(int a, int b, int c, int d) { + return (a + b + c + d + 2) / 4; +} + +TEST_F(TestLibYuv, NV12Scale2x2to2x2) { + const std::vector<uint8_t> src_y = {0, 1, 2, 3}; + 
const std::vector<uint8_t> src_uv = {0, 1}; + std::vector<uint8_t> dst_y(4); + std::vector<uint8_t> dst_uv(2); + + uint8_t* tmp_buffer = nullptr; + + NV12Scale(tmp_buffer, src_y.data(), 2, src_uv.data(), 2, 2, 2, dst_y.data(), + 2, dst_uv.data(), 2, 2, 2); + + EXPECT_THAT(dst_y, ::testing::ContainerEq(src_y)); + EXPECT_THAT(dst_uv, ::testing::ContainerEq(src_uv)); +} + +TEST_F(TestLibYuv, NV12Scale4x4to2x2) { + const uint8_t src_y[] = {0, 1, 2, 3, 4, 5, 6, 7, + 8, 9, 10, 11, 12, 13, 14, 15}; + const uint8_t src_uv[] = {0, 1, 2, 3, 4, 5, 6, 7}; + std::vector<uint8_t> dst_y(4); + std::vector<uint8_t> dst_uv(2); + + std::vector<uint8_t> tmp_buffer; + const int src_chroma_width = (4 + 1) / 2; + const int src_chroma_height = (4 + 1) / 2; + const int dst_chroma_width = (2 + 1) / 2; + const int dst_chroma_height = (2 + 1) / 2; + tmp_buffer.resize(src_chroma_width * src_chroma_height * 2 + + dst_chroma_width * dst_chroma_height * 2); + tmp_buffer.shrink_to_fit(); + + NV12Scale(tmp_buffer.data(), src_y, 4, src_uv, 4, 4, 4, dst_y.data(), 2, + dst_uv.data(), 2, 2, 2); + + EXPECT_THAT(dst_y, ::testing::ElementsAre( + Average(0, 1, 4, 5), Average(2, 3, 6, 7), + Average(8, 9, 12, 13), Average(10, 11, 14, 15))); + EXPECT_THAT(dst_uv, + ::testing::ElementsAre(Average(0, 2, 4, 6), Average(1, 3, 5, 7))); +} + +TEST(I420WeightedPSNRTest, SmokeTest) { + uint8_t ref_y[] = {0, 0, 0, 0}; + uint8_t ref_uv[] = {0}; + rtc::scoped_refptr<I420Buffer> ref_buffer = + I420Buffer::Copy(/*width=*/2, /*height=*/2, ref_y, /*stride_y=*/2, ref_uv, + /*stride_u=*/1, ref_uv, /*stride_v=*/1); + + uint8_t test_y[] = {1, 1, 1, 1}; + uint8_t test_uv[] = {2}; + rtc::scoped_refptr<I420Buffer> test_buffer = I420Buffer::Copy( + /*width=*/2, /*height=*/2, test_y, /*stride_y=*/2, test_uv, + /*stride_u=*/1, test_uv, /*stride_v=*/1); + + auto psnr = [](double mse) { return 10.0 * log10(255.0 * 255.0 / mse); }; + EXPECT_NEAR(I420WeightedPSNR(*ref_buffer, *test_buffer), + (6.0 * psnr(1.0) + psnr(4.0) + psnr(4.0)) / 8.0, + /*abs_error=*/0.001); +} + +} // namespace webrtc diff --git a/third_party/libwebrtc/common_video/libyuv/webrtc_libyuv.cc b/third_party/libwebrtc/common_video/libyuv/webrtc_libyuv.cc new file mode 100644 index 0000000000..145237413e --- /dev/null +++ b/third_party/libwebrtc/common_video/libyuv/webrtc_libyuv.cc @@ -0,0 +1,472 @@ +/* + * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "common_video/libyuv/include/webrtc_libyuv.h" +#include "libyuv/planar_functions.h" + +#include <cstdint> + +#include "api/video/i420_buffer.h" +#include "common_video/include/video_frame_buffer.h" +#include "rtc_base/checks.h" +#include "libyuv/include/libyuv.h" + +namespace webrtc { + +size_t CalcBufferSize(VideoType type, int width, int height) { + RTC_DCHECK_GE(width, 0); + RTC_DCHECK_GE(height, 0); + + size_t buffer_size = 0; + switch (type) { + case VideoType::kI420: + case VideoType::kNV12: + case VideoType::kNV21: + case VideoType::kIYUV: + case VideoType::kYV12: { + int half_width = (width + 1) >> 1; + int half_height = (height + 1) >> 1; + buffer_size = width * height + half_width * half_height * 2; + break; + } + case VideoType::kARGB4444: + case VideoType::kRGB565: + case VideoType::kARGB1555: + case VideoType::kYUY2: + case VideoType::kUYVY: + buffer_size = width * height * 2; + break; + case VideoType::kRGB24: + buffer_size = width * height * 3; + break; + case VideoType::kBGRA: + case VideoType::kARGB: + buffer_size = width * height * 4; + break; + default: + RTC_DCHECK_NOTREACHED(); + break; + } + return buffer_size; +} + +int ExtractBuffer(const rtc::scoped_refptr<I420BufferInterface>& input_frame, + size_t size, + uint8_t* buffer) { + RTC_DCHECK(buffer); + if (!input_frame) + return -1; + int width = input_frame->width(); + int height = input_frame->height(); + size_t length = CalcBufferSize(VideoType::kI420, width, height); + if (size < length) { + return -1; + } + + int chroma_width = input_frame->ChromaWidth(); + int chroma_height = input_frame->ChromaHeight(); + + libyuv::I420Copy(input_frame->DataY(), input_frame->StrideY(), + input_frame->DataU(), input_frame->StrideU(), + input_frame->DataV(), input_frame->StrideV(), buffer, width, + buffer + width * height, chroma_width, + buffer + width * height + chroma_width * chroma_height, + chroma_width, width, height); + + return static_cast<int>(length); +} + +int ExtractBuffer(const VideoFrame& input_frame, size_t size, uint8_t* buffer) { + return ExtractBuffer(input_frame.video_frame_buffer()->ToI420(), size, + buffer); +} + +int ConvertVideoType(VideoType video_type) { + switch (video_type) { + case VideoType::kUnknown: + return libyuv::FOURCC_ANY; + case VideoType::kI420: + return libyuv::FOURCC_I420; + case VideoType::kIYUV: // same as VideoType::kYV12 + case VideoType::kYV12: + return libyuv::FOURCC_YV12; + case VideoType::kRGB24: + return libyuv::FOURCC_24BG; + case VideoType::kABGR: + return libyuv::FOURCC_ABGR; + case VideoType::kRGB565: + return libyuv::FOURCC_RGBP; + case VideoType::kYUY2: + return libyuv::FOURCC_YUY2; + case VideoType::kUYVY: + return libyuv::FOURCC_UYVY; + case VideoType::kMJPEG: + return libyuv::FOURCC_MJPG; + case VideoType::kNV21: + return libyuv::FOURCC_NV21; + case VideoType::kNV12: + return libyuv::FOURCC_NV12; + case VideoType::kARGB: + return libyuv::FOURCC_ARGB; + case VideoType::kBGRA: + return libyuv::FOURCC_BGRA; + case VideoType::kARGB4444: + return libyuv::FOURCC_R444; + case VideoType::kARGB1555: + return libyuv::FOURCC_RGBO; + } + RTC_DCHECK_NOTREACHED(); + return libyuv::FOURCC_ANY; +} + +int ConvertFromI420(const VideoFrame& src_frame, + VideoType dst_video_type, + int dst_sample_size, + uint8_t* dst_frame) { + rtc::scoped_refptr<I420BufferInterface> i420_buffer = + src_frame.video_frame_buffer()->ToI420(); + return libyuv::ConvertFromI420( + i420_buffer->DataY(), i420_buffer->StrideY(), i420_buffer->DataU(), + i420_buffer->StrideU(), i420_buffer->DataV(), 
i420_buffer->StrideV(), + dst_frame, dst_sample_size, src_frame.width(), src_frame.height(), + ConvertVideoType(dst_video_type)); +} + +rtc::scoped_refptr<I420ABufferInterface> ScaleI420ABuffer( + const I420ABufferInterface& buffer, + int target_width, + int target_height) { + rtc::scoped_refptr<I420Buffer> yuv_buffer = + I420Buffer::Create(target_width, target_height); + yuv_buffer->ScaleFrom(buffer); + rtc::scoped_refptr<I420Buffer> axx_buffer = + I420Buffer::Create(target_width, target_height); + libyuv::ScalePlane(buffer.DataA(), buffer.StrideA(), buffer.width(), + buffer.height(), axx_buffer->MutableDataY(), + axx_buffer->StrideY(), target_width, target_height, + libyuv::kFilterBox); + rtc::scoped_refptr<I420ABufferInterface> merged_buffer = WrapI420ABuffer( + yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(), + yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(), + yuv_buffer->DataV(), yuv_buffer->StrideV(), axx_buffer->DataY(), + axx_buffer->StrideY(), + // To keep references alive. + [yuv_buffer, axx_buffer] {}); + return merged_buffer; +} + +rtc::scoped_refptr<I420BufferInterface> ScaleVideoFrameBuffer( + const I420BufferInterface& source, + int dst_width, + int dst_height) { + rtc::scoped_refptr<I420Buffer> scaled_buffer = + I420Buffer::Create(dst_width, dst_height); + scaled_buffer->ScaleFrom(source); + return scaled_buffer; +} + +double I420SSE(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer) { + RTC_DCHECK_EQ(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_EQ(ref_buffer.height(), test_buffer.height()); + const uint64_t width = test_buffer.width(); + const uint64_t height = test_buffer.height(); + const uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(), + test_buffer.StrideY(), width, height); + const int width_uv = (width + 1) >> 1; + const int height_uv = (height + 1) >> 1; + const uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(), + test_buffer.StrideU(), width_uv, height_uv); + const uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(), + test_buffer.StrideV(), width_uv, height_uv); + const double samples = width * height + 2 * (width_uv * height_uv); + const double sse = sse_y + sse_u + sse_v; + return sse / (samples * 255.0 * 255.0); +} + +// Compute PSNR for an I420A frame (all planes). Can upscale test frame. 
+double I420APSNR(const I420ABufferInterface& ref_buffer, + const I420ABufferInterface& test_buffer) { + RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); + if ((ref_buffer.width() != test_buffer.width()) || + (ref_buffer.height() != test_buffer.height())) { + rtc::scoped_refptr<I420ABufferInterface> scaled_buffer = + ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height()); + return I420APSNR(ref_buffer, *scaled_buffer); + } + const int width = test_buffer.width(); + const int height = test_buffer.height(); + const uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(), + test_buffer.StrideY(), width, height); + const int width_uv = (width + 1) >> 1; + const int height_uv = (height + 1) >> 1; + const uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(), + test_buffer.StrideU(), width_uv, height_uv); + const uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(), + test_buffer.StrideV(), width_uv, height_uv); + const uint64_t sse_a = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataA(), ref_buffer.StrideA(), test_buffer.DataA(), + test_buffer.StrideA(), width, height); + const uint64_t samples = 2 * (uint64_t)width * (uint64_t)height + + 2 * ((uint64_t)width_uv * (uint64_t)height_uv); + const uint64_t sse = sse_y + sse_u + sse_v + sse_a; + const double psnr = libyuv::SumSquareErrorToPsnr(sse, samples); + return (psnr > kPerfectPSNR) ? kPerfectPSNR : psnr; +} + +// Compute PSNR for an I420A frame (all planes) +double I420APSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { + if (!ref_frame || !test_frame) + return -1; + RTC_DCHECK(ref_frame->video_frame_buffer()->type() == + VideoFrameBuffer::Type::kI420A); + RTC_DCHECK(test_frame->video_frame_buffer()->type() == + VideoFrameBuffer::Type::kI420A); + return I420APSNR(*ref_frame->video_frame_buffer()->GetI420A(), + *test_frame->video_frame_buffer()->GetI420A()); +} + +// Compute PSNR for an I420 frame (all planes). Can upscale test frame. +double I420PSNR(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer) { + RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); + if ((ref_buffer.width() != test_buffer.width()) || + (ref_buffer.height() != test_buffer.height())) { + rtc::scoped_refptr<I420Buffer> scaled_buffer = + I420Buffer::Create(ref_buffer.width(), ref_buffer.height()); + scaled_buffer->ScaleFrom(test_buffer); + return I420PSNR(ref_buffer, *scaled_buffer); + } + double psnr = libyuv::I420Psnr( + ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(), + ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(), + test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(), + test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(), + test_buffer.width(), test_buffer.height()); + // LibYuv sets the max psnr value to 128, we restrict it here. + // In case of 0 mse in one frame, 128 can skew the results significantly. + return (psnr > kPerfectPSNR) ? 
kPerfectPSNR : psnr; +} + +// Compute PSNR for an I420 frame (all planes) +double I420PSNR(const VideoFrame* ref_frame, const VideoFrame* test_frame) { + if (!ref_frame || !test_frame) + return -1; + return I420PSNR(*ref_frame->video_frame_buffer()->ToI420(), + *test_frame->video_frame_buffer()->ToI420()); +} + +double I420WeightedPSNR(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer) { + RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); + if ((ref_buffer.width() != test_buffer.width()) || + (ref_buffer.height() != test_buffer.height())) { + rtc::scoped_refptr<I420Buffer> scaled_ref_buffer = + I420Buffer::Create(test_buffer.width(), test_buffer.height()); + scaled_ref_buffer->ScaleFrom(ref_buffer); + return I420WeightedPSNR(*scaled_ref_buffer, test_buffer); + } + + // Luma. + int width_y = test_buffer.width(); + int height_y = test_buffer.height(); + uint64_t sse_y = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataY(), ref_buffer.StrideY(), test_buffer.DataY(), + test_buffer.StrideY(), width_y, height_y); + uint64_t num_samples_y = (uint64_t)width_y * (uint64_t)height_y; + double psnr_y = libyuv::SumSquareErrorToPsnr(sse_y, num_samples_y); + + // Chroma. + int width_uv = (width_y + 1) >> 1; + int height_uv = (height_y + 1) >> 1; + uint64_t sse_u = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataU(), ref_buffer.StrideU(), test_buffer.DataU(), + test_buffer.StrideU(), width_uv, height_uv); + uint64_t num_samples_uv = (uint64_t)width_uv * (uint64_t)height_uv; + double psnr_u = libyuv::SumSquareErrorToPsnr(sse_u, num_samples_uv); + uint64_t sse_v = libyuv::ComputeSumSquareErrorPlane( + ref_buffer.DataV(), ref_buffer.StrideV(), test_buffer.DataV(), + test_buffer.StrideV(), width_uv, height_uv); + double psnr_v = libyuv::SumSquareErrorToPsnr(sse_v, num_samples_uv); + + // Weights from Ohm et. al 2012. + double psnr_yuv = (6.0 * psnr_y + psnr_u + psnr_v) / 8.0; + return (psnr_yuv > kPerfectPSNR) ? kPerfectPSNR : psnr_yuv; +} + +// Compute SSIM for an I420A frame (all planes). Can upscale test frame. 
+double I420ASSIM(const I420ABufferInterface& ref_buffer, + const I420ABufferInterface& test_buffer) { + RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); + if ((ref_buffer.width() != test_buffer.width()) || + (ref_buffer.height() != test_buffer.height())) { + rtc::scoped_refptr<I420ABufferInterface> scaled_buffer = + ScaleI420ABuffer(test_buffer, ref_buffer.width(), ref_buffer.height()); + return I420ASSIM(ref_buffer, *scaled_buffer); + } + const double yuv_ssim = libyuv::I420Ssim( + ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(), + ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(), + test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(), + test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(), + test_buffer.width(), test_buffer.height()); + const double a_ssim = libyuv::CalcFrameSsim( + ref_buffer.DataA(), ref_buffer.StrideA(), test_buffer.DataA(), + test_buffer.StrideA(), test_buffer.width(), test_buffer.height()); + return (yuv_ssim + (a_ssim * 0.8)) / 1.8; +} + +// Compute SSIM for an I420A frame (all planes) +double I420ASSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) { + if (!ref_frame || !test_frame) + return -1; + RTC_DCHECK(ref_frame->video_frame_buffer()->type() == + VideoFrameBuffer::Type::kI420A); + RTC_DCHECK(test_frame->video_frame_buffer()->type() == + VideoFrameBuffer::Type::kI420A); + return I420ASSIM(*ref_frame->video_frame_buffer()->GetI420A(), + *test_frame->video_frame_buffer()->GetI420A()); +} + +// Compute SSIM for an I420 frame (all planes). Can upscale test_buffer. +double I420SSIM(const I420BufferInterface& ref_buffer, + const I420BufferInterface& test_buffer) { + RTC_DCHECK_GE(ref_buffer.width(), test_buffer.width()); + RTC_DCHECK_GE(ref_buffer.height(), test_buffer.height()); + if ((ref_buffer.width() != test_buffer.width()) || + (ref_buffer.height() != test_buffer.height())) { + rtc::scoped_refptr<I420Buffer> scaled_buffer = + I420Buffer::Create(ref_buffer.width(), ref_buffer.height()); + scaled_buffer->ScaleFrom(test_buffer); + return I420SSIM(ref_buffer, *scaled_buffer); + } + return libyuv::I420Ssim( + ref_buffer.DataY(), ref_buffer.StrideY(), ref_buffer.DataU(), + ref_buffer.StrideU(), ref_buffer.DataV(), ref_buffer.StrideV(), + test_buffer.DataY(), test_buffer.StrideY(), test_buffer.DataU(), + test_buffer.StrideU(), test_buffer.DataV(), test_buffer.StrideV(), + test_buffer.width(), test_buffer.height()); +} + +double I420SSIM(const VideoFrame* ref_frame, const VideoFrame* test_frame) { + if (!ref_frame || !test_frame) + return -1; + return I420SSIM(*ref_frame->video_frame_buffer()->ToI420(), + *test_frame->video_frame_buffer()->ToI420()); +} + +void NV12Scale(uint8_t* tmp_buffer, + const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int dst_width, + int dst_height) { + const int src_chroma_width = (src_width + 1) / 2; + const int src_chroma_height = (src_height + 1) / 2; + + if (src_width == dst_width && src_height == dst_height) { + // No scaling. + libyuv::CopyPlane(src_y, src_stride_y, dst_y, dst_stride_y, src_width, + src_height); + libyuv::CopyPlane(src_uv, src_stride_uv, dst_uv, dst_stride_uv, + src_chroma_width * 2, src_chroma_height); + return; + } + + // Scaling. + // Allocate temporary memory for spitting UV planes and scaling them. 
+ const int dst_chroma_width = (dst_width + 1) / 2; + const int dst_chroma_height = (dst_height + 1) / 2; + + uint8_t* const src_u = tmp_buffer; + uint8_t* const src_v = src_u + src_chroma_width * src_chroma_height; + uint8_t* const dst_u = src_v + src_chroma_width * src_chroma_height; + uint8_t* const dst_v = dst_u + dst_chroma_width * dst_chroma_height; + + // Split source UV plane into separate U and V plane using the temporary data. + libyuv::SplitUVPlane(src_uv, src_stride_uv, src_u, src_chroma_width, src_v, + src_chroma_width, src_chroma_width, src_chroma_height); + + // Scale the planes. + libyuv::I420Scale( + src_y, src_stride_y, src_u, src_chroma_width, src_v, src_chroma_width, + src_width, src_height, dst_y, dst_stride_y, dst_u, dst_chroma_width, + dst_v, dst_chroma_width, dst_width, dst_height, libyuv::kFilterBox); + + // Merge the UV planes into the destination. + libyuv::MergeUVPlane(dst_u, dst_chroma_width, dst_v, dst_chroma_width, dst_uv, + dst_stride_uv, dst_chroma_width, dst_chroma_height); +} + +NV12ToI420Scaler::NV12ToI420Scaler() = default; +NV12ToI420Scaler::~NV12ToI420Scaler() = default; + +void NV12ToI420Scaler::NV12ToI420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height) { + if (src_width == dst_width && src_height == dst_height) { + // No scaling. + tmp_uv_planes_.clear(); + tmp_uv_planes_.shrink_to_fit(); + libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, dst_stride_v, + src_width, src_height); + return; + } + + // Scaling. + // Allocate temporary memory for spitting UV planes. + const int src_uv_width = (src_width + 1) / 2; + const int src_uv_height = (src_height + 1) / 2; + tmp_uv_planes_.resize(src_uv_width * src_uv_height * 2); + tmp_uv_planes_.shrink_to_fit(); + + // Split source UV plane into separate U and V plane using the temporary data. + uint8_t* const src_u = tmp_uv_planes_.data(); + uint8_t* const src_v = tmp_uv_planes_.data() + src_uv_width * src_uv_height; + libyuv::SplitUVPlane(src_uv, src_stride_uv, src_u, src_uv_width, src_v, + src_uv_width, src_uv_width, src_uv_height); + + // Scale the planes into the destination. + libyuv::I420Scale(src_y, src_stride_y, src_u, src_uv_width, src_v, + src_uv_width, src_width, src_height, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, dst_width, + dst_height, libyuv::kFilterBox); +} + +} // namespace webrtc |