1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
|
/*
* Copyright (c) 2022 The WebRTC project authors. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license
* that can be found in the LICENSE file in the root of the source
* tree. An additional intellectual property rights grant can be found
* in the file PATENTS. All contributing project authors may
* be found in the AUTHORS file in the root of the source tree.
*/
#include "modules/video_coding/codecs/test/video_codec_analyzer.h"
#include "absl/types/optional.h"
#include "api/video/i420_buffer.h"
#include "test/gmock.h"
#include "test/gtest.h"
#include "third_party/libyuv/include/libyuv/planar_functions.h"
namespace webrtc {
namespace test {
namespace {
using ::testing::Return;
using ::testing::Values;
// Shorthand for the per-plane PSNR aggregate stored on each frame's stats.
using Psnr = VideoCodecStats::Frame::Psnr;

// RTP timestamp shared by all tests below. 3000 ticks presumably reflects
// the 90 kHz RTP video clock (~33 ms, one frame at 30 fps) — the exact
// value is arbitrary for these tests; only propagation is checked.
const uint32_t kTimestamp = 3000;
// Arbitrary non-zero spatial layer index, used to verify the analyzer
// records the layer a frame belongs to.
const int kSpatialIdx = 2;
// gMock stand-in for the analyzer's reference video source. Tests program
// GetFrame() to return a frame with known plane values so that the PSNR
// computed against the decoded frame is predictable.
class MockReferenceVideoSource
    : public VideoCodecAnalyzer::ReferenceVideoSource {
 public:
  MOCK_METHOD(VideoFrame, GetFrame, (uint32_t, Resolution), (override));
};
// Builds a 2x2 I420 frame tagged with `timestamp_rtp` whose Y, U and V
// planes are each filled with a single constant value. Constant planes make
// the per-plane PSNR in the tests below easy to predict.
VideoFrame CreateVideoFrame(uint32_t timestamp_rtp,
                            uint8_t y = 0,
                            uint8_t u = 0,
                            uint8_t v = 0) {
  // A tiny 2x2 buffer is all the analyzer needs for these tests.
  rtc::scoped_refptr<I420Buffer> i420 = I420Buffer::Create(2, 2);
  // Paint the full extent of each plane with its constant value.
  libyuv::I420Rect(i420->MutableDataY(), i420->StrideY(), i420->MutableDataU(),
                   i420->StrideU(), i420->MutableDataV(), i420->StrideV(), 0,
                   0, i420->width(), i420->height(), y, u, v);

  VideoFrame::Builder builder;
  builder.set_video_frame_buffer(i420).set_timestamp_rtp(timestamp_rtp);
  return builder.build();
}
// Makes a minimal EncodedImage carrying only the RTP timestamp and the
// spatial layer index — the two fields the analyzer reads in these tests.
EncodedImage CreateEncodedImage(uint32_t timestamp_rtp, int spatial_idx = 0) {
  EncodedImage image;
  image.SetRtpTimestamp(timestamp_rtp);
  image.SetSpatialIndex(spatial_idx);
  return image;
}
} // namespace
// StartEncode() must register exactly one stats entry keyed by the frame's
// RTP timestamp.
TEST(VideoCodecAnalyzerTest, StartEncode) {
  VideoCodecAnalyzer analyzer;
  analyzer.StartEncode(CreateVideoFrame(kTimestamp));

  auto fs = analyzer.GetStats()->Slice();
  EXPECT_EQ(1u, fs.size());
  // (expected, actual) argument order, consistent with the rest of this file
  // (the original had the operands swapped here only).
  EXPECT_EQ(kTimestamp, fs[0].timestamp_rtp);
}
// Finishing the encode of a non-zero spatial layer must add a per-layer
// stats entry that records that layer's index.
TEST(VideoCodecAnalyzerTest, FinishEncode) {
  VideoCodecAnalyzer analyzer;
  analyzer.StartEncode(CreateVideoFrame(kTimestamp));
  analyzer.FinishEncode(CreateEncodedImage(kTimestamp, kSpatialIdx));

  auto frames = analyzer.GetStats()->Slice();
  // One entry from StartEncode plus one for the encoded spatial layer.
  EXPECT_EQ(2u, frames.size());
  EXPECT_EQ(kSpatialIdx, frames[1].spatial_idx);
}
// StartDecode() on a frame the analyzer has not seen before must create a
// stats entry carrying the encoded image's RTP timestamp.
TEST(VideoCodecAnalyzerTest, StartDecode) {
  VideoCodecAnalyzer analyzer;
  analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));

  auto frames = analyzer.GetStats()->Slice();
  EXPECT_EQ(1u, frames.size());
  EXPECT_EQ(kTimestamp, frames[0].timestamp_rtp);
}
// FinishDecode() must record the decoded frame's dimensions on the stats
// entry created by StartDecode(), without adding a new entry.
TEST(VideoCodecAnalyzerTest, FinishDecode) {
  VideoCodecAnalyzer analyzer;
  analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));

  const VideoFrame decoded_frame = CreateVideoFrame(kTimestamp);
  analyzer.FinishDecode(decoded_frame, kSpatialIdx);

  auto frames = analyzer.GetStats()->Slice();
  EXPECT_EQ(1u, frames.size());
  EXPECT_EQ(decoded_frame.width(), frames[0].width);
  EXPECT_EQ(decoded_frame.height(), frames[0].height);
}
// When a reference video source is supplied, FinishDecode() must compute
// per-plane PSNR between the decoded frame and the reference frame.
TEST(VideoCodecAnalyzerTest, ReferenceVideoSource) {
  MockReferenceVideoSource reference_video_source;
  VideoCodecAnalyzer analyzer(&reference_video_source);
  analyzer.StartDecode(CreateEncodedImage(kTimestamp, kSpatialIdx));
  EXPECT_CALL(reference_video_source, GetFrame)
      .WillOnce(Return(CreateVideoFrame(kTimestamp, /*y=*/0,
                                        /*u=*/0, /*v=*/0)));

  // Argument comments match CreateVideoFrame's parameter names (y/u/v);
  // the original /*value_y=*/-style comments tripped clang-tidy's
  // bugprone-argument-comment check.
  analyzer.FinishDecode(CreateVideoFrame(kTimestamp, /*y=*/1, /*u=*/2,
                                         /*v=*/3),
                        kSpatialIdx);

  auto fs = analyzer.GetStats()->Slice();
  EXPECT_EQ(1u, fs.size());
  EXPECT_TRUE(fs[0].psnr.has_value());

  // Constant per-plane deltas of 1/2/3 give MSE of 1/4/9; with an 8-bit peak
  // of 255, 10*log10(255^2/MSE) is ~48.1, ~42.1 and ~38.6 dB respectively.
  const Psnr& psnr = *fs[0].psnr;
  EXPECT_NEAR(psnr.y, 48, 1);
  EXPECT_NEAR(psnr.u, 42, 1);
  EXPECT_NEAR(psnr.v, 38, 1);
}
} // namespace test
} // namespace webrtc
|