/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef mozilla_image_decoders_nsAVIFDecoder_h
#define mozilla_image_decoders_nsAVIFDecoder_h

#include "Decoder.h"
#include "mozilla/gfx/Types.h"
#include "MP4Metadata.h"
#include "mp4parse.h"
#include "SampleIterator.h"
#include "SurfacePipe.h"
#include "aom/aom_decoder.h"
#include "dav1d/dav1d.h"
#include "mozilla/Telemetry.h"

namespace mozilla {
namespace image {

class RasterImage;
class AVIFDecoderStream;
class AVIFParser;
class AVIFDecoderInterface;

class nsAVIFDecoder final : public Decoder {
public:
virtual ~nsAVIFDecoder();
DecoderType GetType() const override { return DecoderType::AVIF; }
protected:
LexerResult DoDecode(SourceBufferIterator& aIterator,
IResumable* aOnResume) override;
Maybe<Telemetry::HistogramID> SpeedHistogram() const override;
private:
friend class DecoderFactory;
friend class AVIFDecoderInterface;
friend class AVIFParser;
// Decoders should only be instantiated via DecoderFactory.
explicit nsAVIFDecoder(RasterImage* aImage);
static intptr_t ReadSource(uint8_t* aDestBuf, uintptr_t aDestBufSize,
void* aUserData);
typedef int Dav1dResult;
enum class NonAOMCodecError { NoFrame, SizeOverflow };
typedef Variant<aom_codec_err_t, NonAOMCodecError> AOMResult;
enum class NonDecoderResult {
NeedMoreData,
OutputAvailable,
Complete,
SizeOverflow,
OutOfMemory,
PipeInitError,
WriteBufferError,
AlphaYSizeMismatch,
AlphaYColorDepthMismatch,
MetadataImageSizeMismatch,
RenderSizeMismatch,
FrameSizeChanged,
InvalidCICP,
NoSamples,
};
using DecodeResult =
Variant<Mp4parseStatus, NonDecoderResult, Dav1dResult, AOMResult>;
Mp4parseStatus CreateParser();
DecodeResult CreateDecoder();
DecodeResult DoDecodeInternal(SourceBufferIterator& aIterator,
IResumable* aOnResume);
static bool IsDecodeSuccess(const DecodeResult& aResult);
void RecordDecodeResultTelemetry(const DecodeResult& aResult);
Vector<uint8_t> mBufferedData;
RefPtr<AVIFDecoderStream> mBufferStream;
/// Pointer to the next place to read from mBufferedData
const uint8_t* mReadCursor = nullptr;
UniquePtr<AVIFParser> mParser = nullptr;
UniquePtr<AVIFDecoderInterface> mDecoder = nullptr;
bool mIsAnimated = false;
bool mHasAlpha = false;
};

class AVIFDecoderStream : public ByteStream {
public:
explicit AVIFDecoderStream(Vector<uint8_t>* aBuffer) { mBuffer = aBuffer; }
virtual bool ReadAt(int64_t offset, void* data, size_t size,
size_t* bytes_read) override;
virtual bool CachedReadAt(int64_t offset, void* data, size_t size,
size_t* bytes_read) override {
return ReadAt(offset, data, size, bytes_read);
}
virtual bool Length(int64_t* size) override;
virtual const uint8_t* GetContiguousAccess(int64_t aOffset,
size_t aSize) override;
private:
Vector<uint8_t>* mBuffer;
};

struct AVIFImage {
uint32_t mFrameNum = 0;
FrameTimeout mDuration = FrameTimeout::Zero();
RefPtr<MediaRawData> mColorImage = nullptr;
RefPtr<MediaRawData> mAlphaImage = nullptr;
};

class AVIFParser {
public:
static Mp4parseStatus Create(const Mp4parseIo* aIo, ByteStream* aBuffer,
UniquePtr<AVIFParser>& aParserOut,
bool aAllowSequences, bool aAnimateAVIFMajor);
~AVIFParser();
const Mp4parseAvifInfo& GetInfo() const { return mInfo; }
nsAVIFDecoder::DecodeResult GetImage(AVIFImage& aImage);
bool IsAnimated() const;
private:
explicit AVIFParser(const Mp4parseIo* aIo);
Mp4parseStatus Init(ByteStream* aBuffer, bool aAllowSequences,
bool aAnimateAVIFMajor);
struct FreeAvifParser {
void operator()(Mp4parseAvifParser* aPtr) { mp4parse_avif_free(aPtr); }
};
const Mp4parseIo* mIo;
UniquePtr<Mp4parseAvifParser, FreeAvifParser> mParser = nullptr;
Mp4parseAvifInfo mInfo = {};
UniquePtr<SampleIterator> mColorSampleIter = nullptr;
UniquePtr<SampleIterator> mAlphaSampleIter = nullptr;
uint32_t mFrameNum = 0;
};

struct Dav1dPictureUnref {
void operator()(Dav1dPicture* aPtr) {
dav1d_picture_unref(aPtr);
delete aPtr;
}
};

using OwnedDav1dPicture = UniquePtr<Dav1dPicture, Dav1dPictureUnref>;

class OwnedAOMImage {
public:
~OwnedAOMImage();
static OwnedAOMImage* CopyFrom(aom_image_t* aImage, bool aIsAlpha);
aom_image_t* GetImage() { return mImage.isSome() ? mImage.ptr() : nullptr; }
private:
OwnedAOMImage();
bool CloneFrom(aom_image_t* aImage, bool aIsAlpha);
// mImage's planes point into the memory owned by mBuffer.
Maybe<aom_image_t> mImage;
UniquePtr<uint8_t[]> mBuffer;
};

struct AVIFDecodedData : layers::PlanarYCbCrData {
public:
Maybe<OrientedIntSize> mRenderSize = Nothing();
gfx::CICP::ColourPrimaries mColourPrimaries = gfx::CICP::CP_UNSPECIFIED;
gfx::CICP::TransferCharacteristics mTransferCharacteristics =
gfx::CICP::TC_UNSPECIFIED;
gfx::CICP::MatrixCoefficients mMatrixCoefficients = gfx::CICP::MC_UNSPECIFIED;
OwnedDav1dPicture mColorDav1d;
OwnedDav1dPicture mAlphaDav1d;
UniquePtr<OwnedAOMImage> mColorAOM;
UniquePtr<OwnedAOMImage> mAlphaAOM;
// CICP values (either from the BMFF container or the AV1 sequence header) are
// used to create the colorspace transform. CICP::MatrixCoefficients is only
// stored for the sake of telemetry, since the relevant information for YUV ->
// RGB conversion is stored in mYUVColorSpace.
//
// There are three potential sources of color information for an AVIF:
// 1. ICC profile via a ColourInformationBox (colr) defined in [ISOBMFF]
// § 12.1.5 "Colour information" and [MIAF] § 7.3.6.4 "Colour information
// property"
// 2. NCLX (AKA CICP, see [ITU-T H.273]) values in the same
// ColourInformationBox (a single 'colr' box can carry either an ICC
// profile or NCLX values, not both)
// 3. NCLX values in the AV1 bitstream
//
// The 'colr' box is optional, but there are always CICP values in the AV1
// bitstream, so it is possible to have both. Per ISOBMFF § 12.1.5.1
// > If colour information is supplied in both this box, and also in the
// > video bitstream, this box takes precedence, and over-rides the
// > information in the bitstream.
//
// If present, the ICC profile takes precedence over CICP values, but only
// specifies the color space, not the matrix coefficients necessary to convert
// YCbCr data (as most AVIFs are encoded) to RGB. The matrix coefficients are
// always derived from the CICP values for matrix_coefficients (and
// potentially colour_primaries, but in that case only the CICP values for
// colour_primaries will be used, not anything harvested from the ICC
// profile).
//
// If there is no ICC profile, the color space transform will be based on the
// CICP values either from the 'colr' box, or if absent/unspecified, the
// decoded AV1 sequence header.
//
// For values that are 2 (meaning unspecified) after trying both, the
// fallback values are:
// - CP: 1 (BT.709/sRGB)
// - TC: 13 (sRGB)
// - MC: 6 (BT.601)
// - Range: Full
//
// Additional details here:
// <https://github.com/AOMediaCodec/libavif/wiki/CICP#unspecified>. Note
// that this contradicts the current version of [MIAF] § 7.3.6.4 which
// specifies MC=1 (BT.709). This is revised in [MIAF DAMD2] and confirmed by
// <https://github.com/AOMediaCodec/av1-avif/issues/77#issuecomment-676526097>
//
// The precedence among these sources, and the defaults applied when no valid
// values are found, are handled by SetCicpValues() below.
//
// References:
// [ISOBMFF]: ISO/IEC 14496-12:2020 <https://www.iso.org/standard/74428.html>
// [MIAF]: ISO/IEC 23000-22:2019 <https://www.iso.org/standard/74417.html>
// [MIAF DAMD2]: ISO/IEC 23000-22:2019/FDAmd 2
// <https://www.iso.org/standard/81634.html>
// [ITU-T H.273]: Rec. ITU-T H.273 (12/2016)
// <https://www.itu.int/rec/T-REC-H.273-201612-I/en>
void SetCicpValues(
const Mp4parseNclxColourInformation* aNclx,
const gfx::CICP::ColourPrimaries aAv1ColourPrimaries,
const gfx::CICP::TransferCharacteristics aAv1TransferCharacteristics,
const gfx::CICP::MatrixCoefficients aAv1MatrixCoefficients);
};
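
// The precedence described above can be illustrated with a short sketch.
// This helper is purely illustrative and is not used by the decoder; the
// name ResolveCicpField is hypothetical. It relies only on the
// *_UNSPECIFIED enumerators already referenced in this header: a value from
// the 'colr' box wins, then the AV1 sequence header, then the documented
// fallback (e.g. MC 6 / BT.601 for matrix coefficients).
template <typename CicpEnum>
CicpEnum ResolveCicpField(CicpEnum aColrBoxValue, CicpEnum aBitstreamValue,
                          CicpEnum aUnspecified, CicpEnum aFallback) {
  if (aColrBoxValue != aUnspecified) {
    // Per ISOBMFF § 12.1.5.1, the 'colr' box overrides the bitstream.
    return aColrBoxValue;
  }
  if (aBitstreamValue != aUnspecified) {
    // Otherwise use the values from the AV1 sequence header.
    return aBitstreamValue;
  }
  // Both sources are unspecified: fall back to the defaults listed above
  // (CP 1, TC 13, MC 6, full range).
  return aFallback;
}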

// An interface to perform the decode and retrieve the decoded data.
class AVIFDecoderInterface {
public:
using Dav1dResult = nsAVIFDecoder::Dav1dResult;
using NonAOMCodecError = nsAVIFDecoder::NonAOMCodecError;
using AOMResult = nsAVIFDecoder::AOMResult;
using NonDecoderResult = nsAVIFDecoder::NonDecoderResult;
using DecodeResult = nsAVIFDecoder::DecodeResult;
virtual ~AVIFDecoderInterface() = default;
// Sets mDecodedData if Decode() succeeds.
virtual DecodeResult Decode(bool aShouldSendTelemetry,
const Mp4parseAvifInfo& aAVIFInfo,
const AVIFImage& aSamples) = 0;
// Must be called only once after Decode() succeeds
UniquePtr<AVIFDecodedData> GetDecodedData() {
MOZ_ASSERT(mDecodedData);
return std::move(mDecodedData);
}
protected:
explicit AVIFDecoderInterface() = default;
inline static bool IsDecodeSuccess(const DecodeResult& aResult) {
return nsAVIFDecoder::IsDecodeSuccess(aResult);
}
// mDecodedData is valid only after Decode() succeeds.
UniquePtr<AVIFDecodedData> mDecodedData;
};
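
// A typical call sequence for an AVIFDecoderInterface implementation, shown
// as a rough sketch in comments. The concrete dav1d/aom-backed
// implementations live in nsAVIFDecoder.cpp; 'decoder', 'info' and 'image'
// below are placeholders, and IsDecodeSuccess is reachable here because the
// real caller, nsAVIFDecoder, owns that helper:
//
//   DecodeResult result =
//       decoder->Decode(/* aShouldSendTelemetry */ true, info, image);
//   if (nsAVIFDecoder::IsDecodeSuccess(result)) {
//     // GetDecodedData() transfers ownership; call it at most once.
//     UniquePtr<AVIFDecodedData> data = decoder->GetDecodedData();
//   }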

} // namespace image
} // namespace mozilla

#endif // mozilla_image_decoders_nsAVIFDecoder_h