author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 17:32:43 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 17:32:43 +0000
commit     6bf0a5cb5034a7e684dcc3500e841785237ce2dd (patch)
tree       a68f146d7fa01f0134297619fbe7e33db084e0aa /gfx/harfbuzz/src/OT
parent     Initial commit. (diff)
Adding upstream version 1:115.7.0. (upstream/1%115.7.0, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'gfx/harfbuzz/src/OT')
-rw-r--r--  gfx/harfbuzz/src/OT/Color/CBDT/CBDT.hh  1030
-rw-r--r--  gfx/harfbuzz/src/OT/Color/COLR/COLR.hh  2436
-rw-r--r--  gfx/harfbuzz/src/OT/Color/COLR/colrv1-closure.hh  107
-rw-r--r--  gfx/harfbuzz/src/OT/Color/CPAL/CPAL.hh  350
-rw-r--r--  gfx/harfbuzz/src/OT/Color/sbix/sbix.hh  452
-rw-r--r--  gfx/harfbuzz/src/OT/Color/svg/svg.hh  151
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/Common/Coverage.hh  348
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh  133
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh  239
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/Common/RangeRecord.hh  97
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GDEF/GDEF.hh  942
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/Anchor.hh  83
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat1.hh  46
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat2.hh  58
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat3.hh  100
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/AnchorMatrix.hh  77
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/ChainContextPos.hh  14
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/Common.hh  33
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/ContextPos.hh  14
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/CursivePos.hh  35
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/CursivePosFormat1.hh  301
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/ExtensionPos.hh  17
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/GPOS.hh  171
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/LigatureArray.hh  56
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkArray.hh  128
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh  41
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh  244
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh  41
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPosFormat1.hh  223
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh  42
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh  228
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/MarkRecord.hh  52
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PairPos.hh  46
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh  217
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh  356
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PairSet.hh  207
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PairValueRecord.hh  99
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PosLookup.hh  79
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/PosLookupSubTable.hh  79
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/SinglePos.hh  100
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh  164
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh  176
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GPOS/ValueFormat.hh  394
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh  126
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh  62
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubstFormat1.hh  128
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/ChainContextSubst.hh  18
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/Common.hh  21
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/ContextSubst.hh  18
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/ExtensionSubst.hh  22
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/GSUB.hh  61
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/Ligature.hh  190
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSet.hh  189
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh  71
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubstFormat1.hh  166
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh  62
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubstFormat1.hh  130
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubst.hh  36
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubstFormat1.hh  244
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/Sequence.hh  165
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh  103
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh  204
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh  176
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookup.hh  220
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookupSubTable.hh  77
-rw-r--r--  gfx/harfbuzz/src/OT/Layout/types.hh  66
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/CompositeGlyph.hh  423
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/Glyph.hh  577
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/GlyphHeader.hh  52
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/SimpleGlyph.hh  348
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/SubsetGlyph.hh  152
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh  401
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/composite-iter.hh  68
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/coord-setter.hh  34
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/glyf-helpers.hh  104
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/glyf.hh  504
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/loca.hh  43
-rw-r--r--  gfx/harfbuzz/src/OT/glyf/path-builder.hh  189
-rw-r--r--  gfx/harfbuzz/src/OT/name/name.hh  589
79 files changed, 16245 insertions, 0 deletions
diff --git a/gfx/harfbuzz/src/OT/Color/CBDT/CBDT.hh b/gfx/harfbuzz/src/OT/Color/CBDT/CBDT.hh
new file mode 100644
index 0000000000..b125052344
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Color/CBDT/CBDT.hh
@@ -0,0 +1,1030 @@
+/*
+ * Copyright © 2016 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Seigo Nonaka, Calder Kitagawa
+ */
+
+#ifndef OT_COLOR_CBDT_CBDT_HH
+#define OT_COLOR_CBDT_CBDT_HH
+
+#include "../../../hb-open-type.hh"
+#include "../../../hb-paint.hh"
+
+/*
+ * CBLC -- Color Bitmap Location
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/cblc
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/eblc
+ * CBDT -- Color Bitmap Data
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/cbdt
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/ebdt
+ */
+#define HB_OT_TAG_CBLC HB_TAG('C','B','L','C')
+#define HB_OT_TAG_CBDT HB_TAG('C','B','D','T')
+
+
+namespace OT {
+
+struct cblc_bitmap_size_subset_context_t
+{
+ const char *cbdt;
+ unsigned int cbdt_length;
+ hb_vector_t<char> *cbdt_prime;
+ unsigned int size; /* INOUT
+ * Input: old size of IndexSubtable
+ * Output: new size of IndexSubtable
+ */
+ unsigned int num_tables; /* INOUT
+ * Input: old number of subtables.
+ * Output: new number of subtables.
+ */
+ hb_codepoint_t start_glyph; /* OUT */
+ hb_codepoint_t end_glyph; /* OUT */
+};
+
+static inline bool
+_copy_data_to_cbdt (hb_vector_t<char> *cbdt_prime,
+ const void *data,
+ unsigned length)
+{
+ unsigned int new_len = cbdt_prime->length + length;
+ if (unlikely (!cbdt_prime->alloc (new_len))) return false;
+ hb_memcpy (cbdt_prime->arrayZ + cbdt_prime->length, data, length);
+ cbdt_prime->length = new_len;
+ return true;
+}
+
+struct SmallGlyphMetrics
+{
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ void get_extents (hb_font_t *font, hb_glyph_extents_t *extents, bool scale) const
+ {
+ extents->x_bearing = bearingX;
+ extents->y_bearing = bearingY;
+ extents->width = width;
+ extents->height = -static_cast<int> (height);
+
+ if (scale)
+ font->scale_glyph_extents (extents);
+ }
+
+ HBUINT8 height;
+ HBUINT8 width;
+ HBINT8 bearingX;
+ HBINT8 bearingY;
+ HBUINT8 advance;
+ public:
+ DEFINE_SIZE_STATIC (5);
+};
+
+struct BigGlyphMetrics : SmallGlyphMetrics
+{
+ HBINT8 vertBearingX;
+ HBINT8 vertBearingY;
+ HBUINT8 vertAdvance;
+ public:
+ DEFINE_SIZE_STATIC (8);
+};
+
+struct SBitLineMetrics
+{
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ HBINT8 ascender;
+ HBINT8 decender;
+ HBUINT8 widthMax;
+ HBINT8 caretSlopeNumerator;
+ HBINT8 caretSlopeDenominator;
+ HBINT8 caretOffset;
+ HBINT8 minOriginSB;
+ HBINT8 minAdvanceSB;
+ HBINT8 maxBeforeBL;
+ HBINT8 minAfterBL;
+ HBINT8 padding1;
+ HBINT8 padding2;
+ public:
+ DEFINE_SIZE_STATIC (12);
+};
+
+
+/*
+ * Index Subtables.
+ */
+
+struct IndexSubtableHeader
+{
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ HBUINT16 indexFormat;
+ HBUINT16 imageFormat;
+ HBUINT32 imageDataOffset;
+ public:
+ DEFINE_SIZE_STATIC (8);
+};
+
+template <typename OffsetType>
+struct IndexSubtableFormat1Or3
+{
+ bool sanitize (hb_sanitize_context_t *c, unsigned int glyph_count) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ offsetArrayZ.sanitize (c, glyph_count + 1));
+ }
+
+ bool get_image_data (unsigned int idx,
+ unsigned int *offset,
+ unsigned int *length) const
+ {
+ if (unlikely (offsetArrayZ[idx + 1] <= offsetArrayZ[idx]))
+ return false;
+
+ *offset = header.imageDataOffset + offsetArrayZ[idx];
+ *length = offsetArrayZ[idx + 1] - offsetArrayZ[idx];
+ return true;
+ }
+
+ bool add_offset (hb_serialize_context_t *c,
+ unsigned int offset,
+ unsigned int *size /* OUT (accumulated) */)
+ {
+ TRACE_SERIALIZE (this);
+ Offset<OffsetType> embedded_offset;
+ embedded_offset = offset;
+ *size += sizeof (OffsetType);
+ auto *o = c->embed (embedded_offset);
+ return_trace ((bool) o);
+ }
+
+ IndexSubtableHeader header;
+ UnsizedArrayOf<Offset<OffsetType>>
+ offsetArrayZ;
+ public:
+ DEFINE_SIZE_ARRAY (8, offsetArrayZ);
+};
+
+struct IndexSubtableFormat1 : IndexSubtableFormat1Or3<HBUINT32> {};
+struct IndexSubtableFormat3 : IndexSubtableFormat1Or3<HBUINT16> {};
+
+struct IndexSubtable
+{
+ bool sanitize (hb_sanitize_context_t *c, unsigned int glyph_count) const
+ {
+ TRACE_SANITIZE (this);
+ if (!u.header.sanitize (c)) return_trace (false);
+ switch (u.header.indexFormat)
+ {
+ case 1: return_trace (u.format1.sanitize (c, glyph_count));
+ case 3: return_trace (u.format3.sanitize (c, glyph_count));
+ default:return_trace (true);
+ }
+ }
+
+ bool
+ finish_subtable (hb_serialize_context_t *c,
+ unsigned int cbdt_prime_len,
+ unsigned int num_glyphs,
+ unsigned int *size /* OUT (accumulated) */)
+ {
+ TRACE_SERIALIZE (this);
+
+ unsigned int local_offset = cbdt_prime_len - u.header.imageDataOffset;
+ switch (u.header.indexFormat)
+ {
+ case 1: return_trace (u.format1.add_offset (c, local_offset, size));
+ case 3: {
+ if (!u.format3.add_offset (c, local_offset, size))
+ return_trace (false);
+ if (!(num_glyphs & 0x01)) // Pad to 32-bit alignment if needed.
+ return_trace (u.format3.add_offset (c, 0, size));
+ return_trace (true);
+ }
+ // TODO: implement 2, 4, 5.
+ case 2: case 4: // No-op.
+ case 5: // Pad to 32-bit aligned.
+ default: return_trace (false);
+ }
+ }
+
+ bool
+ fill_missing_glyphs (hb_serialize_context_t *c,
+ unsigned int cbdt_prime_len,
+ unsigned int num_missing,
+ unsigned int *size /* OUT (accumulated) */,
+ unsigned int *num_glyphs /* OUT (accumulated) */)
+ {
+ TRACE_SERIALIZE (this);
+
+ unsigned int local_offset = cbdt_prime_len - u.header.imageDataOffset;
+ switch (u.header.indexFormat)
+ {
+ case 1: {
+ for (unsigned int i = 0; i < num_missing; i++)
+ {
+ if (unlikely (!u.format1.add_offset (c, local_offset, size)))
+ return_trace (false);
+ *num_glyphs += 1;
+ }
+ return_trace (true);
+ }
+ case 3: {
+ for (unsigned int i = 0; i < num_missing; i++)
+ {
+ if (unlikely (!u.format3.add_offset (c, local_offset, size)))
+ return_trace (false);
+ *num_glyphs += 1;
+ }
+ return_trace (true);
+ }
+ // TODO: implement 2, 4, 5.
+ case 2: // Add empty space in cbdt_prime?.
+ case 4: case 5: // No-op as sparse is supported.
+ default: return_trace (false);
+ }
+ }
+
+ bool
+ copy_glyph_at_idx (hb_serialize_context_t *c, unsigned int idx,
+ const char *cbdt, unsigned int cbdt_length,
+ hb_vector_t<char> *cbdt_prime /* INOUT */,
+ IndexSubtable *subtable_prime /* INOUT */,
+ unsigned int *size /* OUT (accumulated) */) const
+ {
+ TRACE_SERIALIZE (this);
+
+ unsigned int offset, length, format;
+ if (unlikely (!get_image_data (idx, &offset, &length, &format))) return_trace (false);
+ if (unlikely (offset > cbdt_length || cbdt_length - offset < length)) return_trace (false);
+
+ auto *header_prime = subtable_prime->get_header ();
+ unsigned int new_local_offset = cbdt_prime->length - (unsigned int) header_prime->imageDataOffset;
+ if (unlikely (!_copy_data_to_cbdt (cbdt_prime, cbdt + offset, length))) return_trace (false);
+
+ return_trace (subtable_prime->add_offset (c, new_local_offset, size));
+ }
+
+ bool
+ add_offset (hb_serialize_context_t *c, unsigned int local_offset,
+ unsigned int *size /* OUT (accumulated) */)
+ {
+ TRACE_SERIALIZE (this);
+ switch (u.header.indexFormat)
+ {
+ case 1: return_trace (u.format1.add_offset (c, local_offset, size));
+ case 3: return_trace (u.format3.add_offset (c, local_offset, size));
+ // TODO: Implement tables 2, 4, 5
+ case 2: // Should be a no-op.
+ case 4: case 5: // Handle sparse cases.
+ default: return_trace (false);
+ }
+ }
+
+ bool get_extents (hb_glyph_extents_t *extents HB_UNUSED, bool scale HB_UNUSED) const
+ {
+ switch (u.header.indexFormat)
+ {
+ case 2: case 5: /* TODO */
+ case 1: case 3: case 4: /* Variable-metrics formats do not have metrics here. */
+ default:return (false);
+ }
+ }
+
+ bool
+ get_image_data (unsigned int idx, unsigned int *offset,
+ unsigned int *length, unsigned int *format) const
+ {
+ *format = u.header.imageFormat;
+ switch (u.header.indexFormat)
+ {
+ case 1: return u.format1.get_image_data (idx, offset, length);
+ case 3: return u.format3.get_image_data (idx, offset, length);
+ default: return false;
+ }
+ }
+
+ const IndexSubtableHeader* get_header () const { return &u.header; }
+
+ void populate_header (unsigned index_format,
+ unsigned image_format,
+ unsigned int image_data_offset,
+ unsigned int *size)
+ {
+ u.header.indexFormat = index_format;
+ u.header.imageFormat = image_format;
+ u.header.imageDataOffset = image_data_offset;
+ switch (u.header.indexFormat)
+ {
+ case 1: *size += IndexSubtableFormat1::min_size; break;
+ case 3: *size += IndexSubtableFormat3::min_size; break;
+ }
+ }
+
+ protected:
+ union {
+ IndexSubtableHeader header;
+ IndexSubtableFormat1 format1;
+ IndexSubtableFormat3 format3;
+ /* TODO: Format 2, 4, 5. */
+ } u;
+ public:
+ DEFINE_SIZE_UNION (8, header);
+};
+
+struct IndexSubtableRecord
+{
+ /* XXX Remove this and fix by not inserting it into vector. */
+ IndexSubtableRecord& operator = (const IndexSubtableRecord &o)
+ {
+ firstGlyphIndex = o.firstGlyphIndex;
+ lastGlyphIndex = o.lastGlyphIndex;
+ offsetToSubtable = (unsigned) o.offsetToSubtable;
+ assert (offsetToSubtable.is_null ());
+ return *this;
+ }
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ firstGlyphIndex <= lastGlyphIndex &&
+ offsetToSubtable.sanitize (c, base, lastGlyphIndex - firstGlyphIndex + 1));
+ }
+
+ const IndexSubtable* get_subtable (const void *base) const
+ {
+ return &(base+offsetToSubtable);
+ }
+
+ bool add_new_subtable (hb_subset_context_t* c,
+ cblc_bitmap_size_subset_context_t *bitmap_size_context,
+ IndexSubtableRecord *record,
+ const hb_vector_t<hb_pair_t<hb_codepoint_t, const IndexSubtableRecord*>> *lookup, /* IN */
+ const void *base,
+ unsigned int *start /* INOUT */) const
+ {
+ TRACE_SERIALIZE (this);
+
+ auto *subtable = c->serializer->start_embed<IndexSubtable> ();
+ if (unlikely (!subtable)) return_trace (false);
+ if (unlikely (!c->serializer->extend_min (subtable))) return_trace (false);
+
+ auto *old_subtable = get_subtable (base);
+ auto *old_header = old_subtable->get_header ();
+
+ subtable->populate_header (old_header->indexFormat,
+ old_header->imageFormat,
+ bitmap_size_context->cbdt_prime->length,
+ &bitmap_size_context->size);
+
+ unsigned int num_glyphs = 0;
+ bool early_exit = false;
+ for (unsigned int i = *start; i < lookup->length; i++)
+ {
+ hb_codepoint_t new_gid = (*lookup)[i].first;
+ const IndexSubtableRecord *next_record = (*lookup)[i].second;
+ const IndexSubtable *next_subtable = next_record->get_subtable (base);
+ auto *next_header = next_subtable->get_header ();
+ if (next_header != old_header)
+ {
+ *start = i;
+ early_exit = true;
+ break;
+ }
+ unsigned int num_missing = record->add_glyph_for_subset (new_gid);
+ if (unlikely (!subtable->fill_missing_glyphs (c->serializer,
+ bitmap_size_context->cbdt_prime->length,
+ num_missing,
+ &bitmap_size_context->size,
+ &num_glyphs)))
+ return_trace (false);
+
+ hb_codepoint_t old_gid = 0;
+ c->plan->old_gid_for_new_gid (new_gid, &old_gid);
+ if (old_gid < next_record->firstGlyphIndex)
+ return_trace (false);
+
+ unsigned int old_idx = (unsigned int) old_gid - next_record->firstGlyphIndex;
+ if (unlikely (!next_subtable->copy_glyph_at_idx (c->serializer,
+ old_idx,
+ bitmap_size_context->cbdt,
+ bitmap_size_context->cbdt_length,
+ bitmap_size_context->cbdt_prime,
+ subtable,
+ &bitmap_size_context->size)))
+ return_trace (false);
+ num_glyphs += 1;
+ }
+ if (!early_exit)
+ *start = lookup->length;
+ if (unlikely (!subtable->finish_subtable (c->serializer,
+ bitmap_size_context->cbdt_prime->length,
+ num_glyphs,
+ &bitmap_size_context->size)))
+ return_trace (false);
+ return_trace (true);
+ }
+
+ bool add_new_record (hb_subset_context_t *c,
+ cblc_bitmap_size_subset_context_t *bitmap_size_context,
+ const hb_vector_t<hb_pair_t<hb_codepoint_t, const IndexSubtableRecord*>> *lookup, /* IN */
+ const void *base,
+ unsigned int *start, /* INOUT */
+ hb_vector_t<IndexSubtableRecord>* records /* INOUT */) const
+ {
+ TRACE_SERIALIZE (this);
+ auto snap = c->serializer->snapshot ();
+ unsigned int old_size = bitmap_size_context->size;
+ unsigned int old_cbdt_prime_length = bitmap_size_context->cbdt_prime->length;
+
+ // Set to invalid state to indicate filling glyphs is not yet started.
+ if (unlikely (!c->serializer->check_success (records->resize (records->length + 1))))
+ return_trace (false);
+
+ records->tail ().firstGlyphIndex = 1;
+ records->tail ().lastGlyphIndex = 0;
+ bitmap_size_context->size += IndexSubtableRecord::min_size;
+
+ c->serializer->push ();
+
+ if (unlikely (!add_new_subtable (c, bitmap_size_context, &(records->tail ()), lookup, base, start)))
+ {
+ c->serializer->pop_discard ();
+ c->serializer->revert (snap);
+ bitmap_size_context->cbdt_prime->shrink (old_cbdt_prime_length);
+ bitmap_size_context->size = old_size;
+ records->resize (records->length - 1);
+ return_trace (false);
+ }
+
+ bitmap_size_context->num_tables += 1;
+ return_trace (true);
+ }
+
+ unsigned int add_glyph_for_subset (hb_codepoint_t gid)
+ {
+ if (firstGlyphIndex > lastGlyphIndex)
+ {
+ firstGlyphIndex = gid;
+ lastGlyphIndex = gid;
+ return 0;
+ }
+ // TODO maybe assert? this shouldn't occur.
+ if (lastGlyphIndex > gid)
+ return 0;
+ unsigned int num_missing = (unsigned int) (gid - lastGlyphIndex - 1);
+ lastGlyphIndex = gid;
+ return num_missing;
+ }
+
+ bool get_extents (hb_glyph_extents_t *extents, const void *base, bool scale) const
+ { return (base+offsetToSubtable).get_extents (extents, scale); }
+
+ bool get_image_data (unsigned int gid,
+ const void *base,
+ unsigned int *offset,
+ unsigned int *length,
+ unsigned int *format) const
+ {
+ if (gid < firstGlyphIndex || gid > lastGlyphIndex) return false;
+ return (base+offsetToSubtable).get_image_data (gid - firstGlyphIndex,
+ offset, length, format);
+ }
+
+ HBGlyphID16 firstGlyphIndex;
+ HBGlyphID16 lastGlyphIndex;
+ Offset32To<IndexSubtable> offsetToSubtable;
+ public:
+ DEFINE_SIZE_STATIC (8);
+};
+
+struct IndexSubtableArray
+{
+ friend struct CBDT;
+
+ bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (indexSubtablesZ.sanitize (c, count, this));
+ }
+
+ void
+ build_lookup (hb_subset_context_t *c, cblc_bitmap_size_subset_context_t *bitmap_size_context,
+ hb_vector_t<hb_pair_t<hb_codepoint_t,
+ const IndexSubtableRecord*>> *lookup /* OUT */) const
+ {
+ bool start_glyph_is_set = false;
+ for (hb_codepoint_t new_gid = 0; new_gid < c->plan->num_output_glyphs (); new_gid++)
+ {
+ hb_codepoint_t old_gid;
+ if (unlikely (!c->plan->old_gid_for_new_gid (new_gid, &old_gid))) continue;
+
+ const IndexSubtableRecord* record = find_table (old_gid, bitmap_size_context->num_tables);
+ if (unlikely (!record)) continue;
+
+ // Don't add gaps to the lookup. The best way to determine if a glyph is a
+ // gap is that it has no image data.
+ unsigned int offset, length, format;
+ if (unlikely (!record->get_image_data (old_gid, this, &offset, &length, &format))) continue;
+
+ lookup->push (hb_pair_t<hb_codepoint_t, const IndexSubtableRecord*> (new_gid, record));
+
+ if (!start_glyph_is_set)
+ {
+ bitmap_size_context->start_glyph = new_gid;
+ start_glyph_is_set = true;
+ }
+
+ bitmap_size_context->end_glyph = new_gid;
+ }
+ }
+
+ bool
+ subset (hb_subset_context_t *c,
+ cblc_bitmap_size_subset_context_t *bitmap_size_context) const
+ {
+ TRACE_SUBSET (this);
+
+ auto *dst = c->serializer->start_embed<IndexSubtableArray> ();
+ if (unlikely (!dst)) return_trace (false);
+
+ hb_vector_t<hb_pair_t<hb_codepoint_t, const IndexSubtableRecord*>> lookup;
+ build_lookup (c, bitmap_size_context, &lookup);
+ if (unlikely (!c->serializer->propagate_error (lookup)))
+ return false;
+
+ bitmap_size_context->size = 0;
+ bitmap_size_context->num_tables = 0;
+ hb_vector_t<IndexSubtableRecord> records;
+ for (unsigned int start = 0; start < lookup.length;)
+ {
+ if (unlikely (!lookup[start].second->add_new_record (c, bitmap_size_context, &lookup, this, &start, &records)))
+ {
+ // Discard any leftover pushes to the serializer from successful records.
+ for (unsigned int i = 0; i < records.length; i++)
+ c->serializer->pop_discard ();
+ return_trace (false);
+ }
+ }
+
+ /* Workaround to ensure offset ordering is from least to greatest when
+ * resolving links. */
+ hb_vector_t<hb_serialize_context_t::objidx_t> objidxs;
+ for (unsigned int i = 0; i < records.length; i++)
+ objidxs.push (c->serializer->pop_pack ());
+ for (unsigned int i = 0; i < records.length; i++)
+ {
+ IndexSubtableRecord* record = c->serializer->embed (records[i]);
+ if (unlikely (!record)) return_trace (false);
+ c->serializer->add_link (record->offsetToSubtable, objidxs[records.length - 1 - i]);
+ }
+ return_trace (true);
+ }
+
+ public:
+ const IndexSubtableRecord* find_table (hb_codepoint_t glyph, unsigned int numTables) const
+ {
+ for (unsigned int i = 0; i < numTables; ++i)
+ {
+ unsigned int firstGlyphIndex = indexSubtablesZ[i].firstGlyphIndex;
+ unsigned int lastGlyphIndex = indexSubtablesZ[i].lastGlyphIndex;
+ if (firstGlyphIndex <= glyph && glyph <= lastGlyphIndex)
+ return &indexSubtablesZ[i];
+ }
+ return nullptr;
+ }
+
+ protected:
+ UnsizedArrayOf<IndexSubtableRecord> indexSubtablesZ;
+};
+
+struct BitmapSizeTable
+{
+ friend struct CBLC;
+ friend struct CBDT;
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ indexSubtableArrayOffset.sanitize (c, base, numberOfIndexSubtables) &&
+ horizontal.sanitize (c) &&
+ vertical.sanitize (c));
+ }
+
+ const IndexSubtableRecord *
+ find_table (hb_codepoint_t glyph, const void *base, const void **out_base) const
+ {
+ *out_base = &(base+indexSubtableArrayOffset);
+ return (base+indexSubtableArrayOffset).find_table (glyph, numberOfIndexSubtables);
+ }
+
+ bool
+ subset (hb_subset_context_t *c, const void *base,
+ const char *cbdt, unsigned int cbdt_length,
+ hb_vector_t<char> *cbdt_prime /* INOUT */) const
+ {
+ TRACE_SUBSET (this);
+ auto *out_table = c->serializer->embed (this);
+ if (unlikely (!out_table)) return_trace (false);
+
+ cblc_bitmap_size_subset_context_t bitmap_size_context;
+ bitmap_size_context.cbdt = cbdt;
+ bitmap_size_context.cbdt_length = cbdt_length;
+ bitmap_size_context.cbdt_prime = cbdt_prime;
+ bitmap_size_context.size = indexTablesSize;
+ bitmap_size_context.num_tables = numberOfIndexSubtables;
+ bitmap_size_context.start_glyph = 1;
+ bitmap_size_context.end_glyph = 0;
+
+ if (!out_table->indexSubtableArrayOffset.serialize_subset (c,
+ indexSubtableArrayOffset,
+ base,
+ &bitmap_size_context))
+ return_trace (false);
+ if (!bitmap_size_context.size ||
+ !bitmap_size_context.num_tables ||
+ bitmap_size_context.start_glyph > bitmap_size_context.end_glyph)
+ return_trace (false);
+
+ out_table->indexTablesSize = bitmap_size_context.size;
+ out_table->numberOfIndexSubtables = bitmap_size_context.num_tables;
+ out_table->startGlyphIndex = bitmap_size_context.start_glyph;
+ out_table->endGlyphIndex = bitmap_size_context.end_glyph;
+ return_trace (true);
+ }
+
+ protected:
+ NNOffset32To<IndexSubtableArray>
+ indexSubtableArrayOffset;
+ HBUINT32 indexTablesSize;
+ HBUINT32 numberOfIndexSubtables;
+ HBUINT32 colorRef;
+ SBitLineMetrics horizontal;
+ SBitLineMetrics vertical;
+ HBGlyphID16 startGlyphIndex;
+ HBGlyphID16 endGlyphIndex;
+ HBUINT8 ppemX;
+ HBUINT8 ppemY;
+ HBUINT8 bitDepth;
+ HBINT8 flags;
+ public:
+ DEFINE_SIZE_STATIC (48);
+};
+
+
+/*
+ * Glyph Bitmap Data Formats.
+ */
+
+struct GlyphBitmapDataFormat17
+{
+ SmallGlyphMetrics glyphMetrics;
+ Array32Of<HBUINT8> data;
+ public:
+ DEFINE_SIZE_ARRAY (9, data);
+};
+
+struct GlyphBitmapDataFormat18
+{
+ BigGlyphMetrics glyphMetrics;
+ Array32Of<HBUINT8> data;
+ public:
+ DEFINE_SIZE_ARRAY (12, data);
+};
+
+struct GlyphBitmapDataFormat19
+{
+ Array32Of<HBUINT8> data;
+ public:
+ DEFINE_SIZE_ARRAY (4, data);
+};
+
+struct CBLC
+{
+ friend struct CBDT;
+
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_CBLC;
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ likely (version.major == 2 || version.major == 3) &&
+ sizeTables.sanitize (c, this));
+ }
+
+ static bool
+ sink_cbdt (hb_subset_context_t *c, hb_vector_t<char>* cbdt_prime)
+ {
+ hb_blob_t *cbdt_prime_blob = hb_blob_create (cbdt_prime->arrayZ,
+ cbdt_prime->length,
+ HB_MEMORY_MODE_WRITABLE,
+ cbdt_prime->arrayZ,
+ hb_free);
+ cbdt_prime->init (); // Leak arrayZ to the blob.
+ bool ret = c->plan->add_table (HB_OT_TAG_CBDT, cbdt_prime_blob);
+ hb_blob_destroy (cbdt_prime_blob);
+ return ret;
+ }
+
+ bool
+ subset_size_table (hb_subset_context_t *c, const BitmapSizeTable& table,
+ const char *cbdt /* IN */, unsigned int cbdt_length,
+ CBLC *cblc_prime /* INOUT */, hb_vector_t<char> *cbdt_prime /* INOUT */) const
+ {
+ TRACE_SUBSET (this);
+ cblc_prime->sizeTables.len++;
+
+ auto snap = c->serializer->snapshot ();
+ auto cbdt_prime_len = cbdt_prime->length;
+
+ if (!table.subset (c, this, cbdt, cbdt_length, cbdt_prime))
+ {
+ cblc_prime->sizeTables.len--;
+ c->serializer->revert (snap);
+ cbdt_prime->shrink (cbdt_prime_len);
+ return_trace (false);
+ }
+ return_trace (true);
+ }
+
+ // Implemented in cc file as it depends on definition of CBDT.
+ HB_INTERNAL bool subset (hb_subset_context_t *c) const;
+
+ protected:
+ const BitmapSizeTable &choose_strike (hb_font_t *font) const
+ {
+ unsigned count = sizeTables.len;
+ if (unlikely (!count))
+ return Null (BitmapSizeTable);
+
+ unsigned int requested_ppem = hb_max (font->x_ppem, font->y_ppem);
+ if (!requested_ppem)
+ requested_ppem = 1<<30; /* Choose largest strike. */
+ unsigned int best_i = 0;
+ unsigned int best_ppem = hb_max (sizeTables[0].ppemX, sizeTables[0].ppemY);
+
+ for (unsigned int i = 1; i < count; i++)
+ {
+ unsigned int ppem = hb_max (sizeTables[i].ppemX, sizeTables[i].ppemY);
+ if ((requested_ppem <= ppem && ppem < best_ppem) ||
+ (requested_ppem > best_ppem && ppem > best_ppem))
+ {
+ best_i = i;
+ best_ppem = ppem;
+ }
+ }
+
+ return sizeTables[best_i];
+ }
+
+ protected:
+ FixedVersion<> version;
+ Array32Of<BitmapSizeTable> sizeTables;
+ public:
+ DEFINE_SIZE_ARRAY (8, sizeTables);
+};
+
+struct CBDT
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_CBDT;
+
+ struct accelerator_t
+ {
+ accelerator_t (hb_face_t *face)
+ {
+ this->cblc = hb_sanitize_context_t ().reference_table<CBLC> (face);
+ this->cbdt = hb_sanitize_context_t ().reference_table<CBDT> (face);
+
+ upem = hb_face_get_upem (face);
+ }
+ ~accelerator_t ()
+ {
+ this->cblc.destroy ();
+ this->cbdt.destroy ();
+ }
+
+ bool
+ get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents, bool scale = true) const
+ {
+ const void *base;
+ const BitmapSizeTable &strike = this->cblc->choose_strike (font);
+ const IndexSubtableRecord *subtable_record = strike.find_table (glyph, cblc, &base);
+ if (!subtable_record || !strike.ppemX || !strike.ppemY)
+ return false;
+
+ if (subtable_record->get_extents (extents, base, scale))
+ return true;
+
+ unsigned int image_offset = 0, image_length = 0, image_format = 0;
+ if (!subtable_record->get_image_data (glyph, base, &image_offset, &image_length, &image_format))
+ return false;
+
+ unsigned int cbdt_len = cbdt.get_length ();
+ if (unlikely (image_offset > cbdt_len || cbdt_len - image_offset < image_length))
+ return false;
+
+ switch (image_format)
+ {
+ case 17: {
+ if (unlikely (image_length < GlyphBitmapDataFormat17::min_size))
+ return false;
+ auto &glyphFormat17 = StructAtOffset<GlyphBitmapDataFormat17> (this->cbdt, image_offset);
+ glyphFormat17.glyphMetrics.get_extents (font, extents, scale);
+ break;
+ }
+ case 18: {
+ if (unlikely (image_length < GlyphBitmapDataFormat18::min_size))
+ return false;
+ auto &glyphFormat18 = StructAtOffset<GlyphBitmapDataFormat18> (this->cbdt, image_offset);
+ glyphFormat18.glyphMetrics.get_extents (font, extents, scale);
+ break;
+ }
+ default: return false; /* TODO: Support other image formats. */
+ }
+
+ /* Convert to font units. */
+ if (scale)
+ {
+ float x_scale = upem / (float) strike.ppemX;
+ float y_scale = upem / (float) strike.ppemY;
+ extents->x_bearing = roundf (extents->x_bearing * x_scale);
+ extents->y_bearing = roundf (extents->y_bearing * y_scale);
+ extents->width = roundf (extents->width * x_scale);
+ extents->height = roundf (extents->height * y_scale);
+ }
+
+ return true;
+ }
+
+ hb_blob_t*
+ reference_png (hb_font_t *font, hb_codepoint_t glyph) const
+ {
+ const void *base;
+ const BitmapSizeTable &strike = this->cblc->choose_strike (font);
+ const IndexSubtableRecord *subtable_record = strike.find_table (glyph, cblc, &base);
+ if (!subtable_record || !strike.ppemX || !strike.ppemY)
+ return hb_blob_get_empty ();
+
+ unsigned int image_offset = 0, image_length = 0, image_format = 0;
+ if (!subtable_record->get_image_data (glyph, base, &image_offset, &image_length, &image_format))
+ return hb_blob_get_empty ();
+
+ unsigned int cbdt_len = cbdt.get_length ();
+ if (unlikely (image_offset > cbdt_len || cbdt_len - image_offset < image_length))
+ return hb_blob_get_empty ();
+
+ switch (image_format)
+ {
+ case 17:
+ {
+ if (unlikely (image_length < GlyphBitmapDataFormat17::min_size))
+ return hb_blob_get_empty ();
+ auto &glyphFormat17 = StructAtOffset<GlyphBitmapDataFormat17> (this->cbdt, image_offset);
+ return hb_blob_create_sub_blob (cbdt.get_blob (),
+ image_offset + GlyphBitmapDataFormat17::min_size,
+ glyphFormat17.data.len);
+ }
+ case 18:
+ {
+ if (unlikely (image_length < GlyphBitmapDataFormat18::min_size))
+ return hb_blob_get_empty ();
+ auto &glyphFormat18 = StructAtOffset<GlyphBitmapDataFormat18> (this->cbdt, image_offset);
+ return hb_blob_create_sub_blob (cbdt.get_blob (),
+ image_offset + GlyphBitmapDataFormat18::min_size,
+ glyphFormat18.data.len);
+ }
+ case 19:
+ {
+ if (unlikely (image_length < GlyphBitmapDataFormat19::min_size))
+ return hb_blob_get_empty ();
+ auto &glyphFormat19 = StructAtOffset<GlyphBitmapDataFormat19> (this->cbdt, image_offset);
+ return hb_blob_create_sub_blob (cbdt.get_blob (),
+ image_offset + GlyphBitmapDataFormat19::min_size,
+ glyphFormat19.data.len);
+ }
+ default: return hb_blob_get_empty (); /* TODO: Support other image formats. */
+ }
+ }
+
+ bool has_data () const { return cbdt.get_length (); }
+
+ bool paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data) const
+ {
+ hb_glyph_extents_t extents;
+ hb_glyph_extents_t pixel_extents;
+ hb_blob_t *blob = reference_png (font, glyph);
+
+ if (unlikely (blob == hb_blob_get_empty ()))
+ return false;
+
+ if (unlikely (!hb_font_get_glyph_extents (font, glyph, &extents)))
+ return false;
+
+ if (unlikely (!get_extents (font, glyph, &pixel_extents, false)))
+ return false;
+
+ bool ret = funcs->image (data,
+ blob,
+ pixel_extents.width, -pixel_extents.height,
+ HB_PAINT_IMAGE_FORMAT_PNG,
+ font->slant_xy,
+ &extents);
+
+ hb_blob_destroy (blob);
+ return ret;
+ }
+
+ private:
+ hb_blob_ptr_t<CBLC> cblc;
+ hb_blob_ptr_t<CBDT> cbdt;
+
+ unsigned int upem;
+ };
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ likely (version.major == 2 || version.major == 3));
+ }
+
+ protected:
+ FixedVersion<> version;
+ UnsizedArrayOf<HBUINT8> dataZ;
+ public:
+ DEFINE_SIZE_ARRAY (4, dataZ);
+};
+
+inline bool
+CBLC::subset (hb_subset_context_t *c) const
+{
+ TRACE_SUBSET (this);
+
+ auto *cblc_prime = c->serializer->start_embed<CBLC> ();
+
+ // Use a vector as a secondary buffer as the tables need to be built in parallel.
+ hb_vector_t<char> cbdt_prime;
+
+ if (unlikely (!cblc_prime)) return_trace (false);
+ if (unlikely (!c->serializer->extend_min (cblc_prime))) return_trace (false);
+ cblc_prime->version = version;
+
+ hb_blob_t* cbdt_blob = hb_sanitize_context_t ().reference_table<CBDT> (c->plan->source);
+ unsigned int cbdt_length;
+ CBDT* cbdt = (CBDT *) hb_blob_get_data (cbdt_blob, &cbdt_length);
+ if (unlikely (cbdt_length < CBDT::min_size))
+ {
+ hb_blob_destroy (cbdt_blob);
+ return_trace (false);
+ }
+ _copy_data_to_cbdt (&cbdt_prime, cbdt, CBDT::min_size);
+
+ for (const BitmapSizeTable& table : + sizeTables.iter ())
+ subset_size_table (c, table, (const char *) cbdt, cbdt_length, cblc_prime, &cbdt_prime);
+
+ hb_blob_destroy (cbdt_blob);
+
+ return_trace (CBLC::sink_cbdt (c, &cbdt_prime));
+}
+
+struct CBDT_accelerator_t : CBDT::accelerator_t {
+ CBDT_accelerator_t (hb_face_t *face) : CBDT::accelerator_t (face) {}
+};
+
+
+} /* namespace OT */
+
+#endif /* OT_COLOR_CBDT_CBDT_HH */
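
The header above only defines the internal OT::CBDT/OT::CBLC structures; applications normally reach the embedded PNG bitmaps through HarfBuzz's public hb-ot-color API rather than these types. A minimal sketch of that path, assuming a color-bitmap font file passed on the command line (the file argument and the hard-coded glyph id 1 are illustrative only, not taken from this diff):

#include <hb.h>
#include <hb-ot.h>
#include <stdio.h>

int main (int argc, char **argv)
{
  if (argc < 2) return 1;

  /* Load the font file and wrap it in a face/font pair. */
  hb_blob_t *file = hb_blob_create_from_file (argv[1]);
  hb_face_t *face = hb_face_create (file, 0);
  hb_font_t *font = hb_font_create (face);

  /* hb_ot_color_has_png () reports whether CBDT/sbix PNG images are present. */
  if (hb_ot_color_has_png (face))
  {
    /* Glyph id 1 is an arbitrary example; real code would map a codepoint to a glyph first. */
    hb_blob_t *png = hb_ot_color_glyph_reference_png (font, 1);
    printf ("glyph 1: %u bytes of PNG data\n", hb_blob_get_length (png));
    hb_blob_destroy (png);
  }

  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (file);
  return 0;
}

Build with something like cc example.c $(pkg-config --cflags --libs harfbuzz). hb_ot_color_glyph_reference_png () returns the empty blob when no image is available, so the reported length is simply 0 in that case.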
diff --git a/gfx/harfbuzz/src/OT/Color/COLR/COLR.hh b/gfx/harfbuzz/src/OT/Color/COLR/COLR.hh
new file mode 100644
index 0000000000..2a47984294
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Color/COLR/COLR.hh
@@ -0,0 +1,2436 @@
+/*
+ * Copyright © 2018 Ebrahim Byagowi
+ * Copyright © 2020 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Calder Kitagawa
+ */
+
+#ifndef OT_COLOR_COLR_COLR_HH
+#define OT_COLOR_COLR_COLR_HH
+
+#include "../../../hb.hh"
+#include "../../../hb-open-type.hh"
+#include "../../../hb-ot-var-common.hh"
+#include "../../../hb-paint.hh"
+#include "../../../hb-paint-extents.hh"
+
+/*
+ * COLR -- Color
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/colr
+ */
+#define HB_OT_TAG_COLR HB_TAG('C','O','L','R')
+
+namespace OT {
+struct hb_paint_context_t;
+}
+
+namespace OT {
+
+struct COLR;
+
+struct Paint;
+
+struct hb_paint_context_t :
+ hb_dispatch_context_t<hb_paint_context_t>
+{
+ template <typename T>
+ return_t dispatch (const T &obj) { obj.paint_glyph (this); return hb_empty_t (); }
+ static return_t default_return_value () { return hb_empty_t (); }
+
+ const COLR* get_colr_table () const
+ { return reinterpret_cast<const COLR *> (base); }
+
+public:
+ const void *base;
+ hb_paint_funcs_t *funcs;
+ void *data;
+ hb_font_t *font;
+ unsigned int palette_index;
+ hb_color_t foreground;
+ VarStoreInstancer &instancer;
+ int depth_left = HB_MAX_NESTING_LEVEL;
+ int edge_count = HB_COLRV1_MAX_EDGE_COUNT;
+
+ hb_paint_context_t (const void *base_,
+ hb_paint_funcs_t *funcs_,
+ void *data_,
+ hb_font_t *font_,
+ unsigned int palette_,
+ hb_color_t foreground_,
+ VarStoreInstancer &instancer_) :
+ base (base_),
+ funcs (funcs_),
+ data (data_),
+ font (font_),
+ palette_index (palette_),
+ foreground (foreground_),
+ instancer (instancer_)
+ { }
+
+ hb_color_t get_color (unsigned int color_index, float alpha, hb_bool_t *is_foreground)
+ {
+ hb_color_t color = foreground;
+
+ *is_foreground = true;
+
+ if (color_index != 0xffff)
+ {
+ if (!funcs->custom_palette_color (data, color_index, &color))
+ {
+ unsigned int clen = 1;
+ hb_face_t *face = hb_font_get_face (font);
+
+ hb_ot_color_palette_get_colors (face, palette_index, color_index, &clen, &color);
+ }
+
+ *is_foreground = false;
+ }
+
+ return HB_COLOR (hb_color_get_blue (color),
+ hb_color_get_green (color),
+ hb_color_get_red (color),
+ hb_color_get_alpha (color) * alpha);
+ }
+
+ inline void recurse (const Paint &paint);
+};
+
+struct hb_colrv1_closure_context_t :
+ hb_dispatch_context_t<hb_colrv1_closure_context_t>
+{
+ template <typename T>
+ return_t dispatch (const T &obj)
+ {
+ if (unlikely (nesting_level_left == 0))
+ return hb_empty_t ();
+
+ if (paint_visited (&obj))
+ return hb_empty_t ();
+
+ nesting_level_left--;
+ obj.closurev1 (this);
+ nesting_level_left++;
+ return hb_empty_t ();
+ }
+ static return_t default_return_value () { return hb_empty_t (); }
+
+ bool paint_visited (const void *paint)
+ {
+ hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) paint - (uintptr_t) base);
+ if (visited_paint.in_error() || visited_paint.has (delta))
+ return true;
+
+ visited_paint.add (delta);
+ return false;
+ }
+
+ const COLR* get_colr_table () const
+ { return reinterpret_cast<const COLR *> (base); }
+
+ void add_glyph (unsigned glyph_id)
+ { glyphs->add (glyph_id); }
+
+ void add_layer_indices (unsigned first_layer_index, unsigned num_of_layers)
+ { layer_indices->add_range (first_layer_index, first_layer_index + num_of_layers - 1); }
+
+ void add_palette_index (unsigned palette_index)
+ { palette_indices->add (palette_index); }
+
+ public:
+ const void *base;
+ hb_set_t visited_paint;
+ hb_set_t *glyphs;
+ hb_set_t *layer_indices;
+ hb_set_t *palette_indices;
+ unsigned nesting_level_left;
+
+ hb_colrv1_closure_context_t (const void *base_,
+ hb_set_t *glyphs_,
+ hb_set_t *layer_indices_,
+ hb_set_t *palette_indices_,
+ unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
+ base (base_),
+ glyphs (glyphs_),
+ layer_indices (layer_indices_),
+ palette_indices (palette_indices_),
+ nesting_level_left (nesting_level_left_)
+ {}
+};
+
+struct LayerRecord
+{
+ operator hb_ot_color_layer_t () const { return {glyphId, colorIdx}; }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ public:
+ HBGlyphID16 glyphId; /* Glyph ID of layer glyph */
+ Index colorIdx; /* Index value to use with a
+ * selected color palette.
+ * An index value of 0xFFFF
+ * is a special case indicating
+ * that the text foreground
+ * color (defined by a
+ * higher-level client) should
+ * be used and shall not be
+ * treated as actual index
+ * into CPAL ColorRecord array. */
+ public:
+ DEFINE_SIZE_STATIC (4);
+};
+
+struct BaseGlyphRecord
+{
+ int cmp (hb_codepoint_t g) const
+ { return g < glyphId ? -1 : g > glyphId ? 1 : 0; }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ public:
+ HBGlyphID16 glyphId; /* Glyph ID of reference glyph */
+ HBUINT16 firstLayerIdx; /* Index (from beginning of
+ * the Layer Records) to the
+ * layer record. There will be
+ * numLayers consecutive entries
+ * for this base glyph. */
+ HBUINT16 numLayers; /* Number of color layers
+ * associated with this glyph */
+ public:
+ DEFINE_SIZE_STATIC (6);
+};
+
+template <typename T>
+struct Variable
+{
+ static constexpr bool is_variable = true;
+
+ Variable<T>* copy (hb_serialize_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (c->embed (this));
+ }
+
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { value.closurev1 (c); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ if (!value.subset (c, instancer, varIdxBase)) return_trace (false);
+ if (c->plan->all_axes_pinned)
+ return_trace (true);
+
+ //TODO: update varIdxBase for partial-instancing
+ return_trace (c->serializer->embed (varIdxBase));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && value.sanitize (c));
+ }
+
+ void paint_glyph (hb_paint_context_t *c) const
+ {
+ value.paint_glyph (c, varIdxBase);
+ }
+
+ void get_color_stop (hb_paint_context_t *c,
+ hb_color_stop_t *stop,
+ const VarStoreInstancer &instancer) const
+ {
+ value.get_color_stop (c, stop, varIdxBase, instancer);
+ }
+
+ hb_paint_extend_t get_extend () const
+ {
+ return value.get_extend ();
+ }
+
+ protected:
+ T value;
+ public:
+ VarIdx varIdxBase;
+ public:
+ DEFINE_SIZE_STATIC (4 + T::static_size);
+};
+
+template <typename T>
+struct NoVariable
+{
+ static constexpr bool is_variable = false;
+
+ static constexpr uint32_t varIdxBase = VarIdx::NO_VARIATION;
+
+ NoVariable<T>* copy (hb_serialize_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (c->embed (this));
+ }
+
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { value.closurev1 (c); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ return_trace (value.subset (c, instancer, varIdxBase));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && value.sanitize (c));
+ }
+
+ void paint_glyph (hb_paint_context_t *c) const
+ {
+ value.paint_glyph (c, varIdxBase);
+ }
+
+ void get_color_stop (hb_paint_context_t *c,
+ hb_color_stop_t *stop,
+ const VarStoreInstancer &instancer) const
+ {
+ value.get_color_stop (c, stop, VarIdx::NO_VARIATION, instancer);
+ }
+
+ hb_paint_extend_t get_extend () const
+ {
+ return value.get_extend ();
+ }
+
+ T value;
+ public:
+ DEFINE_SIZE_STATIC (T::static_size);
+};
+
+// Color structures
+
+struct ColorStop
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { c->add_palette_index (paletteIndex); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->stopOffset.set_float (stopOffset.to_float(instancer (varIdxBase, 0)));
+ out->alpha.set_float (alpha.to_float (instancer (varIdxBase, 1)));
+ }
+
+ return_trace (c->serializer->check_assign (out->paletteIndex, c->plan->colr_palettes.get (paletteIndex),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ void get_color_stop (hb_paint_context_t *c,
+ hb_color_stop_t *out,
+ uint32_t varIdx,
+ const VarStoreInstancer &instancer) const
+ {
+ out->offset = stopOffset.to_float(instancer (varIdx, 0));
+ out->color = c->get_color (paletteIndex,
+ alpha.to_float (instancer (varIdx, 1)),
+ &out->is_foreground);
+ }
+
+ F2DOT14 stopOffset;
+ HBUINT16 paletteIndex;
+ F2DOT14 alpha;
+ public:
+ DEFINE_SIZE_STATIC (2 + 2 * F2DOT14::static_size);
+};
+
+struct Extend : HBUINT8
+{
+ enum {
+ EXTEND_PAD = 0,
+ EXTEND_REPEAT = 1,
+ EXTEND_REFLECT = 2,
+ };
+ public:
+ DEFINE_SIZE_STATIC (1);
+};
+
+template <template<typename> class Var>
+struct ColorLine
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ {
+ for (const auto &stop : stops.iter ())
+ stop.closurev1 (c);
+ }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (this);
+ if (unlikely (!out)) return_trace (false);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ if (!c->serializer->check_assign (out->extend, extend, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);
+ if (!c->serializer->check_assign (out->stops.len, stops.len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)) return_trace (false);
+
+ for (const auto& stop : stops.iter ())
+ {
+ if (!stop.subset (c, instancer)) return_trace (false);
+ }
+ return_trace (true);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ stops.sanitize (c));
+ }
+
+ /* get up to count stops from start */
+ unsigned int
+ get_color_stops (hb_paint_context_t *c,
+ unsigned int start,
+ unsigned int *count,
+ hb_color_stop_t *color_stops,
+ const VarStoreInstancer &instancer) const
+ {
+ unsigned int len = stops.len;
+
+ if (count && color_stops)
+ {
+ unsigned int i;
+ for (i = 0; i < *count && start + i < len; i++)
+ stops[start + i].get_color_stop (c, &color_stops[i], instancer);
+ *count = i;
+ }
+
+ return len;
+ }
+
+ HB_INTERNAL static unsigned int static_get_color_stops (hb_color_line_t *color_line,
+ void *color_line_data,
+ unsigned int start,
+ unsigned int *count,
+ hb_color_stop_t *color_stops,
+ void *user_data)
+ {
+ const ColorLine *thiz = (const ColorLine *) color_line_data;
+ hb_paint_context_t *c = (hb_paint_context_t *) user_data;
+ return thiz->get_color_stops (c, start, count, color_stops, c->instancer);
+ }
+
+ hb_paint_extend_t get_extend () const
+ {
+ return (hb_paint_extend_t) (unsigned int) extend;
+ }
+
+ HB_INTERNAL static hb_paint_extend_t static_get_extend (hb_color_line_t *color_line,
+ void *color_line_data,
+ void *user_data)
+ {
+ const ColorLine *thiz = (const ColorLine *) color_line_data;
+ return thiz->get_extend ();
+ }
+
+ Extend extend;
+ Array16Of<Var<ColorStop>> stops;
+ public:
+ DEFINE_SIZE_ARRAY_SIZED (3, stops);
+};
+
+// Composition modes
+
+// Compositing modes are taken from https://www.w3.org/TR/compositing-1/
+// NOTE: a brief audit of major implementations suggests most support most
+// or all of the specified modes.
+struct CompositeMode : HBUINT8
+{
+ enum {
+ // Porter-Duff modes
+ // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators
+ COMPOSITE_CLEAR = 0, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_clear
+ COMPOSITE_SRC = 1, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_src
+ COMPOSITE_DEST = 2, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dst
+ COMPOSITE_SRC_OVER = 3, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcover
+ COMPOSITE_DEST_OVER = 4, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstover
+ COMPOSITE_SRC_IN = 5, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcin
+ COMPOSITE_DEST_IN = 6, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstin
+ COMPOSITE_SRC_OUT = 7, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcout
+ COMPOSITE_DEST_OUT = 8, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstout
+ COMPOSITE_SRC_ATOP = 9, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcatop
+ COMPOSITE_DEST_ATOP = 10, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstatop
+ COMPOSITE_XOR = 11, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_xor
+ COMPOSITE_PLUS = 12, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_plus
+
+ // Blend modes
+ // https://www.w3.org/TR/compositing-1/#blending
+ COMPOSITE_SCREEN = 13, // https://www.w3.org/TR/compositing-1/#blendingscreen
+ COMPOSITE_OVERLAY = 14, // https://www.w3.org/TR/compositing-1/#blendingoverlay
+ COMPOSITE_DARKEN = 15, // https://www.w3.org/TR/compositing-1/#blendingdarken
+ COMPOSITE_LIGHTEN = 16, // https://www.w3.org/TR/compositing-1/#blendinglighten
+ COMPOSITE_COLOR_DODGE = 17, // https://www.w3.org/TR/compositing-1/#blendingcolordodge
+ COMPOSITE_COLOR_BURN = 18, // https://www.w3.org/TR/compositing-1/#blendingcolorburn
+ COMPOSITE_HARD_LIGHT = 19, // https://www.w3.org/TR/compositing-1/#blendinghardlight
+ COMPOSITE_SOFT_LIGHT = 20, // https://www.w3.org/TR/compositing-1/#blendingsoftlight
+ COMPOSITE_DIFFERENCE = 21, // https://www.w3.org/TR/compositing-1/#blendingdifference
+ COMPOSITE_EXCLUSION = 22, // https://www.w3.org/TR/compositing-1/#blendingexclusion
+ COMPOSITE_MULTIPLY = 23, // https://www.w3.org/TR/compositing-1/#blendingmultiply
+
+ // Modes that, uniquely, do not operate on components
+ // https://www.w3.org/TR/compositing-1/#blendingnonseparable
+ COMPOSITE_HSL_HUE = 24, // https://www.w3.org/TR/compositing-1/#blendinghue
+ COMPOSITE_HSL_SATURATION = 25, // https://www.w3.org/TR/compositing-1/#blendingsaturation
+ COMPOSITE_HSL_COLOR = 26, // https://www.w3.org/TR/compositing-1/#blendingcolor
+ COMPOSITE_HSL_LUMINOSITY = 27, // https://www.w3.org/TR/compositing-1/#blendingluminosity
+ };
+ public:
+ DEFINE_SIZE_STATIC (1);
+};
+
+struct Affine2x3
+{
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->xx.set_float (xx.to_float(instancer (varIdxBase, 0)));
+ out->yx.set_float (yx.to_float(instancer (varIdxBase, 1)));
+ out->xy.set_float (xy.to_float(instancer (varIdxBase, 2)));
+ out->yy.set_float (yy.to_float(instancer (varIdxBase, 3)));
+ out->dx.set_float (dx.to_float(instancer (varIdxBase, 4)));
+ out->dy.set_float (dy.to_float(instancer (varIdxBase, 5)));
+ }
+ return_trace (true);
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ c->funcs->push_transform (c->data,
+ xx.to_float (c->instancer (varIdxBase, 0)),
+ yx.to_float (c->instancer (varIdxBase, 1)),
+ xy.to_float (c->instancer (varIdxBase, 2)),
+ yy.to_float (c->instancer (varIdxBase, 3)),
+ dx.to_float (c->instancer (varIdxBase, 4)),
+ dy.to_float (c->instancer (varIdxBase, 5)));
+ }
+
+ F16DOT16 xx;
+ F16DOT16 yx;
+ F16DOT16 xy;
+ F16DOT16 yy;
+ F16DOT16 dx;
+ F16DOT16 dy;
+ public:
+ DEFINE_SIZE_STATIC (6 * F16DOT16::static_size);
+};
+
+struct PaintColrLayers
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer HB_UNUSED) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+ return_trace (c->serializer->check_assign (out->firstLayerIndex, c->plan->colrv1_layers.get (firstLayerIndex),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW));
+
+ return_trace (true);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ inline void paint_glyph (hb_paint_context_t *c) const;
+
+ HBUINT8 format; /* format = 1 */
+ HBUINT8 numLayers;
+ HBUINT32 firstLayerIndex; /* index into COLRv1::layerList */
+ public:
+ DEFINE_SIZE_STATIC (6);
+};
+
+struct PaintSolid
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { c->add_palette_index (paletteIndex); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ out->alpha.set_float (alpha.to_float (instancer (varIdxBase, 0)));
+
+ if (format == 3 && c->plan->all_axes_pinned)
+ out->format = 2;
+
+ return_trace (c->serializer->check_assign (out->paletteIndex, c->plan->colr_palettes.get (paletteIndex),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ hb_bool_t is_foreground;
+ hb_color_t color;
+
+ color = c->get_color (paletteIndex,
+ alpha.to_float (c->instancer (varIdxBase, 0)),
+ &is_foreground);
+ c->funcs->color (c->data, is_foreground, color);
+ }
+
+ HBUINT8 format; /* format = 2(noVar) or 3(Var)*/
+ HBUINT16 paletteIndex;
+ F2DOT14 alpha;
+ public:
+ DEFINE_SIZE_STATIC (3 + F2DOT14::static_size);
+};
+
+template <template<typename> class Var>
+struct PaintLinearGradient
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { (this+colorLine).closurev1 (c); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->x0 = x0 + (int) roundf (instancer (varIdxBase, 0));
+ out->y0 = y0 + (int) roundf (instancer (varIdxBase, 1));
+ out->x1 = x1 + (int) roundf (instancer (varIdxBase, 2));
+ out->y1 = y1 + (int) roundf (instancer (varIdxBase, 3));
+ out->x2 = x2 + (int) roundf (instancer (varIdxBase, 4));
+ out->y2 = y2 + (int) roundf (instancer (varIdxBase, 5));
+ }
+
+ if (format == 5 && c->plan->all_axes_pinned)
+ out->format = 4;
+
+ return_trace (out->colorLine.serialize_subset (c, colorLine, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ hb_color_line_t cl = {
+ (void *) &(this+colorLine),
+ (this+colorLine).static_get_color_stops, c,
+ (this+colorLine).static_get_extend, nullptr
+ };
+
+ c->funcs->linear_gradient (c->data, &cl,
+ x0 + c->instancer (varIdxBase, 0),
+ y0 + c->instancer (varIdxBase, 1),
+ x1 + c->instancer (varIdxBase, 2),
+ y1 + c->instancer (varIdxBase, 3),
+ x2 + c->instancer (varIdxBase, 4),
+ y2 + c->instancer (varIdxBase, 5));
+ }
+
+ HBUINT8 format; /* format = 4(noVar) or 5 (Var) */
+ Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintLinearGradient
+ * table) to ColorLine subtable. */
+ FWORD x0;
+ FWORD y0;
+ FWORD x1;
+ FWORD y1;
+ FWORD x2;
+ FWORD y2;
+ public:
+ DEFINE_SIZE_STATIC (4 + 6 * FWORD::static_size);
+};
+
+template <template<typename> class Var>
+struct PaintRadialGradient
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { (this+colorLine).closurev1 (c); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->x0 = x0 + (int) roundf (instancer (varIdxBase, 0));
+ out->y0 = y0 + (int) roundf (instancer (varIdxBase, 1));
+ out->radius0 = radius0 + (unsigned) roundf (instancer (varIdxBase, 2));
+ out->x1 = x1 + (int) roundf (instancer (varIdxBase, 3));
+ out->y1 = y1 + (int) roundf (instancer (varIdxBase, 4));
+ out->radius1 = radius1 + (unsigned) roundf (instancer (varIdxBase, 5));
+ }
+
+ if (format == 7 && c->plan->all_axes_pinned)
+ out->format = 6;
+
+ return_trace (out->colorLine.serialize_subset (c, colorLine, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ hb_color_line_t cl = {
+ (void *) &(this+colorLine),
+ (this+colorLine).static_get_color_stops, c,
+ (this+colorLine).static_get_extend, nullptr
+ };
+
+ c->funcs->radial_gradient (c->data, &cl,
+ x0 + c->instancer (varIdxBase, 0),
+ y0 + c->instancer (varIdxBase, 1),
+ radius0 + c->instancer (varIdxBase, 2),
+ x1 + c->instancer (varIdxBase, 3),
+ y1 + c->instancer (varIdxBase, 4),
+ radius1 + c->instancer (varIdxBase, 5));
+ }
+
+ HBUINT8 format; /* format = 6(noVar) or 7 (Var) */
+ Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintRadialGradient
+ * table) to ColorLine subtable. */
+ FWORD x0;
+ FWORD y0;
+ UFWORD radius0;
+ FWORD x1;
+ FWORD y1;
+ UFWORD radius1;
+ public:
+ DEFINE_SIZE_STATIC (4 + 6 * FWORD::static_size);
+};
+
+template <template<typename> class Var>
+struct PaintSweepGradient
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const
+ { (this+colorLine).closurev1 (c); }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->centerX = centerX + (int) roundf (instancer (varIdxBase, 0));
+ out->centerY = centerY + (int) roundf (instancer (varIdxBase, 1));
+ out->startAngle.set_float (startAngle.to_float (instancer (varIdxBase, 2)));
+ out->endAngle.set_float (endAngle.to_float (instancer (varIdxBase, 3)));
+ }
+
+ if (format == 9 && c->plan->all_axes_pinned)
+ out->format = 8;
+
+ return_trace (out->colorLine.serialize_subset (c, colorLine, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ hb_color_line_t cl = {
+ (void *) &(this+colorLine),
+ (this+colorLine).static_get_color_stops, c,
+ (this+colorLine).static_get_extend, nullptr
+ };
+
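+ // Stored angles are biased F2DOT14 half-turns; adding 1 and scaling by pi
+ // converts them to the radians expected by the sweep-gradient callback.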
+ c->funcs->sweep_gradient (c->data, &cl,
+ centerX + c->instancer (varIdxBase, 0),
+ centerY + c->instancer (varIdxBase, 1),
+ (startAngle.to_float (c->instancer (varIdxBase, 2)) + 1) * HB_PI,
+ (endAngle.to_float (c->instancer (varIdxBase, 3)) + 1) * HB_PI);
+ }
+
+ HBUINT8 format; /* format = 8(noVar) or 9 (Var) */
+ Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintSweepGradient
+ * table) to ColorLine subtable. */
+ FWORD centerX;
+ FWORD centerY;
+ F2DOT14 startAngle;
+ F2DOT14 endAngle;
+ public:
+ DEFINE_SIZE_STATIC (4 + 2 * FWORD::static_size + 2 * F2DOT14::static_size);
+};
+
+// Paint a non-COLR glyph, filled as indicated by paint.
+struct PaintGlyph
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (! c->serializer->check_assign (out->gid, c->plan->glyph_map->get (gid),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+
+ return_trace (out->paint.serialize_subset (c, paint, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && paint.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c) const
+ {
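+ // Clip to the glyph outline with the root transform temporarily undone,
+ // then re-apply it before recursing into the child paint.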
+ c->funcs->push_inverse_root_transform (c->data, c->font);
+ c->funcs->push_clip_glyph (c->data, gid, c->font);
+ c->funcs->push_root_transform (c->data, c->font);
+ c->recurse (this+paint);
+ c->funcs->pop_transform (c->data);
+ c->funcs->pop_clip (c->data);
+ c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 10 */
+ Offset24To<Paint> paint; /* Offset (from beginning of PaintGlyph table) to Paint subtable. */
+ HBUINT16 gid;
+ public:
+ DEFINE_SIZE_STATIC (6);
+};
+
+struct PaintColrGlyph
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer HB_UNUSED) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ return_trace (c->serializer->check_assign (out->gid, c->plan->glyph_map->get (gid),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ inline void paint_glyph (hb_paint_context_t *c) const;
+
+ HBUINT8 format; /* format = 11 */
+ HBUINT16 gid;
+ public:
+ DEFINE_SIZE_STATIC (3);
+};
+
+template <template<typename> class Var>
+struct PaintTransform
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+ if (!out->transform.serialize_subset (c, transform, this, instancer)) return_trace (false);
+ if (format == 13 && c->plan->all_axes_pinned)
+ out->format = 12;
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ src.sanitize (c, this) &&
+ transform.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c) const
+ {
+ (this+transform).paint_glyph (c);
+ c->recurse (this+src);
+ c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 12(noVar) or 13 (Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintTransform table) to Paint subtable. */
+ Offset24To<Var<Affine2x3>> transform;
+ public:
+ DEFINE_SIZE_STATIC (7);
+};
+
+struct PaintTranslate
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->dx = dx + (int) roundf (instancer (varIdxBase, 0));
+ out->dy = dy + (int) roundf (instancer (varIdxBase, 1));
+ }
+
+ if (format == 15 && c->plan->all_axes_pinned)
+ out->format = 14;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float ddx = dx + c->instancer (varIdxBase, 0);
+ float ddy = dy + c->instancer (varIdxBase, 1);
+
+ bool p1 = c->funcs->push_translate (c->data, ddx, ddy);
+ c->recurse (this+src);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 14(noVar) or 15 (Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintTranslate table) to Paint subtable. */
+ FWORD dx;
+ FWORD dy;
+ public:
+ DEFINE_SIZE_STATIC (4 + 2 * FWORD::static_size);
+};
+
+struct PaintScale
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->scaleX.set_float (scaleX.to_float (instancer (varIdxBase, 0)));
+ out->scaleY.set_float (scaleY.to_float (instancer (varIdxBase, 1)));
+ }
+
+ if (format == 17 && c->plan->all_axes_pinned)
+ out->format = 16;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float sx = scaleX.to_float (c->instancer (varIdxBase, 0));
+ float sy = scaleY.to_float (c->instancer (varIdxBase, 1));
+
+ bool p1 = c->funcs->push_scale (c->data, sx, sy);
+ c->recurse (this+src);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 16 (noVar) or 17(Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintScale table) to Paint subtable. */
+ F2DOT14 scaleX;
+ F2DOT14 scaleY;
+ public:
+ DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size);
+};
+
+struct PaintScaleAroundCenter
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->scaleX.set_float (scaleX.to_float (instancer (varIdxBase, 0)));
+ out->scaleY.set_float (scaleY.to_float (instancer (varIdxBase, 1)));
+ out->centerX = centerX + (int) roundf (instancer (varIdxBase, 2));
+ out->centerY = centerY + (int) roundf (instancer (varIdxBase, 3));
+ }
+
+ if (format == 19 && c->plan->all_axes_pinned)
+ out->format = 18;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float sx = scaleX.to_float (c->instancer (varIdxBase, 0));
+ float sy = scaleY.to_float (c->instancer (varIdxBase, 1));
+ float tCenterX = centerX + c->instancer (varIdxBase, 2);
+ float tCenterY = centerY + c->instancer (varIdxBase, 3);
+
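+ // Scale about (centerX, centerY): translate the center to the origin, scale,
+ // then translate back; the other *AroundCenter paints follow the same pattern.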
+ bool p1 = c->funcs->push_translate (c->data, +tCenterX, +tCenterY);
+ bool p2 = c->funcs->push_scale (c->data, sx, sy);
+ bool p3 = c->funcs->push_translate (c->data, -tCenterX, -tCenterY);
+ c->recurse (this+src);
+ if (p3) c->funcs->pop_transform (c->data);
+ if (p2) c->funcs->pop_transform (c->data);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 18 (noVar) or 19(Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintScaleAroundCenter table) to Paint subtable. */
+ F2DOT14 scaleX;
+ F2DOT14 scaleY;
+ FWORD centerX;
+ FWORD centerY;
+ public:
+ DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size + 2 * FWORD::static_size);
+};
+
+struct PaintScaleUniform
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ out->scale.set_float (scale.to_float (instancer (varIdxBase, 0)));
+
+ if (format == 21 && c->plan->all_axes_pinned)
+ out->format = 20;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float s = scale.to_float (c->instancer (varIdxBase, 0));
+
+ bool p1 = c->funcs->push_scale (c->data, s, s);
+ c->recurse (this+src);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 20 (noVar) or 21(Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintScaleUniform table) to Paint subtable. */
+ F2DOT14 scale;
+ public:
+ DEFINE_SIZE_STATIC (4 + F2DOT14::static_size);
+};
+
+struct PaintScaleUniformAroundCenter
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->scale.set_float (scale.to_float (instancer (varIdxBase, 0)));
+ out->centerX = centerX + (int) roundf (instancer (varIdxBase, 1));
+ out->centerY = centerY + (int) roundf (instancer (varIdxBase, 2));
+ }
+
+ if (format == 23 && c->plan->all_axes_pinned)
+ out->format = 22;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float s = scale.to_float (c->instancer (varIdxBase, 0));
+ float tCenterX = centerX + c->instancer (varIdxBase, 1);
+ float tCenterY = centerY + c->instancer (varIdxBase, 2);
+
+ bool p1 = c->funcs->push_translate (c->data, +tCenterX, +tCenterY);
+ bool p2 = c->funcs->push_scale (c->data, s, s);
+ bool p3 = c->funcs->push_translate (c->data, -tCenterX, -tCenterY);
+ c->recurse (this+src);
+ if (p3) c->funcs->pop_transform (c->data);
+ if (p2) c->funcs->pop_transform (c->data);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 22 (noVar) or 23(Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintScaleUniformAroundCenter table) to Paint subtable. */
+ F2DOT14 scale;
+ FWORD centerX;
+ FWORD centerY;
+ public:
+ DEFINE_SIZE_STATIC (4 + F2DOT14::static_size + 2 * FWORD::static_size);
+};
+
+struct PaintRotate
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ out->angle.set_float (angle.to_float (instancer (varIdxBase, 0)));
+
+ if (format == 25 && c->plan->all_axes_pinned)
+ out->format = 24;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float a = angle.to_float (c->instancer (varIdxBase, 0));
+
+ bool p1 = c->funcs->push_rotate (c->data, a);
+ c->recurse (this+src);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 24 (noVar) or 25(Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintRotate table) to Paint subtable. */
+ F2DOT14 angle;
+ public:
+ DEFINE_SIZE_STATIC (4 + F2DOT14::static_size);
+};
+
+struct PaintRotateAroundCenter
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->angle.set_float (angle.to_float (instancer (varIdxBase, 0)));
+ out->centerX = centerX + (int) roundf (instancer (varIdxBase, 1));
+ out->centerY = centerY + (int) roundf (instancer (varIdxBase, 2));
+ }
+
+ if (format == 27 && c->plan->all_axes_pinned)
+ out->format = 26;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float a = angle.to_float (c->instancer (varIdxBase, 0));
+ float tCenterX = centerX + c->instancer (varIdxBase, 1);
+ float tCenterY = centerY + c->instancer (varIdxBase, 2);
+
+ bool p1 = c->funcs->push_translate (c->data, +tCenterX, +tCenterY);
+ bool p2 = c->funcs->push_rotate (c->data, a);
+ bool p3 = c->funcs->push_translate (c->data, -tCenterX, -tCenterY);
+ c->recurse (this+src);
+ if (p3) c->funcs->pop_transform (c->data);
+ if (p2) c->funcs->pop_transform (c->data);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 26 (noVar) or 27(Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintRotateAroundCenter table) to Paint subtable. */
+ F2DOT14 angle;
+ FWORD centerX;
+ FWORD centerY;
+ public:
+ DEFINE_SIZE_STATIC (4 + F2DOT14::static_size + 2 * FWORD::static_size);
+};
+
+struct PaintSkew
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->xSkewAngle.set_float (xSkewAngle.to_float (instancer (varIdxBase, 0)));
+ out->ySkewAngle.set_float (ySkewAngle.to_float (instancer (varIdxBase, 1)));
+ }
+
+ if (format == 29 && c->plan->all_axes_pinned)
+ out->format = 28;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float sx = xSkewAngle.to_float (c->instancer (varIdxBase, 0));
+ float sy = ySkewAngle.to_float (c->instancer (varIdxBase, 1));
+
+ bool p1 = c->funcs->push_skew (c->data, sx, sy);
+ c->recurse (this+src);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 28(noVar) or 29 (Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintSkew table) to Paint subtable. */
+ F2DOT14 xSkewAngle;
+ F2DOT14 ySkewAngle;
+ public:
+ DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size);
+};
+
+struct PaintSkewAroundCenter
+{
+ HB_INTERNAL void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->xSkewAngle.set_float (xSkewAngle.to_float (instancer (varIdxBase, 0)));
+ out->ySkewAngle.set_float (ySkewAngle.to_float (instancer (varIdxBase, 1)));
+ out->centerX = centerX + (int) roundf (instancer (varIdxBase, 2));
+ out->centerY = centerY + (int) roundf (instancer (varIdxBase, 3));
+ }
+
+ if (format == 31 && c->plan->all_axes_pinned)
+ out->format = 30;
+
+ return_trace (out->src.serialize_subset (c, src, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && src.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c, uint32_t varIdxBase) const
+ {
+ float sx = xSkewAngle.to_float (c->instancer (varIdxBase, 0));
+ float sy = ySkewAngle.to_float (c->instancer (varIdxBase, 1));
+ float tCenterX = centerX + c->instancer (varIdxBase, 2);
+ float tCenterY = centerY + c->instancer (varIdxBase, 3);
+
+ bool p1 = c->funcs->push_translate (c->data, +tCenterX, +tCenterY);
+ bool p2 = c->funcs->push_skew (c->data, sx, sy);
+ bool p3 = c->funcs->push_translate (c->data, -tCenterX, -tCenterY);
+ c->recurse (this+src);
+ if (p3) c->funcs->pop_transform (c->data);
+ if (p2) c->funcs->pop_transform (c->data);
+ if (p1) c->funcs->pop_transform (c->data);
+ }
+
+ HBUINT8 format; /* format = 30(noVar) or 31 (Var) */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintSkewAroundCenter table) to Paint subtable. */
+ F2DOT14 xSkewAngle;
+ F2DOT14 ySkewAngle;
+ FWORD centerX;
+ FWORD centerY;
+ public:
+ DEFINE_SIZE_STATIC (4 + 2 * F2DOT14::static_size + 2 * FWORD::static_size);
+};
+
+struct PaintComposite
+{
+ void closurev1 (hb_colrv1_closure_context_t* c) const;
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (!out->src.serialize_subset (c, src, this, instancer)) return_trace (false);
+ return_trace (out->backdrop.serialize_subset (c, backdrop, this, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ src.sanitize (c, this) &&
+ backdrop.sanitize (c, this));
+ }
+
+ void paint_glyph (hb_paint_context_t *c) const
+ {
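+ // Paint the backdrop first, then render the source into its own group and
+ // composite the two with the requested mode.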
+ c->recurse (this+backdrop);
+ c->funcs->push_group (c->data);
+ c->recurse (this+src);
+ c->funcs->pop_group (c->data, (hb_paint_composite_mode_t) (int) mode);
+ }
+
+ HBUINT8 format; /* format = 32 */
+ Offset24To<Paint> src; /* Offset (from beginning of PaintComposite table) to source Paint subtable. */
+ CompositeMode mode; /* If mode is unrecognized use COMPOSITE_CLEAR */
+ Offset24To<Paint> backdrop; /* Offset (from beginning of PaintComposite table) to backdrop Paint subtable. */
+ public:
+ DEFINE_SIZE_STATIC (8);
+};
+
+struct ClipBoxData
+{
+ int xMin, yMin, xMax, yMax;
+};
+
+struct ClipBoxFormat1
+{
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer HB_UNUSED) const
+ {
+ clip_box.xMin = xMin;
+ clip_box.yMin = yMin;
+ clip_box.xMax = xMax;
+ clip_box.yMax = yMax;
+ }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ uint32_t varIdxBase) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ if (instancer && !c->plan->pinned_at_default && varIdxBase != VarIdx::NO_VARIATION)
+ {
+ out->xMin = xMin + (int) roundf (instancer (varIdxBase, 0));
+ out->yMin = yMin + (int) roundf (instancer (varIdxBase, 1));
+ out->xMax = xMax + (int) roundf (instancer (varIdxBase, 2));
+ out->yMax = yMax + (int) roundf (instancer (varIdxBase, 3));
+ }
+
+ if (format == 2 && c->plan->all_axes_pinned)
+ out->format = 1;
+
+ return_trace (true);
+ }
+
+ public:
+ HBUINT8 format; /* format = 1 (noVar) or 2 (Var) */
+ FWORD xMin;
+ FWORD yMin;
+ FWORD xMax;
+ FWORD yMax;
+ public:
+ DEFINE_SIZE_STATIC (1 + 4 * FWORD::static_size);
+};
+
+struct ClipBoxFormat2 : Variable<ClipBoxFormat1>
+{
+ void get_clip_box (ClipBoxData &clip_box, const VarStoreInstancer &instancer) const
+ {
+ value.get_clip_box (clip_box, instancer);
+ if (instancer)
+ {
+ clip_box.xMin += roundf (instancer (varIdxBase, 0));
+ clip_box.yMin += roundf (instancer (varIdxBase, 1));
+ clip_box.xMax += roundf (instancer (varIdxBase, 2));
+ clip_box.yMax += roundf (instancer (varIdxBase, 3));
+ }
+ }
+};
+
+struct ClipBox
+{
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ switch (u.format) {
+ case 1: return_trace (u.format1.subset (c, instancer, VarIdx::NO_VARIATION));
+ case 2: return_trace (u.format2.subset (c, instancer));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+ bool get_extents (hb_glyph_extents_t *extents,
+ const VarStoreInstancer &instancer) const
+ {
+ ClipBoxData clip_box;
+ switch (u.format) {
+ case 1:
+ u.format1.get_clip_box (clip_box, instancer);
+ break;
+ case 2:
+ u.format2.get_clip_box (clip_box, instancer);
+ break;
+ default:
+ return false;
+ }
+
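+ // hb_glyph_extents_t has its origin at the top: y_bearing is the top edge
+ // and height extends downward, hence yMin - yMax (normally negative).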
+ extents->x_bearing = clip_box.xMin;
+ extents->y_bearing = clip_box.yMax;
+ extents->width = clip_box.xMax - clip_box.xMin;
+ extents->height = clip_box.yMin - clip_box.yMax;
+ return true;
+ }
+
+ protected:
+ union {
+ HBUINT8 format; /* Format identifier */
+ ClipBoxFormat1 format1;
+ ClipBoxFormat2 format2;
+ } u;
+};
+
+struct ClipRecord
+{
+ int cmp (hb_codepoint_t g) const
+ { return g < startGlyphID ? -1 : g <= endGlyphID ? 0 : +1; }
+
+ bool subset (hb_subset_context_t *c,
+ const void *base,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ return_trace (out->clipBox.serialize_subset (c, clipBox, base, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && clipBox.sanitize (c, base));
+ }
+
+ bool get_extents (hb_glyph_extents_t *extents,
+ const void *base,
+ const VarStoreInstancer &instancer) const
+ {
+ return (base+clipBox).get_extents (extents, instancer);
+ }
+
+ public:
+ HBUINT16 startGlyphID; // first gid clip applies to
+ HBUINT16 endGlyphID; // last gid clip applies to, inclusive
+ Offset24To<ClipBox> clipBox; // Box or VarBox
+ public:
+ DEFINE_SIZE_STATIC (7);
+};
+DECLARE_NULL_NAMESPACE_BYTES (OT, ClipRecord);
+
+struct ClipList
+{
+ unsigned serialize_clip_records (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer,
+ const hb_set_t& gids,
+ const hb_map_t& gid_offset_map) const
+ {
+ TRACE_SERIALIZE (this);
+ if (gids.is_empty () ||
+ gid_offset_map.get_population () != gids.get_population ())
+ return_trace (0);
+
+ unsigned count = 0;
+
+ hb_codepoint_t start_gid = gids.get_min ();
+ hb_codepoint_t prev_gid = start_gid;
+
+ unsigned offset = gid_offset_map.get (start_gid);
+ unsigned prev_offset = offset;
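+ // Merge runs of consecutive gids that share the same clip-box offset into
+ // single ClipRecord ranges.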
+ for (const hb_codepoint_t _ : gids.iter ())
+ {
+ if (_ == start_gid) continue;
+
+ offset = gid_offset_map.get (_);
+ if (_ == prev_gid + 1 && offset == prev_offset)
+ {
+ prev_gid = _;
+ continue;
+ }
+
+ ClipRecord record;
+ record.startGlyphID = start_gid;
+ record.endGlyphID = prev_gid;
+ record.clipBox = prev_offset;
+
+ if (!record.subset (c, this, instancer)) return_trace (0);
+ count++;
+
+ start_gid = _;
+ prev_gid = _;
+ prev_offset = offset;
+ }
+
+ // flush the last pending range
+ {
+ ClipRecord record;
+ record.startGlyphID = start_gid;
+ record.endGlyphID = prev_gid;
+ record.clipBox = prev_offset;
+ if (!record.subset (c, this, instancer)) return_trace (0);
+ count++;
+ }
+ return_trace (count);
+ }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ if (!c->serializer->check_assign (out->format, format, HB_SERIALIZE_ERROR_INT_OVERFLOW)) return_trace (false);
+
+ const hb_set_t& glyphset = c->plan->_glyphset_colred;
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
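+ // Remap each retained old gid to its new gid, remembering the clip-box offset
+ // it used, so ranges can be rebuilt in the new glyph space below.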
+ hb_map_t new_gid_offset_map;
+ hb_set_t new_gids;
+ for (const ClipRecord& record : clips.iter ())
+ {
+ unsigned start_gid = record.startGlyphID;
+ unsigned end_gid = record.endGlyphID;
+ for (unsigned gid = start_gid; gid <= end_gid; gid++)
+ {
+ if (!glyphset.has (gid) || !glyph_map.has (gid)) continue;
+ unsigned new_gid = glyph_map.get (gid);
+ new_gid_offset_map.set (new_gid, record.clipBox);
+ new_gids.add (new_gid);
+ }
+ }
+
+ unsigned count = serialize_clip_records (c, instancer, new_gids, new_gid_offset_map);
+ if (!count) return_trace (false);
+ return_trace (c->serializer->check_assign (out->clips.len, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ // TODO Make a formatted struct!
+ return_trace (c->check_struct (this) && clips.sanitize (c, this));
+ }
+
+ bool
+ get_extents (hb_codepoint_t gid,
+ hb_glyph_extents_t *extents,
+ const VarStoreInstancer &instancer) const
+ {
+ auto *rec = clips.as_array ().bsearch (gid);
+ if (rec)
+ {
+ rec->get_extents (extents, this, instancer);
+ return true;
+ }
+ return false;
+ }
+
+ HBUINT8 format; // Set to 1.
+ SortedArray32Of<ClipRecord> clips; // Clip records, sorted by startGlyphID
+ public:
+ DEFINE_SIZE_ARRAY_SIZED (5, clips);
+};
+
+struct Paint
+{
+
+ template <typename ...Ts>
+ bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
+ {
+ TRACE_SANITIZE (this);
+
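+ // Paint graphs can nest deeply through their offsets; bound the recursion
+ // before dispatching into subtables.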
+ if (unlikely (!c->check_start_recursion (HB_MAX_NESTING_LEVEL)))
+ return_trace (c->no_dispatch_return_value ());
+
+ return_trace (c->end_recursion (this->dispatch (c, std::forward<Ts> (ds)...)));
+ }
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.paintformat1, std::forward<Ts> (ds)...));
+ case 2: return_trace (c->dispatch (u.paintformat2, std::forward<Ts> (ds)...));
+ case 3: return_trace (c->dispatch (u.paintformat3, std::forward<Ts> (ds)...));
+ case 4: return_trace (c->dispatch (u.paintformat4, std::forward<Ts> (ds)...));
+ case 5: return_trace (c->dispatch (u.paintformat5, std::forward<Ts> (ds)...));
+ case 6: return_trace (c->dispatch (u.paintformat6, std::forward<Ts> (ds)...));
+ case 7: return_trace (c->dispatch (u.paintformat7, std::forward<Ts> (ds)...));
+ case 8: return_trace (c->dispatch (u.paintformat8, std::forward<Ts> (ds)...));
+ case 9: return_trace (c->dispatch (u.paintformat9, std::forward<Ts> (ds)...));
+ case 10: return_trace (c->dispatch (u.paintformat10, std::forward<Ts> (ds)...));
+ case 11: return_trace (c->dispatch (u.paintformat11, std::forward<Ts> (ds)...));
+ case 12: return_trace (c->dispatch (u.paintformat12, std::forward<Ts> (ds)...));
+ case 13: return_trace (c->dispatch (u.paintformat13, std::forward<Ts> (ds)...));
+ case 14: return_trace (c->dispatch (u.paintformat14, std::forward<Ts> (ds)...));
+ case 15: return_trace (c->dispatch (u.paintformat15, std::forward<Ts> (ds)...));
+ case 16: return_trace (c->dispatch (u.paintformat16, std::forward<Ts> (ds)...));
+ case 17: return_trace (c->dispatch (u.paintformat17, std::forward<Ts> (ds)...));
+ case 18: return_trace (c->dispatch (u.paintformat18, std::forward<Ts> (ds)...));
+ case 19: return_trace (c->dispatch (u.paintformat19, std::forward<Ts> (ds)...));
+ case 20: return_trace (c->dispatch (u.paintformat20, std::forward<Ts> (ds)...));
+ case 21: return_trace (c->dispatch (u.paintformat21, std::forward<Ts> (ds)...));
+ case 22: return_trace (c->dispatch (u.paintformat22, std::forward<Ts> (ds)...));
+ case 23: return_trace (c->dispatch (u.paintformat23, std::forward<Ts> (ds)...));
+ case 24: return_trace (c->dispatch (u.paintformat24, std::forward<Ts> (ds)...));
+ case 25: return_trace (c->dispatch (u.paintformat25, std::forward<Ts> (ds)...));
+ case 26: return_trace (c->dispatch (u.paintformat26, std::forward<Ts> (ds)...));
+ case 27: return_trace (c->dispatch (u.paintformat27, std::forward<Ts> (ds)...));
+ case 28: return_trace (c->dispatch (u.paintformat28, std::forward<Ts> (ds)...));
+ case 29: return_trace (c->dispatch (u.paintformat29, std::forward<Ts> (ds)...));
+ case 30: return_trace (c->dispatch (u.paintformat30, std::forward<Ts> (ds)...));
+ case 31: return_trace (c->dispatch (u.paintformat31, std::forward<Ts> (ds)...));
+ case 32: return_trace (c->dispatch (u.paintformat32, std::forward<Ts> (ds)...));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+ protected:
+ union {
+ HBUINT8 format;
+ PaintColrLayers paintformat1;
+ NoVariable<PaintSolid> paintformat2;
+ Variable<PaintSolid> paintformat3;
+ NoVariable<PaintLinearGradient<NoVariable>> paintformat4;
+ Variable<PaintLinearGradient<Variable>> paintformat5;
+ NoVariable<PaintRadialGradient<NoVariable>> paintformat6;
+ Variable<PaintRadialGradient<Variable>> paintformat7;
+ NoVariable<PaintSweepGradient<NoVariable>> paintformat8;
+ Variable<PaintSweepGradient<Variable>> paintformat9;
+ PaintGlyph paintformat10;
+ PaintColrGlyph paintformat11;
+ PaintTransform<NoVariable> paintformat12;
+ PaintTransform<Variable> paintformat13;
+ NoVariable<PaintTranslate> paintformat14;
+ Variable<PaintTranslate> paintformat15;
+ NoVariable<PaintScale> paintformat16;
+ Variable<PaintScale> paintformat17;
+ NoVariable<PaintScaleAroundCenter> paintformat18;
+ Variable<PaintScaleAroundCenter> paintformat19;
+ NoVariable<PaintScaleUniform> paintformat20;
+ Variable<PaintScaleUniform> paintformat21;
+ NoVariable<PaintScaleUniformAroundCenter> paintformat22;
+ Variable<PaintScaleUniformAroundCenter> paintformat23;
+ NoVariable<PaintRotate> paintformat24;
+ Variable<PaintRotate> paintformat25;
+ NoVariable<PaintRotateAroundCenter> paintformat26;
+ Variable<PaintRotateAroundCenter> paintformat27;
+ NoVariable<PaintSkew> paintformat28;
+ Variable<PaintSkew> paintformat29;
+ NoVariable<PaintSkewAroundCenter> paintformat30;
+ Variable<PaintSkewAroundCenter> paintformat31;
+ PaintComposite paintformat32;
+ } u;
+ public:
+ DEFINE_SIZE_MIN (2);
+};
+
+struct BaseGlyphPaintRecord
+{
+ int cmp (hb_codepoint_t g) const
+ { return g < glyphId ? -1 : g > glyphId ? 1 : 0; }
+
+ bool serialize (hb_serialize_context_t *s, const hb_map_t* glyph_map,
+ const void* src_base, hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SERIALIZE (this);
+ auto *out = s->embed (this);
+ if (unlikely (!out)) return_trace (false);
+ if (!s->check_assign (out->glyphId, glyph_map->get (glyphId),
+ HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+
+ return_trace (out->paint.serialize_subset (c, paint, src_base, instancer));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (likely (c->check_struct (this) && paint.sanitize (c, base)));
+ }
+
+ public:
+ HBGlyphID16 glyphId; /* Glyph ID of reference glyph */
+ Offset32To<Paint> paint; /* Offset (from beginning of BaseGlyphPaintRecord array) to Paint,
+ * Typically PaintColrLayers */
+ public:
+ DEFINE_SIZE_STATIC (6);
+};
+
+struct BaseGlyphList : SortedArray32Of<BaseGlyphPaintRecord>
+{
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ const hb_set_t* glyphset = &c->plan->_glyphset_colred;
+
+ for (const auto& _ : as_array ())
+ {
+ unsigned gid = _.glyphId;
+ if (!glyphset->has (gid)) continue;
+
+ if (_.serialize (c->serializer, c->plan->glyph_map, this, c, instancer)) out->len++;
+ else return_trace (false);
+ }
+
+ return_trace (out->len != 0);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (SortedArray32Of<BaseGlyphPaintRecord>::sanitize (c, this));
+ }
+};
+
+struct LayerList : Array32OfOffset32To<Paint>
+{
+ const Paint& get_paint (unsigned i) const
+ { return this+(*this)[i]; }
+
+ bool subset (hb_subset_context_t *c,
+ const VarStoreInstancer &instancer) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ for (const auto& _ : + hb_enumerate (*this)
+ | hb_filter (c->plan->colrv1_layers, hb_first))
+
+ {
+ auto *o = out->serialize_append (c->serializer);
+ if (unlikely (!o) || !o->serialize_subset (c, _.second, this, instancer))
+ return_trace (false);
+ }
+ return_trace (true);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (Array32OfOffset32To<Paint>::sanitize (c, this));
+ }
+};
+
+struct COLR
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_COLR;
+
+ bool has_v0_data () const { return numBaseGlyphs; }
+ bool has_v1_data () const
+ {
+ if (version == 1)
+ return (this+baseGlyphList).len > 0;
+
+ return false;
+ }
+
+ unsigned int get_glyph_layers (hb_codepoint_t glyph,
+ unsigned int start_offset,
+ unsigned int *count, /* IN/OUT. May be NULL. */
+ hb_ot_color_layer_t *layers /* OUT. May be NULL. */) const
+ {
+ const BaseGlyphRecord &record = (this+baseGlyphsZ).bsearch (numBaseGlyphs, glyph);
+
+ hb_array_t<const LayerRecord> all_layers = (this+layersZ).as_array (numLayers);
+ hb_array_t<const LayerRecord> glyph_layers = all_layers.sub_array (record.firstLayerIdx,
+ record.numLayers);
+ if (count)
+ {
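+ // Copy at most *count layer records, starting at start_offset, into the
+ // caller-provided array; sub_array clamps and writes back the actual count.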
+ + glyph_layers.sub_array (start_offset, count)
+ | hb_sink (hb_array (layers, *count))
+ ;
+ }
+ return glyph_layers.length;
+ }
+
+ struct accelerator_t
+ {
+ accelerator_t (hb_face_t *face)
+ { colr = hb_sanitize_context_t ().reference_table<COLR> (face); }
+ ~accelerator_t () { this->colr.destroy (); }
+
+ bool is_valid () { return colr.get_blob ()->length; }
+
+ void closure_glyphs (hb_codepoint_t glyph,
+ hb_set_t *related_ids /* OUT */) const
+ { colr->closure_glyphs (glyph, related_ids); }
+
+ void closure_V0palette_indices (const hb_set_t *glyphs,
+ hb_set_t *palettes /* OUT */) const
+ { colr->closure_V0palette_indices (glyphs, palettes); }
+
+ void closure_forV1 (hb_set_t *glyphset,
+ hb_set_t *layer_indices,
+ hb_set_t *palette_indices) const
+ { colr->closure_forV1 (glyphset, layer_indices, palette_indices); }
+
+ private:
+ hb_blob_ptr_t<COLR> colr;
+ };
+
+ void closure_glyphs (hb_codepoint_t glyph,
+ hb_set_t *related_ids /* OUT */) const
+ {
+ const BaseGlyphRecord *record = get_base_glyph_record (glyph);
+ if (!record) return;
+
+ auto glyph_layers = (this+layersZ).as_array (numLayers).sub_array (record->firstLayerIdx,
+ record->numLayers);
+ if (!glyph_layers.length) return;
+ related_ids->add_array (&glyph_layers[0].glyphId, glyph_layers.length, LayerRecord::min_size);
+ }
+
+ void closure_V0palette_indices (const hb_set_t *glyphs,
+ hb_set_t *palettes /* OUT */) const
+ {
+ if (!numBaseGlyphs || !numLayers) return;
+ hb_array_t<const BaseGlyphRecord> baseGlyphs = (this+baseGlyphsZ).as_array (numBaseGlyphs);
+ hb_array_t<const LayerRecord> all_layers = (this+layersZ).as_array (numLayers);
+
+ for (const BaseGlyphRecord record : baseGlyphs)
+ {
+ if (!glyphs->has (record.glyphId)) continue;
+ hb_array_t<const LayerRecord> glyph_layers = all_layers.sub_array (record.firstLayerIdx,
+ record.numLayers);
+ for (const LayerRecord layer : glyph_layers)
+ palettes->add (layer.colorIdx);
+ }
+ }
+
+ void closure_forV1 (hb_set_t *glyphset,
+ hb_set_t *layer_indices,
+ hb_set_t *palette_indices) const
+ {
+ if (version != 1) return;
+ hb_set_t visited_glyphs;
+
+ hb_colrv1_closure_context_t c (this, &visited_glyphs, layer_indices, palette_indices);
+ const BaseGlyphList &baseglyph_paintrecords = this+baseGlyphList;
+
+ for (const BaseGlyphPaintRecord &baseglyph_paintrecord: baseglyph_paintrecords.iter ())
+ {
+ unsigned gid = baseglyph_paintrecord.glyphId;
+ if (!glyphset->has (gid)) continue;
+
+ const Paint &paint = &baseglyph_paintrecords+baseglyph_paintrecord.paint;
+ paint.dispatch (&c);
+ }
+ hb_set_union (glyphset, &visited_glyphs);
+ }
+
+ const LayerList& get_layerList () const
+ { return (this+layerList); }
+
+ const BaseGlyphList& get_baseglyphList () const
+ { return (this+baseGlyphList); }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ (this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
+ (this+layersZ).sanitize (c, numLayers) &&
+ (version == 0 ||
+ (version == 1 &&
+ baseGlyphList.sanitize (c, this) &&
+ layerList.sanitize (c, this) &&
+ clipList.sanitize (c, this) &&
+ varIdxMap.sanitize (c, this) &&
+ varStore.sanitize (c, this))));
+ }
+
+ template<typename BaseIterator, typename LayerIterator,
+ hb_requires (hb_is_iterator (BaseIterator)),
+ hb_requires (hb_is_iterator (LayerIterator))>
+ bool serialize_V0 (hb_serialize_context_t *c,
+ unsigned version,
+ BaseIterator base_it,
+ LayerIterator layer_it)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (base_it.len () != layer_it.len ()))
+ return_trace (false);
+
+ this->version = version;
+ numLayers = 0;
+ numBaseGlyphs = base_it.len ();
+ if (numBaseGlyphs == 0)
+ {
+ baseGlyphsZ = 0;
+ layersZ = 0;
+ return_trace (true);
+ }
+
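+ // Pack the base glyph records and the layer records as two separate objects
+ // and link them through the baseGlyphsZ / layersZ offsets.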
+ c->push ();
+ for (const hb_item_type<BaseIterator> _ : + base_it.iter ())
+ {
+ auto* record = c->embed (_);
+ if (unlikely (!record)) return_trace (false);
+ record->firstLayerIdx = numLayers;
+ numLayers += record->numLayers;
+ }
+ c->add_link (baseGlyphsZ, c->pop_pack ());
+
+ c->push ();
+ for (const hb_item_type<LayerIterator>& _ : + layer_it.iter ())
+ _.as_array ().copy (c);
+
+ c->add_link (layersZ, c->pop_pack ());
+
+ return_trace (true);
+ }
+
+ const BaseGlyphRecord* get_base_glyph_record (hb_codepoint_t gid) const
+ {
+ const BaseGlyphRecord* record = &(this+baseGlyphsZ).bsearch (numBaseGlyphs, (unsigned int) gid);
+ if (record == &Null (BaseGlyphRecord) ||
+ (record && (hb_codepoint_t) record->glyphId != gid))
+ record = nullptr;
+ return record;
+ }
+
+ const BaseGlyphPaintRecord* get_base_glyph_paintrecord (hb_codepoint_t gid) const
+ {
+ const BaseGlyphPaintRecord* record = &(this+baseGlyphList).bsearch ((unsigned) gid);
+ if ((record && (hb_codepoint_t) record->glyphId != gid))
+ record = nullptr;
+ return record;
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_map_t &reverse_glyph_map = *c->plan->reverse_glyph_map;
+ const hb_set_t& glyphset = c->plan->_glyphset_colred;
+
+ auto base_it =
+ + hb_range (c->plan->num_output_glyphs ())
+ | hb_filter ([&](hb_codepoint_t new_gid)
+ {
+ hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
+ if (glyphset.has (old_gid)) return true;
+ return false;
+ })
+ | hb_map_retains_sorting ([&](hb_codepoint_t new_gid)
+ {
+ hb_codepoint_t old_gid = reverse_glyph_map.get (new_gid);
+
+ const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid);
+ if (unlikely (!old_record))
+ return hb_pair_t<bool, BaseGlyphRecord> (false, Null (BaseGlyphRecord));
+ BaseGlyphRecord new_record = {};
+ new_record.glyphId = new_gid;
+ new_record.numLayers = old_record->numLayers;
+ return hb_pair_t<bool, BaseGlyphRecord> (true, new_record);
+ })
+ | hb_filter (hb_first)
+ | hb_map_retains_sorting (hb_second)
+ ;
+
+ auto layer_it =
+ + hb_range (c->plan->num_output_glyphs ())
+ | hb_map (reverse_glyph_map)
+ | hb_filter (glyphset)
+ | hb_map_retains_sorting ([&](hb_codepoint_t old_gid)
+ {
+ const BaseGlyphRecord* old_record = get_base_glyph_record (old_gid);
+ hb_vector_t<LayerRecord> out_layers;
+
+ if (unlikely (!old_record ||
+ old_record->firstLayerIdx >= numLayers ||
+ old_record->firstLayerIdx + old_record->numLayers > numLayers))
+ return hb_pair_t<bool, hb_vector_t<LayerRecord>> (false, out_layers);
+
+ auto layers = (this+layersZ).as_array (numLayers).sub_array (old_record->firstLayerIdx,
+ old_record->numLayers);
+ out_layers.resize (layers.length);
+ for (unsigned int i = 0; i < layers.length; i++) {
+ out_layers[i] = layers[i];
+ hb_codepoint_t new_gid = 0;
+ if (unlikely (!c->plan->new_gid_for_old_gid (out_layers[i].glyphId, &new_gid)))
+ return hb_pair_t<bool, hb_vector_t<LayerRecord>> (false, out_layers);
+ out_layers[i].glyphId = new_gid;
+ out_layers[i].colorIdx = c->plan->colr_palettes.get (layers[i].colorIdx);
+ }
+
+ return hb_pair_t<bool, hb_vector_t<LayerRecord>> (true, out_layers);
+ })
+ | hb_filter (hb_first)
+ | hb_map_retains_sorting (hb_second)
+ ;
+
+ if (version == 0 && (!base_it || !layer_it))
+ return_trace (false);
+
+ COLR *colr_prime = c->serializer->start_embed<COLR> ();
+ if (unlikely (!c->serializer->extend_min (colr_prime))) return_trace (false);
+
+ if (version == 0)
+ return_trace (colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it));
+
+ auto snap = c->serializer->snapshot ();
+ if (!c->serializer->allocate_size<void> (5 * HBUINT32::static_size)) return_trace (false);
+
+ VarStoreInstancer instancer (varStore ? &(this+varStore) : nullptr,
+ varIdxMap ? &(this+varIdxMap) : nullptr,
+ c->plan->normalized_coords.as_array ());
+
+ if (!colr_prime->baseGlyphList.serialize_subset (c, baseGlyphList, this, instancer))
+ {
+ if (c->serializer->in_error ()) return_trace (false);
+ // no more COLRv1 glyphs: downgrade to version 0
+ c->serializer->revert (snap);
+ return_trace (colr_prime->serialize_V0 (c->serializer, 0, base_it, layer_it));
+ }
+
+ if (!colr_prime->serialize_V0 (c->serializer, version, base_it, layer_it)) return_trace (false);
+
+ colr_prime->layerList.serialize_subset (c, layerList, this, instancer);
+ colr_prime->clipList.serialize_subset (c, clipList, this, instancer);
+ if (!varStore || c->plan->all_axes_pinned)
+ return_trace (true);
+
+ colr_prime->varIdxMap.serialize_copy (c->serializer, varIdxMap, this);
+ colr_prime->varStore.serialize_copy (c->serializer, varStore, this);
+ return_trace (true);
+ }
+
+ const Paint *get_base_glyph_paint (hb_codepoint_t glyph) const
+ {
+ const BaseGlyphList &baseglyph_paintrecords = this+baseGlyphList;
+ const BaseGlyphPaintRecord* record = get_base_glyph_paintrecord (glyph);
+ if (record)
+ {
+ const Paint &paint = &baseglyph_paintrecords+record->paint;
+ return &paint;
+ }
+ else
+ return nullptr;
+ }
+
+#ifndef HB_NO_PAINT
+ bool
+ get_extents (hb_font_t *font, hb_codepoint_t glyph, hb_glyph_extents_t *extents) const
+ {
+ if (version != 1)
+ return false;
+
+ VarStoreInstancer instancer (&(this+varStore),
+ &(this+varIdxMap),
+ hb_array (font->coords, font->num_coords));
+
+ if (get_clip (glyph, extents, instancer))
+ {
+ font->scale_glyph_extents (extents);
+ return true;
+ }
+
+ auto *extents_funcs = hb_paint_extents_get_funcs ();
+ hb_paint_extents_context_t extents_data;
+ bool ret = paint_glyph (font, glyph, extents_funcs, &extents_data, 0, HB_COLOR(0,0,0,0));
+
+ hb_extents_t e = extents_data.get_extents ();
+ if (e.is_void ())
+ {
+ extents->x_bearing = 0;
+ extents->y_bearing = 0;
+ extents->width = 0;
+ extents->height = 0;
+ }
+ else
+ {
+ extents->x_bearing = e.xmin;
+ extents->y_bearing = e.ymax;
+ extents->width = e.xmax - e.xmin;
+ extents->height = e.ymin - e.ymax;
+ }
+
+ return ret;
+ }
+#endif
+
+ bool
+ has_paint_for_glyph (hb_codepoint_t glyph) const
+ {
+ if (version == 1)
+ {
+ const Paint *paint = get_base_glyph_paint (glyph);
+
+ return paint != nullptr;
+ }
+
+ return false;
+ }
+
+ bool get_clip (hb_codepoint_t glyph,
+ hb_glyph_extents_t *extents,
+ const VarStoreInstancer instancer) const
+ {
+ return (this+clipList).get_extents (glyph,
+ extents,
+ instancer);
+ }
+
+#ifndef HB_NO_PAINT
+ bool
+ paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data, unsigned int palette_index, hb_color_t foreground, bool clip = true) const
+ {
+ VarStoreInstancer instancer (&(this+varStore),
+ &(this+varIdxMap),
+ hb_array (font->coords, font->num_coords));
+ hb_paint_context_t c (this, funcs, data, font, palette_index, foreground, instancer);
+
+ if (version == 1)
+ {
+ const Paint *paint = get_base_glyph_paint (glyph);
+ if (paint)
+ {
+ // COLRv1 glyph
+
+ VarStoreInstancer instancer (&(this+varStore),
+ &(this+varIdxMap),
+ hb_array (font->coords, font->num_coords));
+
+ bool is_bounded = true;
+ if (clip)
+ {
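+ // Prefer the font's ClipList box; if there is none, compute a bounding box by
+ // painting once with the extents-collecting paint funcs.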
+ hb_glyph_extents_t extents;
+ if (get_clip (glyph, &extents, instancer))
+ {
+ font->scale_glyph_extents (&extents);
+ c.funcs->push_clip_rectangle (c.data,
+ extents.x_bearing,
+ extents.y_bearing + extents.height,
+ extents.x_bearing + extents.width,
+ extents.y_bearing);
+ }
+ else
+ {
+ auto *extents_funcs = hb_paint_extents_get_funcs ();
+ hb_paint_extents_context_t extents_data;
+
+ paint_glyph (font, glyph,
+ extents_funcs, &extents_data,
+ palette_index, foreground,
+ false);
+
+ hb_extents_t extents = extents_data.get_extents ();
+ is_bounded = extents_data.is_bounded ();
+
+ c.funcs->push_clip_rectangle (c.data,
+ extents.xmin,
+ extents.ymin,
+ extents.xmax,
+ extents.ymax);
+ }
+ }
+
+ c.funcs->push_root_transform (c.data, font);
+
+ if (is_bounded)
+ c.recurse (*paint);
+
+ c.funcs->pop_transform (c.data);
+
+ if (clip)
+ c.funcs->pop_clip (c.data);
+
+ return true;
+ }
+ }
+
+ const BaseGlyphRecord *record = get_base_glyph_record (glyph);
+ if (record && ((hb_codepoint_t) record->glyphId == glyph))
+ {
+ // COLRv0 glyph
+ for (const auto &r : (this+layersZ).as_array (numLayers)
+ .sub_array (record->firstLayerIdx, record->numLayers))
+ {
+ hb_bool_t is_foreground;
+ hb_color_t color = c.get_color (r.colorIdx, 1., &is_foreground);
+ c.funcs->push_clip_glyph (c.data, r.glyphId, c.font);
+ c.funcs->color (c.data, is_foreground, color);
+ c.funcs->pop_clip (c.data);
+ }
+
+ return true;
+ }
+
+ return false;
+ }
+#endif
+
+ protected:
+ HBUINT16 version; /* Table version number (starts at 0). */
+ HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */
+ NNOffset32To<SortedUnsizedArrayOf<BaseGlyphRecord>>
+ baseGlyphsZ; /* Offset to Base Glyph records. */
+ NNOffset32To<UnsizedArrayOf<LayerRecord>>
+ layersZ; /* Offset to Layer Records. */
+ HBUINT16 numLayers; /* Number of Layer Records. */
+ // Version-1 additions
+ Offset32To<BaseGlyphList> baseGlyphList;
+ Offset32To<LayerList> layerList;
+ Offset32To<ClipList> clipList; // Offset to ClipList table (may be NULL)
+ Offset32To<DeltaSetIndexMap> varIdxMap; // Offset to DeltaSetIndexMap table (may be NULL)
+ Offset32To<VariationStore> varStore;
+ public:
+ DEFINE_SIZE_MIN (14);
+};
+
+struct COLR_accelerator_t : COLR::accelerator_t {
+ COLR_accelerator_t (hb_face_t *face) : COLR::accelerator_t (face) {}
+};
+
+void
+hb_paint_context_t::recurse (const Paint &paint)
+{
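+ // Budget both the nesting depth and the total number of visited edges so that
+ // cyclic or degenerate paint graphs still terminate.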
+ if (unlikely (depth_left <= 0 || edge_count <= 0)) return;
+ depth_left--;
+ edge_count--;
+ paint.dispatch (this);
+ depth_left++;
+}
+
+void PaintColrLayers::paint_glyph (hb_paint_context_t *c) const
+{
+ const LayerList &paint_offset_lists = c->get_colr_table ()->get_layerList ();
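+ // Paint each referenced layer into its own group and composite it over the
+ // result with SRC_OVER.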
+ for (unsigned i = firstLayerIndex; i < firstLayerIndex + numLayers; i++)
+ {
+ const Paint &paint = paint_offset_lists.get_paint (i);
+ c->funcs->push_group (c->data);
+ c->recurse (paint);
+ c->funcs->pop_group (c->data, HB_PAINT_COMPOSITE_MODE_SRC_OVER);
+ }
+}
+
+void PaintColrGlyph::paint_glyph (hb_paint_context_t *c) const
+{
+ const COLR *colr_table = c->get_colr_table ();
+ const Paint *paint = colr_table->get_base_glyph_paint (gid);
+
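+ // Apply the referenced base glyph's clip box, if it has one, around its whole
+ // paint graph.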
+ hb_glyph_extents_t extents = {0};
+ bool has_clip_box = colr_table->get_clip (gid, &extents, c->instancer);
+
+ if (has_clip_box)
+ c->funcs->push_clip_rectangle (c->data,
+ extents.x_bearing,
+ extents.y_bearing + extents.height,
+ extents.x_bearing + extents.width,
+ extents.y_bearing);
+
+ if (paint)
+ c->recurse (*paint);
+
+ if (has_clip_box)
+ c->funcs->pop_clip (c->data);
+}
+
+} /* namespace OT */
+
+#endif /* OT_COLOR_COLR_COLR_HH */
diff --git a/gfx/harfbuzz/src/OT/Color/COLR/colrv1-closure.hh b/gfx/harfbuzz/src/OT/Color/COLR/colrv1-closure.hh
new file mode 100644
index 0000000000..705863d4ad
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Color/COLR/colrv1-closure.hh
@@ -0,0 +1,107 @@
+/*
+ * Copyright © 2018 Ebrahim Byagowi
+ * Copyright © 2020 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ */
+
+#ifndef OT_COLOR_COLR_COLRV1_CLOSURE_HH
+#define OT_COLOR_COLR_COLRV1_CLOSURE_HH
+
+#include "../../../hb-open-type.hh"
+#include "COLR.hh"
+
+/*
+ * COLR -- Color
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/colr
+ */
+namespace OT {
+
+HB_INTERNAL void PaintColrLayers::closurev1 (hb_colrv1_closure_context_t* c) const
+{
+ c->add_layer_indices (firstLayerIndex, numLayers);
+ const LayerList &paint_offset_lists = c->get_colr_table ()->get_layerList ();
+ for (unsigned i = firstLayerIndex; i < firstLayerIndex + numLayers; i++)
+ {
+ const Paint &paint = std::addressof (paint_offset_lists) + paint_offset_lists[i];
+ paint.dispatch (c);
+ }
+}
+
+HB_INTERNAL void PaintGlyph::closurev1 (hb_colrv1_closure_context_t* c) const
+{
+ c->add_glyph (gid);
+ (this+paint).dispatch (c);
+}
+
+HB_INTERNAL void PaintColrGlyph::closurev1 (hb_colrv1_closure_context_t* c) const
+{
+ const COLR *colr_table = c->get_colr_table ();
+ const BaseGlyphPaintRecord* baseglyph_paintrecord = colr_table->get_base_glyph_paintrecord (gid);
+ if (!baseglyph_paintrecord) return;
+ c->add_glyph (gid);
+
+ const BaseGlyphList &baseglyph_list = colr_table->get_baseglyphList ();
+ (&baseglyph_list+baseglyph_paintrecord->paint).dispatch (c);
+}
+
+template <template<typename> class Var>
+HB_INTERNAL void PaintTransform<Var>::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintTranslate::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintScale::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintScaleAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintScaleUniform::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintScaleUniformAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintRotate::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintRotateAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintSkew::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintSkewAroundCenter::closurev1 (hb_colrv1_closure_context_t* c) const
+{ (this+src).dispatch (c); }
+
+HB_INTERNAL void PaintComposite::closurev1 (hb_colrv1_closure_context_t* c) const
+{
+ (this+src).dispatch (c);
+ (this+backdrop).dispatch (c);
+}
+
+} /* namespace OT */
+
+
+#endif /* OT_COLOR_COLR_COLRV1_CLOSURE_HH */
diff --git a/gfx/harfbuzz/src/OT/Color/CPAL/CPAL.hh b/gfx/harfbuzz/src/OT/Color/CPAL/CPAL.hh
new file mode 100644
index 0000000000..c07716c1c9
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Color/CPAL/CPAL.hh
@@ -0,0 +1,350 @@
+/*
+ * Copyright © 2016 Google, Inc.
+ * Copyright © 2018 Ebrahim Byagowi
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Sascha Brawer
+ */
+
+#ifndef OT_COLOR_CPAL_CPAL_HH
+#define OT_COLOR_CPAL_CPAL_HH
+
+#include "../../../hb-open-type.hh"
+#include "../../../hb-ot-color.h"
+#include "../../../hb-ot-name.h"
+
+
+/*
+ * CPAL -- Color Palette
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/cpal
+ */
+#define HB_OT_TAG_CPAL HB_TAG('C','P','A','L')
+
+namespace OT {
+
+
+struct CPALV1Tail
+{
+ friend struct CPAL;
+
+ private:
+ hb_ot_color_palette_flags_t get_palette_flags (const void *base,
+ unsigned int palette_index,
+ unsigned int palette_count) const
+ {
+ if (!paletteFlagsZ) return HB_OT_COLOR_PALETTE_FLAG_DEFAULT;
+ return (hb_ot_color_palette_flags_t) (uint32_t)
+ (base+paletteFlagsZ).as_array (palette_count)[palette_index];
+ }
+
+ hb_ot_name_id_t get_palette_name_id (const void *base,
+ unsigned int palette_index,
+ unsigned int palette_count) const
+ {
+ if (!paletteLabelsZ) return HB_OT_NAME_ID_INVALID;
+ return (base+paletteLabelsZ).as_array (palette_count)[palette_index];
+ }
+
+ hb_ot_name_id_t get_color_name_id (const void *base,
+ unsigned int color_index,
+ unsigned int color_count) const
+ {
+ if (!colorLabelsZ) return HB_OT_NAME_ID_INVALID;
+ return (base+colorLabelsZ).as_array (color_count)[color_index];
+ }
+
+ public:
+ void collect_name_ids (const void *base,
+ unsigned palette_count,
+ unsigned color_count,
+ const hb_map_t *color_index_map,
+ hb_set_t *nameids_to_retain /* OUT */) const
+ {
+ if (paletteLabelsZ)
+ {
+ + (base+paletteLabelsZ).as_array (palette_count)
+ | hb_sink (nameids_to_retain)
+ ;
+ }
+
+ if (colorLabelsZ)
+ {
+ const hb_array_t<const NameID> colorLabels = (base+colorLabelsZ).as_array (color_count);
+ for (unsigned i = 0; i < color_count; i++)
+ {
+ if (!color_index_map->has (i)) continue;
+ nameids_to_retain->add (colorLabels[i]);
+ }
+ }
+ }
+
+ bool serialize (hb_serialize_context_t *c,
+ unsigned palette_count,
+ unsigned color_count,
+ const void *base,
+ const hb_map_t *color_index_map) const
+ {
+ TRACE_SERIALIZE (this);
+ auto *out = c->allocate_size<CPALV1Tail> (static_size);
+ if (unlikely (!out)) return_trace (false);
+
+ out->paletteFlagsZ = 0;
+ if (paletteFlagsZ)
+ out->paletteFlagsZ.serialize_copy (c, paletteFlagsZ, base, 0, hb_serialize_context_t::Head, palette_count);
+
+ out->paletteLabelsZ = 0;
+ if (paletteLabelsZ)
+ out->paletteLabelsZ.serialize_copy (c, paletteLabelsZ, base, 0, hb_serialize_context_t::Head, palette_count);
+
+ const hb_array_t<const NameID> colorLabels = (base+colorLabelsZ).as_array (color_count);
+ if (colorLabelsZ)
+ {
+ c->push ();
+ for (unsigned i = 0; i < color_count; i++)
+ {
+ if (!color_index_map->has (i)) continue;
+ if (!c->copy<NameID> (colorLabels[i]))
+ {
+ c->pop_discard ();
+ return_trace (false);
+ }
+ }
+ c->add_link (out->colorLabelsZ, c->pop_pack ());
+ }
+ return_trace (true);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c,
+ const void *base,
+ unsigned int palette_count,
+ unsigned int color_count) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ (!paletteFlagsZ || (base+paletteFlagsZ).sanitize (c, palette_count)) &&
+ (!paletteLabelsZ || (base+paletteLabelsZ).sanitize (c, palette_count)) &&
+ (!colorLabelsZ || (base+colorLabelsZ).sanitize (c, color_count)));
+ }
+
+ protected:
+ // TODO(garretrieger): these offsets can hold nulls so we should not be using non-null offsets
+ // here. Currently they are needed since UnsizedArrayOf doesn't define null_size
+ NNOffset32To<UnsizedArrayOf<HBUINT32>>
+ paletteFlagsZ; /* Offset from the beginning of CPAL table to
+ * the Palette Type Array. Set to 0 if no array
+ * is provided. */
+ NNOffset32To<UnsizedArrayOf<NameID>>
+ paletteLabelsZ; /* Offset from the beginning of CPAL table to
+ * the palette labels array. Set to 0 if no
+ * array is provided. */
+ NNOffset32To<UnsizedArrayOf<NameID>>
+ colorLabelsZ; /* Offset from the beginning of CPAL table to
+ * the color labels array. Set to 0
+ * if no array is provided. */
+ public:
+ DEFINE_SIZE_STATIC (12);
+};
+
+typedef HBUINT32 BGRAColor;
+
+struct CPAL
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_CPAL;
+
+ bool has_data () const { return numPalettes; }
+
+ unsigned int get_size () const
+ { return min_size + numPalettes * sizeof (colorRecordIndicesZ[0]); }
+
+ unsigned int get_palette_count () const { return numPalettes; }
+ unsigned int get_color_count () const { return numColors; }
+
+ hb_ot_color_palette_flags_t get_palette_flags (unsigned int palette_index) const
+ { return v1 ().get_palette_flags (this, palette_index, numPalettes); }
+
+ hb_ot_name_id_t get_palette_name_id (unsigned int palette_index) const
+ { return v1 ().get_palette_name_id (this, palette_index, numPalettes); }
+
+ hb_ot_name_id_t get_color_name_id (unsigned int color_index) const
+ { return v1 ().get_color_name_id (this, color_index, numColors); }
+
+ unsigned int get_palette_colors (unsigned int palette_index,
+ unsigned int start_offset,
+ unsigned int *color_count, /* IN/OUT. May be NULL. */
+ hb_color_t *colors /* OUT. May be NULL. */) const
+ {
+ if (unlikely (palette_index >= numPalettes))
+ {
+ if (color_count) *color_count = 0;
+ return 0;
+ }
+ unsigned int start_index = colorRecordIndicesZ[palette_index];
+ hb_array_t<const BGRAColor> all_colors ((this+colorRecordsZ).arrayZ, numColorRecords);
+ hb_array_t<const BGRAColor> palette_colors = all_colors.sub_array (start_index,
+ numColors);
+ if (color_count)
+ {
+ + palette_colors.sub_array (start_offset, color_count)
+ | hb_sink (hb_array (colors, *color_count))
+ ;
+ }
+ return numColors;
+ }
+
+ void collect_name_ids (const hb_map_t *color_index_map,
+ hb_set_t *nameids_to_retain /* OUT */) const
+ {
+ if (version == 1)
+ v1 ().collect_name_ids (this, numPalettes, numColors, color_index_map, nameids_to_retain);
+ }
+
+ private:
+ const CPALV1Tail& v1 () const
+ {
+ if (version == 0) return Null (CPALV1Tail);
+ return StructAfter<CPALV1Tail> (*this);
+ }
+
+ public:
+ bool serialize (hb_serialize_context_t *c,
+ const hb_array_t<const HBUINT16> &color_record_indices,
+ const hb_array_t<const BGRAColor> &color_records,
+ const hb_vector_t<unsigned>& first_color_index_for_layer,
+ const hb_map_t& first_color_to_layer_index,
+ const hb_set_t &retained_color_indices) const
+ {
+ TRACE_SERIALIZE (this);
+
+ // TODO(grieger): limit total final size.
+
+ for (const auto idx : color_record_indices)
+ {
+ hb_codepoint_t layer_index = first_color_to_layer_index[idx];
+
+ HBUINT16 new_idx;
+ new_idx = layer_index * retained_color_indices.get_population ();
+ if (!c->copy<HBUINT16> (new_idx)) return_trace (false);
+ }
+
+ c->push ();
+ for (unsigned first_color_index : first_color_index_for_layer)
+ {
+ for (hb_codepoint_t color_index : retained_color_indices)
+ {
+ if (!c->copy<BGRAColor> (color_records[first_color_index + color_index]))
+ {
+ c->pop_discard ();
+ return_trace (false);
+ }
+ }
+ }
+
+ c->add_link (colorRecordsZ, c->pop_pack ());
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ if (!numPalettes) return_trace (false);
+
+ const hb_map_t *color_index_map = &c->plan->colr_palettes;
+ if (color_index_map->is_empty ()) return_trace (false);
+
+ hb_set_t retained_color_indices;
+ for (const auto _ : color_index_map->keys ())
+ {
+ if (_ == 0xFFFF) continue;
+ retained_color_indices.add (_);
+ }
+ if (retained_color_indices.is_empty ()) return_trace (false);
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+
+ out->version = version;
+ out->numColors = retained_color_indices.get_population ();
+ out->numPalettes = numPalettes;
+
+ hb_vector_t<unsigned> first_color_index_for_layer;
+ hb_map_t first_color_to_layer_index;
+
+ const hb_array_t<const HBUINT16> colorRecordIndices = colorRecordIndicesZ.as_array (numPalettes);
+ for (const auto first_color_record_idx : colorRecordIndices)
+ {
+ if (first_color_to_layer_index.has (first_color_record_idx)) continue;
+
+ first_color_index_for_layer.push (first_color_record_idx);
+ first_color_to_layer_index.set (first_color_record_idx,
+ first_color_index_for_layer.length - 1);
+ }
+
+ out->numColorRecords = first_color_index_for_layer.length
+ * retained_color_indices.get_population ();
+
+ const hb_array_t<const BGRAColor> color_records = (this+colorRecordsZ).as_array (numColorRecords);
+ if (!out->serialize (c->serializer,
+ colorRecordIndices,
+ color_records,
+ first_color_index_for_layer,
+ first_color_to_layer_index,
+ retained_color_indices))
+ return_trace (false);
+
+ if (version == 1)
+ return_trace (v1 ().serialize (c->serializer, numPalettes, numColors, this, color_index_map));
+
+ return_trace (true);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ (this+colorRecordsZ).sanitize (c, numColorRecords) &&
+ colorRecordIndicesZ.sanitize (c, numPalettes) &&
+ (version == 0 || v1 ().sanitize (c, this, numPalettes, numColors)));
+ }
+
+ protected:
+ HBUINT16 version; /* Table version number */
+ /* Version 0 */
+ HBUINT16 numColors; /* Number of colors in each palette. */
+ HBUINT16 numPalettes; /* Number of palettes in the table. */
+ HBUINT16 numColorRecords; /* Total number of color records, combined for
+ * all palettes. */
+ NNOffset32To<UnsizedArrayOf<BGRAColor>>
+ colorRecordsZ; /* Offset from the beginning of CPAL table to
+ * the first ColorRecord. */
+ UnsizedArrayOf<HBUINT16>
+ colorRecordIndicesZ; /* Index of each palette’s first color record in
+ * the combined color record array. */
+/*CPALV1Tail v1;*/
+ public:
+ DEFINE_SIZE_ARRAY (12, colorRecordIndicesZ);
+};
+
+} /* namespace OT */
+
+
+#endif /* OT_COLOR_CPAL_CPAL_HH */
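CPAL keeps the color records of all palettes in one combined array: colorRecordIndicesZ[palette] is the index of that palette's first record, and get_palette_colors() copies numColors entries starting there, using the usual start_offset / in-out count convention. Applications normally go through the public HarfBuzz API rather than this struct; a hedged usage sketch (the helper name is illustrative):

    #include <hb.h>
    #include <hb-ot.h>
    #include <stdio.h>
    #include <vector>

    static void
    dump_palette (hb_face_t *face, unsigned palette_index)
    {
      // First call: query the total number of colors in the palette.
      unsigned total = hb_ot_color_palette_get_colors (face, palette_index, 0, nullptr, nullptr);
      std::vector<hb_color_t> colors (total);
      unsigned written = total;
      // Second call: fetch the colors themselves, exposed as hb_color_t.
      hb_ot_color_palette_get_colors (face, palette_index, 0, &written, colors.data ());
      for (unsigned i = 0; i < written; i++)
        printf ("#%02x%02x%02x%02x\n",
                hb_color_get_red (colors[i]), hb_color_get_green (colors[i]),
                hb_color_get_blue (colors[i]), hb_color_get_alpha (colors[i]));
    }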
diff --git a/gfx/harfbuzz/src/OT/Color/sbix/sbix.hh b/gfx/harfbuzz/src/OT/Color/sbix/sbix.hh
new file mode 100644
index 0000000000..46ad3fd58e
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Color/sbix/sbix.hh
@@ -0,0 +1,452 @@
+/*
+ * Copyright © 2018 Ebrahim Byagowi
+ * Copyright © 2020 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Calder Kitagawa
+ */
+
+#ifndef OT_COLOR_SBIX_SBIX_HH
+#define OT_COLOR_SBIX_SBIX_HH
+
+#include "../../../hb-open-type.hh"
+#include "../../../hb-paint.hh"
+
+/*
+ * sbix -- Standard Bitmap Graphics
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/sbix
+ * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6sbix.html
+ */
+#define HB_OT_TAG_sbix HB_TAG('s','b','i','x')
+
+
+namespace OT {
+
+
+struct SBIXGlyph
+{
+ SBIXGlyph* copy (hb_serialize_context_t *c, unsigned int data_length) const
+ {
+ TRACE_SERIALIZE (this);
+ SBIXGlyph* new_glyph = c->start_embed<SBIXGlyph> ();
+ if (unlikely (!new_glyph)) return_trace (nullptr);
+ if (unlikely (!c->extend_min (new_glyph))) return_trace (nullptr);
+
+ new_glyph->xOffset = xOffset;
+ new_glyph->yOffset = yOffset;
+ new_glyph->graphicType = graphicType;
+ data.copy (c, data_length);
+ return_trace (new_glyph);
+ }
+
+ HBINT16 xOffset; /* The horizontal (x-axis) offset from the left
+ * edge of the graphic to the glyph’s origin.
+ * That is, the x-coordinate of the point on the
+ * baseline at the left edge of the glyph. */
+ HBINT16 yOffset; /* The vertical (y-axis) offset from the bottom
+ * edge of the graphic to the glyph’s origin.
+ * That is, the y-coordinate of the point on the
+ * baseline at the left edge of the glyph. */
+ Tag graphicType; /* Indicates the format of the embedded graphic
+ * data: one of 'jpg ', 'png ' or 'tiff', or the
+ * special format 'dupe'. */
+ UnsizedArrayOf<HBUINT8>
+ data; /* The actual embedded graphic data. The total
+ * length is inferred from sequential entries in
+ * the glyphDataOffsets array and the fixed size
+ * (8 bytes) of the preceding fields. */
+ public:
+ DEFINE_SIZE_ARRAY (8, data);
+};
+
+struct SBIXStrike
+{
+ static unsigned int get_size (unsigned num_glyphs)
+ { return min_size + num_glyphs * HBUINT32::static_size; }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ imageOffsetsZ.sanitize_shallow (c, c->get_num_glyphs () + 1));
+ }
+
+ hb_blob_t *get_glyph_blob (unsigned int glyph_id,
+ hb_blob_t *sbix_blob,
+ hb_tag_t file_type,
+ int *x_offset,
+ int *y_offset,
+ unsigned int num_glyphs,
+ unsigned int *strike_ppem) const
+ {
+ if (unlikely (!ppem)) return hb_blob_get_empty (); /* To get Null() object out of the way. */
+
+ unsigned int retry_count = 8;
+ unsigned int sbix_len = sbix_blob->length;
+ unsigned int strike_offset = (const char *) this - (const char *) sbix_blob->data;
+ assert (strike_offset < sbix_len);
+
+ retry:
+ if (unlikely (glyph_id >= num_glyphs ||
+ imageOffsetsZ[glyph_id + 1] <= imageOffsetsZ[glyph_id] ||
+ imageOffsetsZ[glyph_id + 1] - imageOffsetsZ[glyph_id] <= SBIXGlyph::min_size ||
+ (unsigned int) imageOffsetsZ[glyph_id + 1] > sbix_len - strike_offset))
+ return hb_blob_get_empty ();
+
+ unsigned int glyph_offset = strike_offset + (unsigned int) imageOffsetsZ[glyph_id] + SBIXGlyph::min_size;
+ unsigned int glyph_length = imageOffsetsZ[glyph_id + 1] - imageOffsetsZ[glyph_id] - SBIXGlyph::min_size;
+
+ const SBIXGlyph *glyph = &(this+imageOffsetsZ[glyph_id]);
+
+ if (glyph->graphicType == HB_TAG ('d','u','p','e'))
+ {
+ if (glyph_length >= 2)
+ {
+ glyph_id = *((HBUINT16 *) &glyph->data);
+ if (retry_count--)
+ goto retry;
+ }
+ return hb_blob_get_empty ();
+ }
+
+ if (unlikely (file_type != glyph->graphicType))
+ return hb_blob_get_empty ();
+
+ if (strike_ppem) *strike_ppem = ppem;
+ if (x_offset) *x_offset = glyph->xOffset;
+ if (y_offset) *y_offset = glyph->yOffset;
+ return hb_blob_create_sub_blob (sbix_blob, glyph_offset, glyph_length);
+ }
+
+ bool subset (hb_subset_context_t *c, unsigned int available_len) const
+ {
+ TRACE_SUBSET (this);
+ unsigned int num_output_glyphs = c->plan->num_output_glyphs ();
+
+ auto* out = c->serializer->start_embed<SBIXStrike> ();
+ if (unlikely (!out)) return_trace (false);
+ auto snap = c->serializer->snapshot ();
+ if (unlikely (!c->serializer->extend (out, num_output_glyphs + 1))) return_trace (false);
+ out->ppem = ppem;
+ out->resolution = resolution;
+ HBUINT32 head;
+ head = get_size (num_output_glyphs + 1);
+
+ bool has_glyphs = false;
+ for (unsigned new_gid = 0; new_gid < num_output_glyphs; new_gid++)
+ {
+ hb_codepoint_t old_gid;
+ if (!c->plan->old_gid_for_new_gid (new_gid, &old_gid) ||
+ unlikely (imageOffsetsZ[old_gid].is_null () ||
+ imageOffsetsZ[old_gid + 1].is_null () ||
+ imageOffsetsZ[old_gid + 1] <= imageOffsetsZ[old_gid] ||
+ imageOffsetsZ[old_gid + 1] - imageOffsetsZ[old_gid] <= SBIXGlyph::min_size) ||
+ (unsigned int) imageOffsetsZ[old_gid + 1] > available_len)
+ {
+ out->imageOffsetsZ[new_gid] = head;
+ continue;
+ }
+ has_glyphs = true;
+ unsigned int delta = imageOffsetsZ[old_gid + 1] - imageOffsetsZ[old_gid];
+ unsigned int glyph_data_length = delta - SBIXGlyph::min_size;
+ if (!(this+imageOffsetsZ[old_gid]).copy (c->serializer, glyph_data_length))
+ return_trace (false);
+ out->imageOffsetsZ[new_gid] = head;
+ head += delta;
+ }
+ if (has_glyphs)
+ out->imageOffsetsZ[num_output_glyphs] = head;
+ else
+ c->serializer->revert (snap);
+ return_trace (has_glyphs);
+ }
+
+ public:
+ HBUINT16 ppem; /* The PPEM size for which this strike was designed. */
+ HBUINT16 resolution; /* The device pixel density (in PPI) for which this
+ * strike was designed. (E.g., 96 PPI, 192 PPI.) */
+ protected:
+ UnsizedArrayOf<Offset32To<SBIXGlyph>>
+ imageOffsetsZ; /* Offset from the beginning of the strike data header
+ * to bitmap data for an individual glyph ID. */
+ public:
+ DEFINE_SIZE_ARRAY (4, imageOffsetsZ);
+};
+
+struct sbix
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_sbix;
+
+ bool has_data () const { return version; }
+
+ const SBIXStrike &get_strike (unsigned int i) const { return this+strikes[i]; }
+
+ struct accelerator_t
+ {
+ accelerator_t (hb_face_t *face)
+ {
+ table = hb_sanitize_context_t ().reference_table<sbix> (face);
+ num_glyphs = face->get_num_glyphs ();
+ }
+ ~accelerator_t () { table.destroy (); }
+
+ bool has_data () const { return table->has_data (); }
+
+ bool get_extents (hb_font_t *font,
+ hb_codepoint_t glyph,
+ hb_glyph_extents_t *extents,
+ bool scale = true) const
+ {
+ /* We only support PNG right now, and following function checks type. */
+ return get_png_extents (font, glyph, extents, scale);
+ }
+
+ hb_blob_t *reference_png (hb_font_t *font,
+ hb_codepoint_t glyph_id,
+ int *x_offset,
+ int *y_offset,
+ unsigned int *available_ppem) const
+ {
+ return choose_strike (font).get_glyph_blob (glyph_id, table.get_blob (),
+ HB_TAG ('p','n','g',' '),
+ x_offset, y_offset,
+ num_glyphs, available_ppem);
+ }
+
+ bool paint_glyph (hb_font_t *font, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data) const
+ {
+ if (!has_data ())
+ return false;
+
+ int x_offset = 0, y_offset = 0;
+ unsigned int strike_ppem = 0;
+ hb_blob_t *blob = reference_png (font, glyph, &x_offset, &y_offset, &strike_ppem);
+ hb_glyph_extents_t extents;
+ hb_glyph_extents_t pixel_extents;
+
+ if (blob == hb_blob_get_empty ())
+ return false;
+
+ if (!hb_font_get_glyph_extents (font, glyph, &extents))
+ return false;
+
+ if (unlikely (!get_extents (font, glyph, &pixel_extents, false)))
+ return false;
+
+ bool ret = funcs->image (data,
+ blob,
+ pixel_extents.width, -pixel_extents.height,
+ HB_PAINT_IMAGE_FORMAT_PNG,
+ font->slant_xy,
+ &extents);
+
+ hb_blob_destroy (blob);
+ return ret;
+ }
+
+ private:
+
+ const SBIXStrike &choose_strike (hb_font_t *font) const
+ {
+ unsigned count = table->strikes.len;
+ if (unlikely (!count))
+ return Null (SBIXStrike);
+
+ unsigned int requested_ppem = hb_max (font->x_ppem, font->y_ppem);
+ if (!requested_ppem)
+ requested_ppem = 1<<30; /* Choose largest strike. */
+ /* TODO Add DPI sensitivity as well? */
+ unsigned int best_i = 0;
+ unsigned int best_ppem = table->get_strike (0).ppem;
+
+ for (unsigned int i = 1; i < count; i++)
+ {
+ unsigned int ppem = (table->get_strike (i)).ppem;
+ if ((requested_ppem <= ppem && ppem < best_ppem) ||
+ (requested_ppem > best_ppem && ppem > best_ppem))
+ {
+ best_i = i;
+ best_ppem = ppem;
+ }
+ }
+
+ return table->get_strike (best_i);
+ }
+
+ struct PNGHeader
+ {
+ HBUINT8 signature[8];
+ struct
+ {
+ struct
+ {
+ HBUINT32 length;
+ Tag type;
+ } header;
+ HBUINT32 width;
+ HBUINT32 height;
+ HBUINT8 bitDepth;
+ HBUINT8 colorType;
+ HBUINT8 compressionMethod;
+ HBUINT8 filterMethod;
+ HBUINT8 interlaceMethod;
+ } IHDR;
+
+ public:
+ DEFINE_SIZE_STATIC (29);
+ };
+
+ bool get_png_extents (hb_font_t *font,
+ hb_codepoint_t glyph,
+ hb_glyph_extents_t *extents,
+ bool scale = true) const
+ {
+ /* Following code is safe to call even without data.
+ * But faster to short-circuit. */
+ if (!has_data ())
+ return false;
+
+ int x_offset = 0, y_offset = 0;
+ unsigned int strike_ppem = 0;
+ hb_blob_t *blob = reference_png (font, glyph, &x_offset, &y_offset, &strike_ppem);
+
+ const PNGHeader &png = *blob->as<PNGHeader>();
+
+ if (png.IHDR.height >= 65536 || png.IHDR.width >= 65536)
+ {
+ hb_blob_destroy (blob);
+ return false;
+ }
+
+ extents->x_bearing = x_offset;
+ extents->y_bearing = png.IHDR.height + y_offset;
+ extents->width = png.IHDR.width;
+ extents->height = -1 * png.IHDR.height;
+
+ /* Convert to font units. */
+ if (strike_ppem && scale)
+ {
+ float scale = font->face->get_upem () / (float) strike_ppem;
+ extents->x_bearing = roundf (extents->x_bearing * scale);
+ extents->y_bearing = roundf (extents->y_bearing * scale);
+ extents->width = roundf (extents->width * scale);
+ extents->height = roundf (extents->height * scale);
+ }
+
+ if (scale)
+ font->scale_glyph_extents (extents);
+
+ hb_blob_destroy (blob);
+
+ return strike_ppem;
+ }
+
+ private:
+ hb_blob_ptr_t<sbix> table;
+
+ unsigned int num_glyphs;
+ };
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (likely (c->check_struct (this) &&
+ version >= 1 &&
+ strikes.sanitize (c, this)));
+ }
+
+ bool
+ add_strike (hb_subset_context_t *c, unsigned i) const
+ {
+ if (strikes[i].is_null () || c->source_blob->length < (unsigned) strikes[i])
+ return false;
+
+ return (this+strikes[i]).subset (c, c->source_blob->length - (unsigned) strikes[i]);
+ }
+
+ bool serialize_strike_offsets (hb_subset_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+
+ auto *out = c->serializer->start_embed<Array32OfOffset32To<SBIXStrike>> ();
+ if (unlikely (!out)) return_trace (false);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ hb_vector_t<Offset32To<SBIXStrike>*> new_strikes;
+ hb_vector_t<hb_serialize_context_t::objidx_t> objidxs;
+ for (int i = strikes.len - 1; i >= 0; --i)
+ {
+ auto* o = out->serialize_append (c->serializer);
+ if (unlikely (!o)) return_trace (false);
+ *o = 0;
+ auto snap = c->serializer->snapshot ();
+ c->serializer->push ();
+ bool ret = add_strike (c, i);
+ if (!ret)
+ {
+ c->serializer->pop_discard ();
+ out->pop ();
+ c->serializer->revert (snap);
+ }
+ else
+ {
+ objidxs.push (c->serializer->pop_pack ());
+ new_strikes.push (o);
+ }
+ }
+ for (unsigned int i = 0; i < new_strikes.length; ++i)
+ c->serializer->add_link (*new_strikes[i], objidxs[new_strikes.length - 1 - i]);
+
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t* c) const
+ {
+ TRACE_SUBSET (this);
+
+ sbix *sbix_prime = c->serializer->start_embed<sbix> ();
+ if (unlikely (!sbix_prime)) return_trace (false);
+ if (unlikely (!c->serializer->embed (this->version))) return_trace (false);
+ if (unlikely (!c->serializer->embed (this->flags))) return_trace (false);
+
+ return_trace (serialize_strike_offsets (c));
+ }
+
+ protected:
+ HBUINT16 version; /* Table version number — set to 1 */
+ HBUINT16 flags; /* Bit 0: Set to 1. Bit 1: Draw outlines.
+ * Bits 2 to 15: reserved (set to 0). */
+ Array32OfOffset32To<SBIXStrike>
+ strikes; /* Offsets from the beginning of the 'sbix'
+ * table to data for each individual bitmap strike. */
+ public:
+ DEFINE_SIZE_ARRAY (8, strikes);
+};
+
+struct sbix_accelerator_t : sbix::accelerator_t {
+ sbix_accelerator_t (hb_face_t *face) : sbix::accelerator_t (face) {}
+};
+
+
+} /* namespace OT */
+
+#endif /* OT_COLOR_SBIX_SBIX_HH */
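choose_strike() above picks the smallest strike whose ppem meets the requested ppem (or the largest strike if none does), and get_glyph_blob() hands back a sub-blob pointing at the embedded image, following 'dupe' indirections up to eight times. From client code the same data is reachable through the public API; a hedged sketch (the helper name is illustrative):

    #include <hb.h>
    #include <hb-ot.h>

    // Returns the embedded PNG for `glyph` at the font's current ppem,
    // or nullptr if no usable bitmap exists. Caller owns the blob.
    static hb_blob_t *
    glyph_png (hb_font_t *font, hb_codepoint_t glyph)
    {
      hb_blob_t *blob = hb_ot_color_glyph_reference_png (font, glyph);
      if (hb_blob_get_length (blob) == 0)
      {
        hb_blob_destroy (blob);
        return nullptr;
      }
      return blob;  // hb_blob_destroy () when done
    }

Note that the lookup is ppem-dependent, which is why the public function takes an hb_font_t rather than an hb_face_t.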
diff --git a/gfx/harfbuzz/src/OT/Color/svg/svg.hh b/gfx/harfbuzz/src/OT/Color/svg/svg.hh
new file mode 100644
index 0000000000..c7d91b88ee
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Color/svg/svg.hh
@@ -0,0 +1,151 @@
+/*
+ * Copyright © 2018 Ebrahim Byagowi
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ */
+
+#ifndef OT_COLOR_SVG_SVG_HH
+#define OT_COLOR_SVG_SVG_HH
+
+#include "../../../hb-open-type.hh"
+#include "../../../hb-blob.hh"
+#include "../../../hb-paint.hh"
+
+/*
+ * SVG -- SVG (Scalable Vector Graphics)
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/svg
+ */
+
+#define HB_OT_TAG_SVG HB_TAG('S','V','G',' ')
+
+
+namespace OT {
+
+
+struct SVGDocumentIndexEntry
+{
+ int cmp (hb_codepoint_t g) const
+ { return g < startGlyphID ? -1 : g > endGlyphID ? 1 : 0; }
+
+ hb_blob_t *reference_blob (hb_blob_t *svg_blob, unsigned int index_offset) const
+ {
+ return hb_blob_create_sub_blob (svg_blob,
+ index_offset + (unsigned int) svgDoc,
+ svgDocLength);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ svgDoc.sanitize (c, base, svgDocLength));
+ }
+
+ protected:
+ HBUINT16 startGlyphID; /* The first glyph ID in the range described by
+ * this index entry. */
+ HBUINT16 endGlyphID; /* The last glyph ID in the range described by
+ * this index entry. Must be >= startGlyphID. */
+ NNOffset32To<UnsizedArrayOf<HBUINT8>>
+ svgDoc; /* Offset from the beginning of the SVG Document Index
+ * to an SVG document. Must be non-zero. */
+ HBUINT32 svgDocLength; /* Length of the SVG document.
+ * Must be non-zero. */
+ public:
+ DEFINE_SIZE_STATIC (12);
+};
+
+struct SVG
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_SVG;
+
+ bool has_data () const { return svgDocEntries; }
+
+ struct accelerator_t
+ {
+ accelerator_t (hb_face_t *face)
+ { table = hb_sanitize_context_t ().reference_table<SVG> (face); }
+ ~accelerator_t () { table.destroy (); }
+
+ hb_blob_t *reference_blob_for_glyph (hb_codepoint_t glyph_id) const
+ {
+ return table->get_glyph_entry (glyph_id).reference_blob (table.get_blob (),
+ table->svgDocEntries);
+ }
+
+ bool has_data () const { return table->has_data (); }
+
+ bool paint_glyph (hb_font_t *font HB_UNUSED, hb_codepoint_t glyph, hb_paint_funcs_t *funcs, void *data) const
+ {
+ if (!has_data ())
+ return false;
+
+ hb_blob_t *blob = reference_blob_for_glyph (glyph);
+
+ if (blob == hb_blob_get_empty ())
+ return false;
+
+ funcs->image (data,
+ blob,
+ 0, 0,
+ HB_PAINT_IMAGE_FORMAT_SVG,
+ font->slant_xy,
+ nullptr);
+
+ hb_blob_destroy (blob);
+ return true;
+ }
+
+ private:
+ hb_blob_ptr_t<SVG> table;
+ public:
+ DEFINE_SIZE_STATIC (sizeof (hb_blob_ptr_t<SVG>));
+ };
+
+ const SVGDocumentIndexEntry &get_glyph_entry (hb_codepoint_t glyph_id) const
+ { return (this+svgDocEntries).bsearch (glyph_id); }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (likely (c->check_struct (this) &&
+ (this+svgDocEntries).sanitize_shallow (c)));
+ }
+
+ protected:
+ HBUINT16 version; /* Table version (starting at 0). */
+ Offset32To<SortedArray16Of<SVGDocumentIndexEntry>>
+ svgDocEntries; /* Offset (relative to the start of the SVG table) to the
+ * SVG Documents Index. Must be non-zero. */
+ /* Array of SVG Document Index Entries. */
+ HBUINT32 reserved; /* Set to 0. */
+ public:
+ DEFINE_SIZE_STATIC (10);
+};
+
+struct SVG_accelerator_t : SVG::accelerator_t {
+ SVG_accelerator_t (hb_face_t *face) : SVG::accelerator_t (face) {}
+};
+
+} /* namespace OT */
+
+
+#endif /* OT_COLOR_SVG_SVG_HH */
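get_glyph_entry() binary-searches the sorted SVG Document Index by glyph ID, and reference_blob() returns the referenced SVG document as a sub-blob of the table. The public wrapper is hb_ot_color_glyph_reference_svg(); a hedged usage sketch (helper name illustrative):

    #include <hb.h>
    #include <hb-ot.h>

    static hb_blob_t *
    glyph_svg_document (hb_face_t *face, hb_codepoint_t glyph)
    {
      if (!hb_ot_color_has_svg (face))
        return nullptr;                       // no SVG table in this face
      hb_blob_t *blob = hb_ot_color_glyph_reference_svg (face, glyph);
      if (hb_blob_get_length (blob) == 0)     // glyph not covered by any index entry
      {
        hb_blob_destroy (blob);
        return nullptr;
      }
      return blob;                            // may be gzip-compressed per the spec
    }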
diff --git a/gfx/harfbuzz/src/OT/Layout/Common/Coverage.hh b/gfx/harfbuzz/src/OT/Layout/Common/Coverage.hh
new file mode 100644
index 0000000000..9ca88f788a
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/Common/Coverage.hh
@@ -0,0 +1,348 @@
+/*
+ * Copyright © 2007,2008,2009 Red Hat, Inc.
+ * Copyright © 2010,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod, Garret Rieger
+ */
+
+#ifndef OT_LAYOUT_COMMON_COVERAGE_HH
+#define OT_LAYOUT_COMMON_COVERAGE_HH
+
+#include "../types.hh"
+#include "CoverageFormat1.hh"
+#include "CoverageFormat2.hh"
+
+namespace OT {
+namespace Layout {
+namespace Common {
+
+template<typename Iterator>
+static inline void Coverage_serialize (hb_serialize_context_t *c,
+ Iterator it);
+
+struct Coverage
+{
+
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ CoverageFormat1_3<SmallTypes> format1;
+ CoverageFormat2_4<SmallTypes> format2;
+#ifndef HB_NO_BEYOND_64K
+ CoverageFormat1_3<MediumTypes>format3;
+ CoverageFormat2_4<MediumTypes>format4;
+#endif
+ } u;
+ public:
+ DEFINE_SIZE_UNION (2, format);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (!u.format.sanitize (c)) return_trace (false);
+ switch (u.format)
+ {
+ case 1: return_trace (u.format1.sanitize (c));
+ case 2: return_trace (u.format2.sanitize (c));
+#ifndef HB_NO_BEYOND_64K
+ case 3: return_trace (u.format3.sanitize (c));
+ case 4: return_trace (u.format4.sanitize (c));
+#endif
+ default:return_trace (true);
+ }
+ }
+
+ /* Has interface. */
+ unsigned operator [] (hb_codepoint_t k) const { return get (k); }
+ bool has (hb_codepoint_t k) const { return (*this)[k] != NOT_COVERED; }
+ /* Predicate. */
+ bool operator () (hb_codepoint_t k) const { return has (k); }
+
+ unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
+ unsigned int get_coverage (hb_codepoint_t glyph_id) const
+ {
+ switch (u.format) {
+ case 1: return u.format1.get_coverage (glyph_id);
+ case 2: return u.format2.get_coverage (glyph_id);
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.get_coverage (glyph_id);
+ case 4: return u.format4.get_coverage (glyph_id);
+#endif
+ default:return NOT_COVERED;
+ }
+ }
+
+ unsigned get_population () const
+ {
+ switch (u.format) {
+ case 1: return u.format1.get_population ();
+ case 2: return u.format2.get_population ();
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.get_population ();
+ case 4: return u.format4.get_population ();
+#endif
+ default:return NOT_COVERED;
+ }
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c, Iterator glyphs)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+
+ unsigned count = hb_len (glyphs);
+ unsigned num_ranges = 0;
+ hb_codepoint_t last = (hb_codepoint_t) -2;
+ hb_codepoint_t max = 0;
+ bool unsorted = false;
+ for (auto g: glyphs)
+ {
+ if (last != (hb_codepoint_t) -2 && g < last)
+ unsorted = true;
+ if (last + 1 != g)
+ num_ranges++;
+ last = g;
+ if (g > max) max = g;
+ }
+ u.format = !unsorted && count <= num_ranges * 3 ? 1 : 2;
+
+#ifndef HB_NO_BEYOND_64K
+ if (max > 0xFFFFu)
+ u.format += 2;
+ if (unlikely (max > 0xFFFFFFu))
+#else
+ if (unlikely (max > 0xFFFFu))
+#endif
+ {
+ c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW);
+ return_trace (false);
+ }
+
+ switch (u.format)
+ {
+ case 1: return_trace (u.format1.serialize (c, glyphs));
+ case 2: return_trace (u.format2.serialize (c, glyphs));
+#ifndef HB_NO_BEYOND_64K
+ case 3: return_trace (u.format3.serialize (c, glyphs));
+ case 4: return_trace (u.format4.serialize (c, glyphs));
+#endif
+ default:return_trace (false);
+ }
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto it =
+ + iter ()
+ | hb_take (c->plan->source->get_num_glyphs ())
+ | hb_map_retains_sorting (c->plan->glyph_map_gsub)
+ | hb_filter ([] (hb_codepoint_t glyph) { return glyph != HB_MAP_VALUE_INVALID; })
+ ;
+
+ // Cache the iterator result as it will be iterated multiple times
+ // by the serialize code below.
+ hb_sorted_vector_t<hb_codepoint_t> glyphs (it);
+ Coverage_serialize (c->serializer, glyphs.iter ());
+ return_trace (bool (glyphs));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ switch (u.format)
+ {
+ case 1: return u.format1.intersects (glyphs);
+ case 2: return u.format2.intersects (glyphs);
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.intersects (glyphs);
+ case 4: return u.format4.intersects (glyphs);
+#endif
+ default:return false;
+ }
+ }
+ bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
+ {
+ switch (u.format)
+ {
+ case 1: return u.format1.intersects_coverage (glyphs, index);
+ case 2: return u.format2.intersects_coverage (glyphs, index);
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.intersects_coverage (glyphs, index);
+ case 4: return u.format4.intersects_coverage (glyphs, index);
+#endif
+ default:return false;
+ }
+ }
+
+ /* Might return false if array looks unsorted.
+ * Used for faster rejection of corrupt data. */
+ template <typename set_t>
+ bool collect_coverage (set_t *glyphs) const
+ {
+ switch (u.format)
+ {
+ case 1: return u.format1.collect_coverage (glyphs);
+ case 2: return u.format2.collect_coverage (glyphs);
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.collect_coverage (glyphs);
+ case 4: return u.format4.collect_coverage (glyphs);
+#endif
+ default:return false;
+ }
+ }
+
+ template <typename IterableOut,
+ hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
+ void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
+ {
+ switch (u.format)
+ {
+ case 1: return u.format1.intersect_set (glyphs, intersect_glyphs);
+ case 2: return u.format2.intersect_set (glyphs, intersect_glyphs);
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.intersect_set (glyphs, intersect_glyphs);
+ case 4: return u.format4.intersect_set (glyphs, intersect_glyphs);
+#endif
+ default:return ;
+ }
+ }
+
+ struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
+ {
+ static constexpr bool is_sorted_iterator = true;
+ iter_t (const Coverage &c_ = Null (Coverage))
+ {
+ hb_memset (this, 0, sizeof (*this));
+ format = c_.u.format;
+ switch (format)
+ {
+ case 1: u.format1.init (c_.u.format1); return;
+ case 2: u.format2.init (c_.u.format2); return;
+#ifndef HB_NO_BEYOND_64K
+ case 3: u.format3.init (c_.u.format3); return;
+ case 4: u.format4.init (c_.u.format4); return;
+#endif
+ default: return;
+ }
+ }
+ bool __more__ () const
+ {
+ switch (format)
+ {
+ case 1: return u.format1.__more__ ();
+ case 2: return u.format2.__more__ ();
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.__more__ ();
+ case 4: return u.format4.__more__ ();
+#endif
+ default:return false;
+ }
+ }
+ void __next__ ()
+ {
+ switch (format)
+ {
+ case 1: u.format1.__next__ (); break;
+ case 2: u.format2.__next__ (); break;
+#ifndef HB_NO_BEYOND_64K
+ case 3: u.format3.__next__ (); break;
+ case 4: u.format4.__next__ (); break;
+#endif
+ default: break;
+ }
+ }
+ typedef hb_codepoint_t __item_t__;
+ __item_t__ __item__ () const { return get_glyph (); }
+
+ hb_codepoint_t get_glyph () const
+ {
+ switch (format)
+ {
+ case 1: return u.format1.get_glyph ();
+ case 2: return u.format2.get_glyph ();
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3.get_glyph ();
+ case 4: return u.format4.get_glyph ();
+#endif
+ default:return 0;
+ }
+ }
+ bool operator != (const iter_t& o) const
+ {
+ if (unlikely (format != o.format)) return true;
+ switch (format)
+ {
+ case 1: return u.format1 != o.u.format1;
+ case 2: return u.format2 != o.u.format2;
+#ifndef HB_NO_BEYOND_64K
+ case 3: return u.format3 != o.u.format3;
+ case 4: return u.format4 != o.u.format4;
+#endif
+ default:return false;
+ }
+ }
+ iter_t __end__ () const
+ {
+ iter_t it = {};
+ it.format = format;
+ switch (format)
+ {
+ case 1: it.u.format1 = u.format1.__end__ (); break;
+ case 2: it.u.format2 = u.format2.__end__ (); break;
+#ifndef HB_NO_BEYOND_64K
+ case 3: it.u.format3 = u.format3.__end__ (); break;
+ case 4: it.u.format4 = u.format4.__end__ (); break;
+#endif
+ default: break;
+ }
+ return it;
+ }
+
+ private:
+ unsigned int format;
+ union {
+#ifndef HB_NO_BEYOND_64K
+ CoverageFormat2_4<MediumTypes>::iter_t format4; /* Put this one first since it's larger; helps shut up compiler. */
+ CoverageFormat1_3<MediumTypes>::iter_t format3;
+#endif
+ CoverageFormat2_4<SmallTypes>::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
+ CoverageFormat1_3<SmallTypes>::iter_t format1;
+ } u;
+ };
+ iter_t iter () const { return iter_t (*this); }
+};
+
+template<typename Iterator>
+static inline void
+Coverage_serialize (hb_serialize_context_t *c,
+ Iterator it)
+{ c->start_embed<Coverage> ()->serialize (c, it); }
+
+}
+}
+}
+
+#endif // #ifndef OT_LAYOUT_COMMON_COVERAGE_HH
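Coverage::serialize() above picks the output format with a simple size heuristic: format 1 costs one 16-bit glyph ID per covered glyph, format 2 costs three 16-bit fields per contiguous range, so format 1 wins only when count <= num_ranges * 3 (and the 24-bit formats 3/4 are chosen when any glyph ID exceeds 0xFFFF). A hedged sketch of the same decision over a plain sorted vector (function name illustrative):

    #include <vector>

    // Returns 1 or 2: which Coverage format is smaller for `glyphs`
    // (assumed sorted and deduplicated), mirroring Coverage::serialize().
    static unsigned
    pick_coverage_format (const std::vector<unsigned> &glyphs)
    {
      unsigned num_ranges = 0;
      unsigned last = (unsigned) -2;
      for (unsigned g : glyphs)
      {
        if (last + 1 != g) num_ranges++;   // a gap starts a new range
        last = g;
      }
      // One HBUINT16 per glyph vs. three HBUINT16s per range.
      return glyphs.size () <= num_ranges * 3 ? 1 : 2;
    }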
diff --git a/gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh b/gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh
new file mode 100644
index 0000000000..5d68e3d15e
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat1.hh
@@ -0,0 +1,133 @@
+/*
+ * Copyright © 2007,2008,2009 Red Hat, Inc.
+ * Copyright © 2010,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod, Garret Rieger
+ */
+
+
+#ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT1_HH
+#define OT_LAYOUT_COMMON_COVERAGEFORMAT1_HH
+
+namespace OT {
+namespace Layout {
+namespace Common {
+
+#define NOT_COVERED ((unsigned int) -1)
+
+template <typename Types>
+struct CoverageFormat1_3
+{
+ friend struct Coverage;
+
+ protected:
+ HBUINT16 coverageFormat; /* Format identifier--format = 1 */
+ SortedArray16Of<typename Types::HBGlyphID>
+ glyphArray; /* Array of GlyphIDs--in numerical order */
+ public:
+ DEFINE_SIZE_ARRAY (4, glyphArray);
+
+ private:
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (glyphArray.sanitize (c));
+ }
+
+ unsigned int get_coverage (hb_codepoint_t glyph_id) const
+ {
+ unsigned int i;
+ glyphArray.bfind (glyph_id, &i, HB_NOT_FOUND_STORE, NOT_COVERED);
+ return i;
+ }
+
+ unsigned get_population () const
+ {
+ return glyphArray.len;
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c, Iterator glyphs)
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (glyphArray.serialize (c, glyphs));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ if (glyphArray.len > glyphs->get_population () * hb_bit_storage ((unsigned) glyphArray.len) / 2)
+ {
+ for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
+ if (get_coverage (g) != NOT_COVERED)
+ return true;
+ return false;
+ }
+
+ for (const auto& g : glyphArray.as_array ())
+ if (glyphs->has (g))
+ return true;
+ return false;
+ }
+ bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
+ { return glyphs->has (glyphArray[index]); }
+
+ template <typename IterableOut,
+ hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
+ void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
+ {
+ unsigned count = glyphArray.len;
+ for (unsigned i = 0; i < count; i++)
+ if (glyphs.has (glyphArray[i]))
+ intersect_glyphs << glyphArray[i];
+ }
+
+ template <typename set_t>
+ bool collect_coverage (set_t *glyphs) const
+ { return glyphs->add_sorted_array (glyphArray.as_array ()); }
+
+ public:
+ /* Older compilers need this to be public. */
+ struct iter_t
+ {
+ void init (const struct CoverageFormat1_3 &c_) { c = &c_; i = 0; }
+ bool __more__ () const { return i < c->glyphArray.len; }
+ void __next__ () { i++; }
+ hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
+ bool operator != (const iter_t& o) const
+ { return i != o.i; }
+ iter_t __end__ () const { iter_t it; it.init (*c); it.i = c->glyphArray.len; return it; }
+
+ private:
+ const struct CoverageFormat1_3 *c;
+ unsigned int i;
+ };
+ private:
+};
+
+}
+}
+}
+
+#endif // #ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT1_HH
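In format 1 a glyph's coverage index is simply its position in the sorted glyphArray, located by binary search (bfind), with NOT_COVERED returned for glyphs that are absent. A hedged equivalent over a plain sorted array (names illustrative):

    #include <algorithm>
    #include <vector>

    static const unsigned NOT_COVERED_IDX = (unsigned) -1;  // same sentinel as NOT_COVERED

    static unsigned
    coverage1_lookup (const std::vector<unsigned> &glyph_array, unsigned glyph_id)
    {
      auto it = std::lower_bound (glyph_array.begin (), glyph_array.end (), glyph_id);
      if (it == glyph_array.end () || *it != glyph_id)
        return NOT_COVERED_IDX;
      return (unsigned) (it - glyph_array.begin ());  // coverage index == array position
    }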
diff --git a/gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh b/gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh
new file mode 100644
index 0000000000..fa501d659d
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/Common/CoverageFormat2.hh
@@ -0,0 +1,239 @@
+/*
+ * Copyright © 2007,2008,2009 Red Hat, Inc.
+ * Copyright © 2010,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod, Garret Rieger
+ */
+
+#ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH
+#define OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH
+
+#include "RangeRecord.hh"
+
+namespace OT {
+namespace Layout {
+namespace Common {
+
+template <typename Types>
+struct CoverageFormat2_4
+{
+ friend struct Coverage;
+
+ protected:
+ HBUINT16 coverageFormat; /* Format identifier--format = 2 */
+ SortedArray16Of<RangeRecord<Types>>
+ rangeRecord; /* Array of glyph ranges--ordered by
+ * Start GlyphID. rangeCount entries
+ * long */
+ public:
+ DEFINE_SIZE_ARRAY (4, rangeRecord);
+
+ private:
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (rangeRecord.sanitize (c));
+ }
+
+ unsigned int get_coverage (hb_codepoint_t glyph_id) const
+ {
+ const RangeRecord<Types> &range = rangeRecord.bsearch (glyph_id);
+ return likely (range.first <= range.last)
+ ? (unsigned int) range.value + (glyph_id - range.first)
+ : NOT_COVERED;
+ }
+
+ unsigned get_population () const
+ {
+ typename Types::large_int ret = 0;
+ for (const auto &r : rangeRecord)
+ ret += r.get_population ();
+ return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c, Iterator glyphs)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+
+ unsigned num_ranges = 0;
+ hb_codepoint_t last = (hb_codepoint_t) -2;
+ for (auto g: glyphs)
+ {
+ if (last + 1 != g)
+ num_ranges++;
+ last = g;
+ }
+
+ if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
+ if (!num_ranges) return_trace (true);
+
+ unsigned count = 0;
+ unsigned range = (unsigned) -1;
+ last = (hb_codepoint_t) -2;
+ unsigned unsorted = false;
+ for (auto g: glyphs)
+ {
+ if (last + 1 != g)
+ {
+ if (unlikely (last != (hb_codepoint_t) -2 && last + 1 > g))
+ unsorted = true;
+
+ range++;
+ rangeRecord.arrayZ[range].first = g;
+ rangeRecord.arrayZ[range].value = count;
+ }
+ rangeRecord.arrayZ[range].last = g;
+ last = g;
+ count++;
+ }
+
+ if (unlikely (unsorted))
+ rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range);
+
+ return_trace (true);
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
+ {
+ for (hb_codepoint_t g = HB_SET_VALUE_INVALID; glyphs->next (&g);)
+ if (get_coverage (g) != NOT_COVERED)
+ return true;
+ return false;
+ }
+
+ return hb_any (+ hb_iter (rangeRecord)
+ | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs); }));
+ }
+ bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
+ {
+ auto *range = rangeRecord.as_array ().bsearch (index);
+ if (range)
+ return range->intersects (*glyphs);
+ return false;
+ }
+
+ template <typename IterableOut,
+ hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
+ void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
+ {
+    /* Break out of the loop for overlapping or broken tables,
+     * to avoid fuzzer timeouts. */
+ hb_codepoint_t last = 0;
+ for (const auto& range : rangeRecord)
+ {
+ if (unlikely (range.first < last))
+ break;
+ last = range.last;
+ for (hb_codepoint_t g = range.first - 1;
+ glyphs.next (&g) && g <= last;)
+ intersect_glyphs << g;
+ }
+ }
+
+ template <typename set_t>
+ bool collect_coverage (set_t *glyphs) const
+ {
+ for (const auto& range: rangeRecord)
+ if (unlikely (!range.collect_coverage (glyphs)))
+ return false;
+ return true;
+ }
+
+ public:
+ /* Older compilers need this to be public. */
+ struct iter_t
+ {
+ void init (const CoverageFormat2_4 &c_)
+ {
+ c = &c_;
+ coverage = 0;
+ i = 0;
+ j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
+ if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
+ {
+ /* Broken table. Skip. */
+ i = c->rangeRecord.len;
+ j = 0;
+ }
+ }
+ bool __more__ () const { return i < c->rangeRecord.len; }
+ void __next__ ()
+ {
+ if (j >= c->rangeRecord[i].last)
+ {
+ i++;
+ if (__more__ ())
+ {
+ unsigned int old = coverage;
+ j = c->rangeRecord.arrayZ[i].first;
+ coverage = c->rangeRecord.arrayZ[i].value;
+ if (unlikely (coverage != old + 1))
+ {
+ /* Broken table. Skip. Important to avoid DoS.
+ * Also, our callers depend on coverage being
+ * consecutive and monotonically increasing,
+            * i.e. iota(). */
+ i = c->rangeRecord.len;
+ j = 0;
+ return;
+ }
+ }
+ else
+ j = 0;
+ return;
+ }
+ coverage++;
+ j++;
+ }
+ hb_codepoint_t get_glyph () const { return j; }
+ bool operator != (const iter_t& o) const
+ { return i != o.i || j != o.j; }
+ iter_t __end__ () const
+ {
+ iter_t it;
+ it.init (*c);
+ it.i = c->rangeRecord.len;
+ it.j = 0;
+ return it;
+ }
+
+ private:
+ const struct CoverageFormat2_4 *c;
+ unsigned int i, coverage;
+ hb_codepoint_t j;
+ };
+ private:
+};
+
+}
+}
+}
+
+#endif // #ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH
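Format 2 stores ranges sorted by first glyph ID, and each range's value field holds the coverage index of its first glyph, so a covered glyph's index is value + (glyph - first), exactly as get_coverage() computes after its binary search. A hedged equivalent (types and names illustrative):

    #include <algorithm>
    #include <vector>

    struct Range { unsigned first, last, value; };   // mirrors RangeRecord

    static unsigned
    coverage2_lookup (const std::vector<Range> &ranges, unsigned glyph_id)
    {
      // Find the last range whose `first` is <= glyph_id.
      auto it = std::upper_bound (ranges.begin (), ranges.end (), glyph_id,
                                  [] (unsigned g, const Range &r) { return g < r.first; });
      if (it == ranges.begin ()) return (unsigned) -1;   // NOT_COVERED
      const Range &r = *(it - 1);
      if (glyph_id > r.last) return (unsigned) -1;       // falls in a gap between ranges
      return r.value + (glyph_id - r.first);             // indices are consecutive in a range
    }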
diff --git a/gfx/harfbuzz/src/OT/Layout/Common/RangeRecord.hh b/gfx/harfbuzz/src/OT/Layout/Common/RangeRecord.hh
new file mode 100644
index 0000000000..85aacace9a
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/Common/RangeRecord.hh
@@ -0,0 +1,97 @@
+/*
+ * Copyright © 2007,2008,2009 Red Hat, Inc.
+ * Copyright © 2010,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod, Garret Rieger
+ */
+
+#ifndef OT_LAYOUT_COMMON_RANGERECORD_HH
+#define OT_LAYOUT_COMMON_RANGERECORD_HH
+
+namespace OT {
+namespace Layout {
+namespace Common {
+
+template <typename Types>
+struct RangeRecord
+{
+ typename Types::HBGlyphID first; /* First GlyphID in the range */
+ typename Types::HBGlyphID last; /* Last GlyphID in the range */
+ HBUINT16 value; /* Value */
+
+ DEFINE_SIZE_STATIC (2 + 2 * Types::size);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ int cmp (hb_codepoint_t g) const
+ { return g < first ? -1 : g <= last ? 0 : +1; }
+
+ HB_INTERNAL static int cmp_range (const void *pa, const void *pb) {
+ const RangeRecord *a = (const RangeRecord *) pa;
+ const RangeRecord *b = (const RangeRecord *) pb;
+ if (a->first < b->first) return -1;
+ if (a->first > b->first) return +1;
+ if (a->last < b->last) return -1;
+ if (a->last > b->last) return +1;
+ if (a->value < b->value) return -1;
+ if (a->value > b->value) return +1;
+ return 0;
+ }
+
+ unsigned get_population () const
+ {
+ if (unlikely (last < first)) return 0;
+ return (last - first + 1);
+ }
+
+ bool intersects (const hb_set_t &glyphs) const
+ { return glyphs.intersects (first, last); }
+
+ template <typename set_t>
+ bool collect_coverage (set_t *glyphs) const
+ { return glyphs->add_range (first, last); }
+};
+
+}
+}
+}
+
+// TODO(garretrieger): This was previously implemented using
+// DECLARE_NULL_NAMESPACE_BYTES_TEMPLATE1 (OT, RangeRecord, 9);
+// but that only works when there is only a single namespace level.
+// The macro should probably be fixed so it can work in this situation.
+extern HB_INTERNAL const unsigned char _hb_Null_OT_RangeRecord[9];
+template <typename Spec>
+struct Null<OT::Layout::Common::RangeRecord<Spec>> {
+ static OT::Layout::Common::RangeRecord<Spec> const & get_null () {
+ return *reinterpret_cast<const OT::Layout::Common::RangeRecord<Spec> *> (_hb_Null_OT_RangeRecord);
+ }
+};
+
+
+#endif // #ifndef OT_LAYOUT_COMMON_RANGERECORD_HH
diff --git a/gfx/harfbuzz/src/OT/Layout/GDEF/GDEF.hh b/gfx/harfbuzz/src/OT/Layout/GDEF/GDEF.hh
new file mode 100644
index 0000000000..c1ff796199
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GDEF/GDEF.hh
@@ -0,0 +1,942 @@
+/*
+ * Copyright © 2007,2008,2009 Red Hat, Inc.
+ * Copyright © 2010,2011,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod
+ */
+
+#ifndef OT_LAYOUT_GDEF_GDEF_HH
+#define OT_LAYOUT_GDEF_GDEF_HH
+
+#include "../../../hb-ot-layout-common.hh"
+
+#include "../../../hb-font.hh"
+#include "../../../hb-cache.hh"
+
+
+namespace OT {
+
+
+/*
+ * Attachment List Table
+ */
+
+/* Array of contour point indices--in increasing numerical order */
+struct AttachPoint : Array16Of<HBUINT16>
+{
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ return_trace (out->serialize (c->serializer, + iter ()));
+ }
+};
+
+struct AttachList
+{
+ unsigned int get_attach_points (hb_codepoint_t glyph_id,
+ unsigned int start_offset,
+ unsigned int *point_count /* IN/OUT */,
+ unsigned int *point_array /* OUT */) const
+ {
+ unsigned int index = (this+coverage).get_coverage (glyph_id);
+ if (index == NOT_COVERED)
+ {
+ if (point_count)
+ *point_count = 0;
+ return 0;
+ }
+
+ const AttachPoint &points = this+attachPoint[index];
+
+ if (point_count)
+ {
+ + points.as_array ().sub_array (start_offset, point_count)
+ | hb_sink (hb_array (point_array, *point_count))
+ ;
+ }
+
+ return points.len;
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + hb_zip (this+coverage, attachPoint)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter (subset_offset_array (c, out->attachPoint, this), hb_second)
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+ out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
+ return_trace (bool (new_coverage));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && attachPoint.sanitize (c, this));
+ }
+
+ protected:
+ Offset16To<Coverage>
+ coverage; /* Offset to Coverage table -- from
+ * beginning of AttachList table */
+ Array16OfOffset16To<AttachPoint>
+ attachPoint; /* Array of AttachPoint tables
+ * in Coverage Index order */
+ public:
+ DEFINE_SIZE_ARRAY (4, attachPoint);
+};
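AttachList maps each glyph in its Coverage to an array of contour-point indices, and get_attach_points() copies a slice of that array using the same start_offset / in-out count convention seen throughout these tables. Clients normally use the public wrapper; a hedged sketch (helper name illustrative):

    #include <hb.h>
    #include <hb-ot.h>
    #include <vector>

    static std::vector<unsigned>
    attach_points (hb_face_t *face, hb_codepoint_t glyph)
    {
      unsigned total = hb_ot_layout_get_attach_points (face, glyph, 0, nullptr, nullptr);
      std::vector<unsigned> points (total);
      unsigned written = total;
      hb_ot_layout_get_attach_points (face, glyph, 0, &written, points.data ());
      points.resize (written);
      return points;  // contour point indices, in increasing order
    }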
+
+/*
+ * Ligature Caret Table
+ */
+
+struct CaretValueFormat1
+{
+ friend struct CaretValue;
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+ return_trace (true);
+ }
+
+ private:
+ hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction) const
+ {
+ return HB_DIRECTION_IS_HORIZONTAL (direction) ? font->em_scale_x (coordinate) : font->em_scale_y (coordinate);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ protected:
+ HBUINT16 caretValueFormat; /* Format identifier--format = 1 */
+ FWORD coordinate; /* X or Y value, in design units */
+ public:
+ DEFINE_SIZE_STATIC (4);
+};
+
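
A worked sketch of the em-scaling used by get_caret_value() above. The exact rounding inside font->em_scale_x/em_scale_y is an implementation detail; the assumed behaviour is design units multiplied by the font scale and divided by units-per-em.

// Illustrative sketch of the assumed scaling, not HarfBuzz's exact code:
static inline int
em_scale_sketch (int coordinate, int scale, unsigned upem)
{ return (int) ((long long) coordinate * scale / upem); }

// e.g. coordinate = 520, hb_font_set_scale (font, 2000, 2000), upem = 1000:
//   caret position = 520 * 2000 / 1000 = 1040
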
+struct CaretValueFormat2
+{
+ friend struct CaretValue;
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (false);
+ return_trace (true);
+ }
+
+ private:
+ hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction, hb_codepoint_t glyph_id) const
+ {
+ hb_position_t x, y;
+ font->get_glyph_contour_point_for_origin (glyph_id, caretValuePoint, direction, &x, &y);
+ return HB_DIRECTION_IS_HORIZONTAL (direction) ? x : y;
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ protected:
+ HBUINT16 caretValueFormat; /* Format identifier--format = 2 */
+ HBUINT16 caretValuePoint; /* Contour point index on glyph */
+ public:
+ DEFINE_SIZE_STATIC (4);
+};
+
+struct CaretValueFormat3
+{
+ friend struct CaretValue;
+
+ hb_position_t get_caret_value (hb_font_t *font, hb_direction_t direction,
+ const VariationStore &var_store) const
+ {
+ return HB_DIRECTION_IS_HORIZONTAL (direction) ?
+ font->em_scale_x (coordinate) + (this+deviceTable).get_x_delta (font, var_store) :
+ font->em_scale_y (coordinate) + (this+deviceTable).get_y_delta (font, var_store);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!out)) return_trace (false);
+ if (!c->serializer->embed (caretValueFormat)) return_trace (false);
+ if (!c->serializer->embed (coordinate)) return_trace (false);
+
+ unsigned varidx = (this+deviceTable).get_variation_index ();
+ if (c->plan->layout_variation_idx_delta_map.has (varidx))
+ {
+ int delta = hb_second (c->plan->layout_variation_idx_delta_map.get (varidx));
+ if (delta != 0)
+ {
+ if (!c->serializer->check_assign (out->coordinate, coordinate + delta, HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+ }
+ }
+
+ if (c->plan->all_axes_pinned)
+ return_trace (c->serializer->check_assign (out->caretValueFormat, 1, HB_SERIALIZE_ERROR_INT_OVERFLOW));
+
+ if (!c->serializer->embed (deviceTable))
+ return_trace (false);
+
+ return_trace (out->deviceTable.serialize_copy (c->serializer, deviceTable, this, c->serializer->to_bias (out),
+ hb_serialize_context_t::Head, &c->plan->layout_variation_idx_delta_map));
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ { (this+deviceTable).collect_variation_indices (c); }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && deviceTable.sanitize (c, this));
+ }
+
+ protected:
+ HBUINT16 caretValueFormat; /* Format identifier--format = 3 */
+ FWORD coordinate; /* X or Y value, in design units */
+ Offset16To<Device>
+ deviceTable; /* Offset to Device table for X or Y
+ * value--from beginning of CaretValue
+ * table */
+ public:
+ DEFINE_SIZE_STATIC (6);
+};
+
+struct CaretValue
+{
+ hb_position_t get_caret_value (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t glyph_id,
+ const VariationStore &var_store) const
+ {
+ switch (u.format) {
+ case 1: return u.format1.get_caret_value (font, direction);
+ case 2: return u.format2.get_caret_value (font, direction, glyph_id);
+ case 3: return u.format3.get_caret_value (font, direction, var_store);
+ default:return 0;
+ }
+ }
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+ case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ switch (u.format) {
+ case 1:
+ case 2:
+ return;
+ case 3:
+ u.format3.collect_variation_indices (c);
+ return;
+ default: return;
+ }
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (!u.format.sanitize (c)) return_trace (false);
+ switch (u.format) {
+ case 1: return_trace (u.format1.sanitize (c));
+ case 2: return_trace (u.format2.sanitize (c));
+ case 3: return_trace (u.format3.sanitize (c));
+ default:return_trace (true);
+ }
+ }
+
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ CaretValueFormat1 format1;
+ CaretValueFormat2 format2;
+ CaretValueFormat3 format3;
+ } u;
+ public:
+ DEFINE_SIZE_UNION (2, format);
+};
+
+struct LigGlyph
+{
+ unsigned get_lig_carets (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t glyph_id,
+ const VariationStore &var_store,
+ unsigned start_offset,
+ unsigned *caret_count /* IN/OUT */,
+ hb_position_t *caret_array /* OUT */) const
+ {
+ if (caret_count)
+ {
+ + carets.as_array ().sub_array (start_offset, caret_count)
+ | hb_map (hb_add (this))
+ | hb_map ([&] (const CaretValue &value) { return value.get_caret_value (font, direction, glyph_id, var_store); })
+ | hb_sink (hb_array (caret_array, *caret_count))
+ ;
+ }
+
+ return carets.len;
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ + hb_iter (carets)
+ | hb_apply (subset_offset_array (c, out->carets, this))
+ ;
+
+ return_trace (bool (out->carets));
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ for (const Offset16To<CaretValue>& offset : carets.iter ())
+ (this+offset).collect_variation_indices (c);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (carets.sanitize (c, this));
+ }
+
+ protected:
+ Array16OfOffset16To<CaretValue>
+ carets; /* Offset array of CaretValue tables
+ * --from beginning of LigGlyph table
+ * --in increasing coordinate order */
+ public:
+ DEFINE_SIZE_ARRAY (2, carets);
+};
+
+struct LigCaretList
+{
+ unsigned int get_lig_carets (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t glyph_id,
+ const VariationStore &var_store,
+ unsigned int start_offset,
+ unsigned int *caret_count /* IN/OUT */,
+ hb_position_t *caret_array /* OUT */) const
+ {
+ unsigned int index = (this+coverage).get_coverage (glyph_id);
+ if (index == NOT_COVERED)
+ {
+ if (caret_count)
+ *caret_count = 0;
+ return 0;
+ }
+ const LigGlyph &lig_glyph = this+ligGlyph[index];
+ return lig_glyph.get_lig_carets (font, direction, glyph_id, var_store, start_offset, caret_count, caret_array);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + hb_zip (this+coverage, ligGlyph)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter (subset_offset_array (c, out->ligGlyph, this), hb_second)
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+ out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
+ return_trace (bool (new_coverage));
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ + hb_zip (this+coverage, ligGlyph)
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const LigGlyph& _) { _.collect_variation_indices (c); })
+ ;
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && ligGlyph.sanitize (c, this));
+ }
+
+ protected:
+ Offset16To<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of LigCaretList table */
+ Array16OfOffset16To<LigGlyph>
+ ligGlyph; /* Array of LigGlyph tables
+ * in Coverage Index order */
+ public:
+ DEFINE_SIZE_ARRAY (4, ligGlyph);
+};
+
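
The list above backs hb_ot_layout_get_ligature_carets() in the public API; a minimal sketch of calling it follows (the `font` and `gid` values, the LTR direction, and the fixed 16-slot buffer are this example's assumptions).

#include <hb.h>
#include <hb-ot.h>
#include <cstdio>

// Illustrative sketch: print the ligature caret positions of one glyph.
static void
print_lig_carets (hb_font_t *font, hb_codepoint_t gid)
{
  hb_position_t carets[16];
  unsigned count = 16;                  // IN: array capacity; OUT: carets written
  unsigned total = hb_ot_layout_get_ligature_carets (font, HB_DIRECTION_LTR, gid,
                                                     0, &count, carets);
  for (unsigned i = 0; i < count; i++)
    std::printf ("caret %u of %u at %d\n", i + 1, total, carets[i]);
}
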
+
+struct MarkGlyphSetsFormat1
+{
+ bool covers (unsigned int set_index, hb_codepoint_t glyph_id) const
+ { return (this+coverage[set_index]).get_coverage (glyph_id) != NOT_COVERED; }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ bool ret = true;
+ for (const Offset32To<Coverage>& offset : coverage.iter ())
+ {
+ auto *o = out->coverage.serialize_append (c->serializer);
+ if (unlikely (!o))
+ {
+ ret = false;
+ break;
+ }
+
+ // Not using o->serialize_subset (c, offset, this, out) here because
+ // OTS doesn't allow null offsets.
+ // See issue: https://github.com/khaledhosny/ots/issues/172
+ c->serializer->push ();
+ c->dispatch (this+offset);
+ c->serializer->add_link (*o, c->serializer->pop_pack ());
+ }
+
+ return_trace (ret && out->coverage.len);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this));
+ }
+
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ Array16Of<Offset32To<Coverage>>
+ coverage; /* Array of long offsets to mark set
+ * coverage tables */
+ public:
+ DEFINE_SIZE_ARRAY (4, coverage);
+};
+
+struct MarkGlyphSets
+{
+ bool covers (unsigned int set_index, hb_codepoint_t glyph_id) const
+ {
+ switch (u.format) {
+ case 1: return u.format1.covers (set_index, glyph_id);
+ default:return false;
+ }
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ switch (u.format) {
+ case 1: return_trace (u.format1.subset (c));
+ default:return_trace (false);
+ }
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (!u.format.sanitize (c)) return_trace (false);
+ switch (u.format) {
+ case 1: return_trace (u.format1.sanitize (c));
+ default:return_trace (true);
+ }
+ }
+
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ MarkGlyphSetsFormat1 format1;
+ } u;
+ public:
+ DEFINE_SIZE_UNION (2, format);
+};
+
+
+/*
+ * GDEF -- Glyph Definition
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/gdef
+ */
+
+
+template <typename Types>
+struct GDEFVersion1_2
+{
+ friend struct GDEF;
+
+ protected:
+ FixedVersion<> version; /* Version of the GDEF table--currently
+ * 0x00010003u */
+ typename Types::template OffsetTo<ClassDef>
+ glyphClassDef; /* Offset to class definition table
+ * for glyph type--from beginning of
+ * GDEF header (may be Null) */
+ typename Types::template OffsetTo<AttachList>
+ attachList; /* Offset to list of glyphs with
+ * attachment points--from beginning
+ * of GDEF header (may be Null) */
+ typename Types::template OffsetTo<LigCaretList>
+ ligCaretList; /* Offset to list of positioning points
+ * for ligature carets--from beginning
+ * of GDEF header (may be Null) */
+ typename Types::template OffsetTo<ClassDef>
+ markAttachClassDef; /* Offset to class definition table for
+ * mark attachment type--from beginning
+ * of GDEF header (may be Null) */
+ typename Types::template OffsetTo<MarkGlyphSets>
+ markGlyphSetsDef; /* Offset to the table of mark set
+ * definitions--from beginning of GDEF
+ * header (may be NULL). Introduced
+ * in version 0x00010002. */
+ Offset32To<VariationStore>
+ varStore; /* Offset to the table of Item Variation
+ * Store--from beginning of GDEF
+ * header (may be NULL). Introduced
+ * in version 0x00010003. */
+ public:
+ DEFINE_SIZE_MIN (4 + 4 * Types::size);
+
+ unsigned int get_size () const
+ {
+ return min_size +
+ (version.to_int () >= 0x00010002u ? markGlyphSetsDef.static_size : 0) +
+ (version.to_int () >= 0x00010003u ? varStore.static_size : 0);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (version.sanitize (c) &&
+ glyphClassDef.sanitize (c, this) &&
+ attachList.sanitize (c, this) &&
+ ligCaretList.sanitize (c, this) &&
+ markAttachClassDef.sanitize (c, this) &&
+ (version.to_int () < 0x00010002u || markGlyphSetsDef.sanitize (c, this)) &&
+ (version.to_int () < 0x00010003u || varStore.sanitize (c, this)));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this, nullptr, false, true);
+ bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this);
+ bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
+ bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this, nullptr, false, true);
+
+ bool subset_markglyphsetsdef = false;
+ if (version.to_int () >= 0x00010002u)
+ {
+ subset_markglyphsetsdef = out->markGlyphSetsDef.serialize_subset (c, markGlyphSetsDef, this);
+ }
+
+ bool subset_varstore = false;
+ if (version.to_int () >= 0x00010003u)
+ {
+ if (c->plan->all_axes_pinned)
+ out->varStore = 0;
+ else
+ subset_varstore = out->varStore.serialize_subset (c, varStore, this, c->plan->gdef_varstore_inner_maps.as_array ());
+ }
+
+ if (subset_varstore)
+ {
+ out->version.minor = 3;
+ } else if (subset_markglyphsetsdef) {
+ out->version.minor = 2;
+ } else {
+ out->version.minor = 0;
+ }
+
+ return_trace (subset_glyphclassdef || subset_attachlist ||
+ subset_ligcaretlist || subset_markattachclassdef ||
+ (out->version.to_int () >= 0x00010002u && subset_markglyphsetsdef) ||
+ (out->version.to_int () >= 0x00010003u && subset_varstore));
+ }
+};
+
+struct GDEF
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_GDEF;
+
+ enum GlyphClasses {
+ UnclassifiedGlyph = 0,
+ BaseGlyph = 1,
+ LigatureGlyph = 2,
+ MarkGlyph = 3,
+ ComponentGlyph = 4
+ };
+
+ unsigned int get_size () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version1.get_size ();
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.get_size ();
+#endif
+ default: return u.version.static_size;
+ }
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (unlikely (!u.version.sanitize (c))) return_trace (false);
+ switch (u.version.major) {
+ case 1: return_trace (u.version1.sanitize (c));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (u.version2.sanitize (c));
+#endif
+ default: return_trace (true);
+ }
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ switch (u.version.major) {
+ case 1: return u.version1.subset (c);
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.subset (c);
+#endif
+ default: return false;
+ }
+ }
+
+ bool has_glyph_classes () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version1.glyphClassDef != 0;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.glyphClassDef != 0;
+#endif
+ default: return false;
+ }
+ }
+ const ClassDef &get_glyph_class_def () const
+ {
+ switch (u.version.major) {
+ case 1: return this+u.version1.glyphClassDef;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return this+u.version2.glyphClassDef;
+#endif
+ default: return Null(ClassDef);
+ }
+ }
+ bool has_attach_list () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version1.attachList != 0;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.attachList != 0;
+#endif
+ default: return false;
+ }
+ }
+ const AttachList &get_attach_list () const
+ {
+ switch (u.version.major) {
+ case 1: return this+u.version1.attachList;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return this+u.version2.attachList;
+#endif
+ default: return Null(AttachList);
+ }
+ }
+ bool has_lig_carets () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version1.ligCaretList != 0;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.ligCaretList != 0;
+#endif
+ default: return false;
+ }
+ }
+ const LigCaretList &get_lig_caret_list () const
+ {
+ switch (u.version.major) {
+ case 1: return this+u.version1.ligCaretList;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return this+u.version2.ligCaretList;
+#endif
+ default: return Null(LigCaretList);
+ }
+ }
+ bool has_mark_attachment_types () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version1.markAttachClassDef != 0;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.markAttachClassDef != 0;
+#endif
+ default: return false;
+ }
+ }
+ const ClassDef &get_mark_attach_class_def () const
+ {
+ switch (u.version.major) {
+ case 1: return this+u.version1.markAttachClassDef;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return this+u.version2.markAttachClassDef;
+#endif
+ default: return Null(ClassDef);
+ }
+ }
+ bool has_mark_glyph_sets () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version.to_int () >= 0x00010002u && u.version1.markGlyphSetsDef != 0;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.markGlyphSetsDef != 0;
+#endif
+ default: return false;
+ }
+ }
+ const MarkGlyphSets &get_mark_glyph_sets () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version.to_int () >= 0x00010002u ? this+u.version1.markGlyphSetsDef : Null(MarkGlyphSets);
+#ifndef HB_NO_BEYOND_64K
+ case 2: return this+u.version2.markGlyphSetsDef;
+#endif
+ default: return Null(MarkGlyphSets);
+ }
+ }
+ bool has_var_store () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version.to_int () >= 0x00010003u && u.version1.varStore != 0;
+#ifndef HB_NO_BEYOND_64K
+ case 2: return u.version2.varStore != 0;
+#endif
+ default: return false;
+ }
+ }
+ const VariationStore &get_var_store () const
+ {
+ switch (u.version.major) {
+ case 1: return u.version.to_int () >= 0x00010003u ? this+u.version1.varStore : Null(VariationStore);
+#ifndef HB_NO_BEYOND_64K
+ case 2: return this+u.version2.varStore;
+#endif
+ default: return Null(VariationStore);
+ }
+ }
+
+
+ bool has_data () const { return u.version.to_int (); }
+ unsigned int get_glyph_class (hb_codepoint_t glyph) const
+ { return get_glyph_class_def ().get_class (glyph); }
+ void get_glyphs_in_class (unsigned int klass, hb_set_t *glyphs) const
+ { get_glyph_class_def ().collect_class (glyphs, klass); }
+
+ unsigned int get_mark_attachment_type (hb_codepoint_t glyph) const
+ { return get_mark_attach_class_def ().get_class (glyph); }
+
+ unsigned int get_attach_points (hb_codepoint_t glyph_id,
+ unsigned int start_offset,
+ unsigned int *point_count /* IN/OUT */,
+ unsigned int *point_array /* OUT */) const
+ { return get_attach_list ().get_attach_points (glyph_id, start_offset, point_count, point_array); }
+
+ unsigned int get_lig_carets (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t glyph_id,
+ unsigned int start_offset,
+ unsigned int *caret_count /* IN/OUT */,
+ hb_position_t *caret_array /* OUT */) const
+ { return get_lig_caret_list ().get_lig_carets (font,
+ direction, glyph_id, get_var_store(),
+ start_offset, caret_count, caret_array); }
+
+ bool mark_set_covers (unsigned int set_index, hb_codepoint_t glyph_id) const
+ { return get_mark_glyph_sets ().covers (set_index, glyph_id); }
+
+ /* glyph_props is a 16-bit integer: the low 8 bits carry the glyph-class (and
+ * related) property bits, and the high 8 bits carry the mark attachment type,
+ * if any. Not to be confused with lookup_props, which is encoded similarly.
+ * (A worked example follows the GDEF struct below.) */
+ unsigned int get_glyph_props (hb_codepoint_t glyph) const
+ {
+ unsigned int klass = get_glyph_class (glyph);
+
+ static_assert (((unsigned int) HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH == (unsigned int) LookupFlag::IgnoreBaseGlyphs), "");
+ static_assert (((unsigned int) HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE == (unsigned int) LookupFlag::IgnoreLigatures), "");
+ static_assert (((unsigned int) HB_OT_LAYOUT_GLYPH_PROPS_MARK == (unsigned int) LookupFlag::IgnoreMarks), "");
+
+ switch (klass) {
+ default: return HB_OT_LAYOUT_GLYPH_CLASS_UNCLASSIFIED;
+ case BaseGlyph: return HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH;
+ case LigatureGlyph: return HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
+ case MarkGlyph:
+ klass = get_mark_attachment_type (glyph);
+ return HB_OT_LAYOUT_GLYPH_PROPS_MARK | (klass << 8);
+ }
+ }
+
+ HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
+ hb_face_t *face) const;
+
+ struct accelerator_t
+ {
+ accelerator_t (hb_face_t *face)
+ {
+ table = hb_sanitize_context_t ().reference_table<GDEF> (face);
+ if (unlikely (table->is_blocklisted (table.get_blob (), face)))
+ {
+ hb_blob_destroy (table.get_blob ());
+ table = hb_blob_get_empty ();
+ }
+ }
+ ~accelerator_t () { table.destroy (); }
+
+ unsigned int get_glyph_props (hb_codepoint_t glyph) const
+ {
+ unsigned v;
+
+#ifndef HB_NO_GDEF_CACHE
+ if (glyph_props_cache.get (glyph, &v))
+ return v;
+#endif
+
+ v = table->get_glyph_props (glyph);
+
+#ifndef HB_NO_GDEF_CACHE
+ if (likely (table.get_blob ())) // Don't try setting if we are the null instance!
+ glyph_props_cache.set (glyph, v);
+#endif
+
+ return v;
+
+ }
+
+ hb_blob_ptr_t<GDEF> table;
+#ifndef HB_NO_GDEF_CACHE
+ mutable hb_cache_t<21, 3, 8> glyph_props_cache;
+#endif
+ };
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ { get_lig_caret_list ().collect_variation_indices (c); }
+
+ void remap_layout_variation_indices (const hb_set_t *layout_variation_indices,
+ hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map /* OUT */) const
+ {
+ if (!has_var_store ()) return;
+ if (layout_variation_indices->is_empty ()) return;
+
+ unsigned new_major = 0, new_minor = 0;
+ unsigned last_major = (layout_variation_indices->get_min ()) >> 16;
+ for (unsigned idx : layout_variation_indices->iter ())
+ {
+ uint16_t major = idx >> 16;
+ if (major >= get_var_store ().get_sub_table_count ()) break;
+ if (major != last_major)
+ {
+ new_minor = 0;
+ ++new_major;
+ }
+
+ unsigned new_idx = (new_major << 16) + new_minor;
+ if (!layout_variation_idx_delta_map->has (idx))
+ continue;
+ int delta = hb_second (layout_variation_idx_delta_map->get (idx));
+
+ layout_variation_idx_delta_map->set (idx, hb_pair_t<unsigned, int> (new_idx, delta));
+ ++new_minor;
+ last_major = major;
+ }
+ }
+
+ protected:
+ union {
+ FixedVersion<> version; /* Version identifier */
+ GDEFVersion1_2<SmallTypes> version1;
+#ifndef HB_NO_BEYOND_64K
+ GDEFVersion1_2<MediumTypes> version2;
+#endif
+ } u;
+ public:
+ DEFINE_SIZE_MIN (4);
+};
+
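
The glyph classes above are what hb_ot_layout_get_glyph_class() reports through the public API, and get_glyph_props() packs them for the shaper: a mark glyph in mark-attachment class 3, for example, yields HB_OT_LAYOUT_GLYPH_PROPS_MARK | (3 << 8), class in the high byte, property bits in the low byte. A small sketch of the public-API side (the `face` and `gid` arguments are assumed):

#include <hb.h>
#include <hb-ot.h>

// Illustrative sketch: classify a glyph via the public wrappers over GDEF.
static bool
is_mark_glyph (hb_face_t *face, hb_codepoint_t gid)
{
  if (!hb_ot_layout_has_glyph_classes (face))
    return false;   // the face carries no GDEF glyph-class data
  return hb_ot_layout_get_glyph_class (face, gid) == HB_OT_LAYOUT_GLYPH_CLASS_MARK;
}
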
+struct GDEF_accelerator_t : GDEF::accelerator_t {
+ GDEF_accelerator_t (hb_face_t *face) : GDEF::accelerator_t (face) {}
+};
+
+} /* namespace OT */
+
+
+#endif /* OT_LAYOUT_GDEF_GDEF_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/Anchor.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/Anchor.hh
new file mode 100644
index 0000000000..49e76e7750
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/Anchor.hh
@@ -0,0 +1,83 @@
+#ifndef OT_LAYOUT_GPOS_ANCHOR_HH
+#define OT_LAYOUT_GPOS_ANCHOR_HH
+
+#include "AnchorFormat1.hh"
+#include "AnchorFormat2.hh"
+#include "AnchorFormat3.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct Anchor
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ AnchorFormat1 format1;
+ AnchorFormat2 format2;
+ AnchorFormat3 format3;
+ } u;
+ public:
+ DEFINE_SIZE_UNION (2, format);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (!u.format.sanitize (c)) return_trace (false);
+ switch (u.format) {
+ case 1: return_trace (u.format1.sanitize (c));
+ case 2: return_trace (u.format2.sanitize (c));
+ case 3: return_trace (u.format3.sanitize (c));
+ default:return_trace (true);
+ }
+ }
+
+ void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
+ float *x, float *y) const
+ {
+ *x = *y = 0;
+ switch (u.format) {
+ case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
+ case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
+ case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
+ default: return;
+ }
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ switch (u.format) {
+ case 1: return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
+ case 2:
+ if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ {
+ // AnchorFormat 2 just contains extra hinting information, so
+ // if hints are being dropped, convert to format 1.
+ return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
+ }
+ return_trace (bool (reinterpret_cast<Anchor *> (u.format2.copy (c->serializer))));
+ case 3: return_trace (u.format3.subset (c));
+ default:return_trace (false);
+ }
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ switch (u.format) {
+ case 1: case 2:
+ return;
+ case 3:
+ u.format3.collect_variation_indices (c);
+ return;
+ default: return;
+ }
+ }
+};
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_ANCHOR_HH
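
Anchor::subset() above downgrades AnchorFormat2 to format 1 when the subset plan drops hints; a sketch of requesting that through the public hb-subset API (the kept glyph id and the error handling are this example's assumptions).

#include <hb-subset.h>

// Illustrative sketch: subset with hints dropped, which makes Anchor::subset ()
// above rewrite AnchorFormat2 records as format 1.
static hb_face_t *
subset_without_hints (hb_face_t *face, hb_codepoint_t gid)
{
  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) return nullptr;
  hb_subset_input_set_flags (input, HB_SUBSET_FLAGS_NO_HINTING);
  hb_set_add (hb_subset_input_glyph_set (input), gid);   // keep at least this glyph
  hb_face_t *out = hb_subset_or_fail (face, input);
  hb_subset_input_destroy (input);
  return out;
}
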
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat1.hh
new file mode 100644
index 0000000000..738cc31bbf
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat1.hh
@@ -0,0 +1,46 @@
+#ifndef OT_LAYOUT_GPOS_ANCHORFORMAT1_HH
+#define OT_LAYOUT_GPOS_ANCHORFORMAT1_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct AnchorFormat1
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ FWORD xCoordinate; /* Horizontal value--in design units */
+ FWORD yCoordinate; /* Vertical value--in design units */
+ public:
+ DEFINE_SIZE_STATIC (6);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
+ float *x, float *y) const
+ {
+ hb_font_t *font = c->font;
+ *x = font->em_fscale_x (xCoordinate);
+ *y = font->em_fscale_y (yCoordinate);
+ }
+
+ AnchorFormat1* copy (hb_serialize_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+ AnchorFormat1* out = c->embed<AnchorFormat1> (this);
+ if (!out) return_trace (out);
+ out->format = 1;
+ return_trace (out);
+ }
+};
+
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_ANCHORFORMAT1_HH
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat2.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat2.hh
new file mode 100644
index 0000000000..70b4d19f53
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat2.hh
@@ -0,0 +1,58 @@
+#ifndef OT_LAYOUT_GPOS_ANCHORFORMAT2_HH
+#define OT_LAYOUT_GPOS_ANCHORFORMAT2_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct AnchorFormat2
+{
+
+ protected:
+ HBUINT16 format; /* Format identifier--format = 2 */
+ FWORD xCoordinate; /* Horizontal value--in design units */
+ FWORD yCoordinate; /* Vertical value--in design units */
+ HBUINT16 anchorPoint; /* Index to glyph contour point */
+ public:
+ DEFINE_SIZE_STATIC (8);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this));
+ }
+
+ void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
+ float *x, float *y) const
+ {
+ hb_font_t *font = c->font;
+
+#ifdef HB_NO_HINTING
+ *x = font->em_fscale_x (xCoordinate);
+ *y = font->em_fscale_y (yCoordinate);
+ return;
+#endif
+
+ unsigned int x_ppem = font->x_ppem;
+ unsigned int y_ppem = font->y_ppem;
+ hb_position_t cx = 0, cy = 0;
+ bool ret;
+
+ ret = (x_ppem || y_ppem) &&
+ font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
+ *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
+ *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
+ }
+
+ AnchorFormat2* copy (hb_serialize_context_t *c) const
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (c->embed<AnchorFormat2> (this));
+ }
+};
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_ANCHORFORMAT2_HH
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat3.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat3.hh
new file mode 100644
index 0000000000..e7e3c5c6d1
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorFormat3.hh
@@ -0,0 +1,100 @@
+#ifndef OT_LAYOUT_GPOS_ANCHORFORMAT3_HH
+#define OT_LAYOUT_GPOS_ANCHORFORMAT3_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct AnchorFormat3
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 3 */
+ FWORD xCoordinate; /* Horizontal value--in design units */
+ FWORD yCoordinate; /* Vertical value--in design units */
+ Offset16To<Device>
+ xDeviceTable; /* Offset to Device table for X
+ * coordinate-- from beginning of
+ * Anchor table (may be NULL) */
+ Offset16To<Device>
+ yDeviceTable; /* Offset to Device table for Y
+ * coordinate-- from beginning of
+ * Anchor table (may be NULL) */
+ public:
+ DEFINE_SIZE_STATIC (10);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
+ }
+
+ void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
+ float *x, float *y) const
+ {
+ hb_font_t *font = c->font;
+ *x = font->em_fscale_x (xCoordinate);
+ *y = font->em_fscale_y (yCoordinate);
+
+ if (font->x_ppem || font->num_coords)
+ *x += (this+xDeviceTable).get_x_delta (font, c->var_store, c->var_store_cache);
+ if (font->y_ppem || font->num_coords)
+ *y += (this+yDeviceTable).get_y_delta (font, c->var_store, c->var_store_cache);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!out)) return_trace (false);
+ if (unlikely (!c->serializer->embed (format))) return_trace (false);
+ if (unlikely (!c->serializer->embed (xCoordinate))) return_trace (false);
+ if (unlikely (!c->serializer->embed (yCoordinate))) return_trace (false);
+
+ unsigned x_varidx = xDeviceTable ? (this+xDeviceTable).get_variation_index () : HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
+ if (c->plan->layout_variation_idx_delta_map.has (x_varidx))
+ {
+ int delta = hb_second (c->plan->layout_variation_idx_delta_map.get (x_varidx));
+ if (delta != 0)
+ {
+ if (!c->serializer->check_assign (out->xCoordinate, xCoordinate + delta,
+ HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+ }
+ }
+
+ unsigned y_varidx = yDeviceTable ? (this+yDeviceTable).get_variation_index () : HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
+ if (c->plan->layout_variation_idx_delta_map.has (y_varidx))
+ {
+ int delta = hb_second (c->plan->layout_variation_idx_delta_map.get (y_varidx));
+ if (delta != 0)
+ {
+ if (!c->serializer->check_assign (out->yCoordinate, yCoordinate + delta,
+ HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return_trace (false);
+ }
+ }
+
+ if (c->plan->all_axes_pinned)
+ return_trace (c->serializer->check_assign (out->format, 1, HB_SERIALIZE_ERROR_INT_OVERFLOW));
+
+ if (!c->serializer->embed (xDeviceTable)) return_trace (false);
+ if (!c->serializer->embed (yDeviceTable)) return_trace (false);
+
+ out->xDeviceTable.serialize_copy (c->serializer, xDeviceTable, this, 0, hb_serialize_context_t::Head, &c->plan->layout_variation_idx_delta_map);
+ out->yDeviceTable.serialize_copy (c->serializer, yDeviceTable, this, 0, hb_serialize_context_t::Head, &c->plan->layout_variation_idx_delta_map);
+ return_trace (out);
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ (this+xDeviceTable).collect_variation_indices (c);
+ (this+yDeviceTable).collect_variation_indices (c);
+ }
+};
+
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_ANCHORFORMAT3_HH
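
A worked pass through the instancing branch of subset() above, with illustrative numbers (not from any real font):

// xCoordinate = 520; the X variation index resolves to a pinned-instance
// delta of -12 in layout_variation_idx_delta_map:
//   out->xCoordinate = 520 + (-12) = 508
// With all_axes_pinned the format field is also rewritten to 1, so the
// device-table offsets are no longer emitted for the instanced font.
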
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorMatrix.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorMatrix.hh
new file mode 100644
index 0000000000..c442efa1ea
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/AnchorMatrix.hh
@@ -0,0 +1,77 @@
+#ifndef OT_LAYOUT_GPOS_ANCHORMATRIX_HH
+#define OT_LAYOUT_GPOS_ANCHORMATRIX_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct AnchorMatrix
+{
+ HBUINT16 rows; /* Number of rows */
+ UnsizedArrayOf<Offset16To<Anchor>>
+ matrixZ; /* Matrix of offsets to Anchor tables--
+ * from beginning of AnchorMatrix table */
+ public:
+ DEFINE_SIZE_ARRAY (2, matrixZ);
+
+ bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
+ {
+ TRACE_SANITIZE (this);
+ if (!c->check_struct (this)) return_trace (false);
+ if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
+ unsigned int count = rows * cols;
+ if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
+ for (unsigned int i = 0; i < count; i++)
+ if (!matrixZ[i].sanitize (c, this)) return_trace (false);
+ return_trace (true);
+ }
+
+ const Anchor& get_anchor (unsigned int row, unsigned int col,
+ unsigned int cols, bool *found) const
+ {
+ *found = false;
+ if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
+ *found = !matrixZ[row * cols + col].is_null ();
+ return this+matrixZ[row * cols + col];
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c,
+ Iterator index_iter) const
+ {
+ for (unsigned i : index_iter)
+ (this+matrixZ[i]).collect_variation_indices (c);
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ bool subset (hb_subset_context_t *c,
+ unsigned num_rows,
+ Iterator index_iter) const
+ {
+ TRACE_SUBSET (this);
+
+ auto *out = c->serializer->start_embed (this);
+
+ if (!index_iter) return_trace (false);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ out->rows = num_rows;
+ for (const unsigned i : index_iter)
+ {
+ auto *offset = c->serializer->embed (matrixZ[i]);
+ if (!offset) return_trace (false);
+ offset->serialize_subset (c, matrixZ[i], this);
+ }
+
+ return_trace (true);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_ANCHORMATRIX_HH */
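
A worked example of the row-major addressing used by get_anchor() above (sizes are illustrative):

// index = row * cols + col; e.g. classCount (cols) = 2, base row = 2, class = 1:
//   matrixZ[2 * 2 + 1] == matrixZ[5]
// A null offset at that slot makes get_anchor () report *found = false, so the
// caller can let a later subtable try instead.
static inline unsigned
anchor_matrix_index (unsigned row, unsigned col, unsigned cols)
{ return row * cols + col; }
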
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/ChainContextPos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/ChainContextPos.hh
new file mode 100644
index 0000000000..d551ac2a2b
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/ChainContextPos.hh
@@ -0,0 +1,14 @@
+#ifndef OT_LAYOUT_GPOS_CHAINCONTEXTPOS_HH
+#define OT_LAYOUT_GPOS_CHAINCONTEXTPOS_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct ChainContextPos : ChainContext {};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_CHAINCONTEXTPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/Common.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/Common.hh
new file mode 100644
index 0000000000..408197454f
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/Common.hh
@@ -0,0 +1,33 @@
+#ifndef OT_LAYOUT_GPOS_COMMON_HH
+#define OT_LAYOUT_GPOS_COMMON_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+enum attach_type_t {
+ ATTACH_TYPE_NONE = 0X00,
+
+ /* Each attachment should be either a mark or a cursive; can't be both. */
+ ATTACH_TYPE_MARK = 0X01,
+ ATTACH_TYPE_CURSIVE = 0X02,
+};
+
+/* buffer **position** var allocations */
+#define attach_chain() var.i16[0] /* glyph to which this attaches, relative to the current glyph; negative for going back, positive for forward. */
+#define attach_type() var.u8[2] /* attachment type */
+/* Note: if attach_chain() is zero, the value of attach_type() is irrelevant. */
+
+template<typename Iterator, typename SrcLookup>
+static void SinglePos_serialize (hb_serialize_context_t *c,
+ const SrcLookup *src,
+ Iterator it,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
+ bool all_axes_pinned);
+
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_COMMON_HH
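
A small worked example of the buffer-var encoding above (values are illustrative):

// pos[i].attach_chain () == -2 with pos[i].attach_type () == ATTACH_TYPE_MARK
// means glyph i is a mark attached to the glyph two slots earlier; consumers
// recover the target as
//   unsigned j = (unsigned) ((int) i + pos[i].attach_chain ());   // j == i - 2
// and, per the note above, ignore attach_type () whenever attach_chain () is 0.
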
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/ContextPos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/ContextPos.hh
new file mode 100644
index 0000000000..2a01eaa3a6
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/ContextPos.hh
@@ -0,0 +1,14 @@
+#ifndef OT_LAYOUT_GPOS_CONTEXTPOS_HH
+#define OT_LAYOUT_GPOS_CONTEXTPOS_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct ContextPos : Context {};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_CONTEXTPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/CursivePos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/CursivePos.hh
new file mode 100644
index 0000000000..0105a9b854
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/CursivePos.hh
@@ -0,0 +1,35 @@
+#ifndef OT_LAYOUT_GPOS_CURSIVEPOS_HH
+#define OT_LAYOUT_GPOS_CURSIVEPOS_HH
+
+#include "CursivePosFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct CursivePos
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ CursivePosFormat1 format1;
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_CURSIVEPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/CursivePosFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/CursivePosFormat1.hh
new file mode 100644
index 0000000000..b8773ba0aa
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/CursivePosFormat1.hh
@@ -0,0 +1,301 @@
+#ifndef OT_LAYOUT_GPOS_CURSIVEPOSFORMAT1_HH
+#define OT_LAYOUT_GPOS_CURSIVEPOSFORMAT1_HH
+
+#include "Anchor.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct EntryExitRecord
+{
+ friend struct CursivePosFormat1;
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c,
+ const void *src_base) const
+ {
+ (src_base+entryAnchor).collect_variation_indices (c);
+ (src_base+exitAnchor).collect_variation_indices (c);
+ }
+
+ EntryExitRecord* subset (hb_subset_context_t *c,
+ const void *src_base) const
+ {
+ TRACE_SERIALIZE (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (nullptr);
+
+ out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
+ out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
+ return_trace (out);
+ }
+
+ protected:
+ Offset16To<Anchor>
+ entryAnchor; /* Offset to EntryAnchor table--from
+ * beginning of CursivePos
+ * subtable--may be NULL */
+ Offset16To<Anchor>
+ exitAnchor; /* Offset to ExitAnchor table--from
+ * beginning of CursivePos
+ * subtable--may be NULL */
+ public:
+ DEFINE_SIZE_STATIC (4);
+};
+
+static void
+reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent) {
+ int chain = pos[i].attach_chain(), type = pos[i].attach_type();
+ if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
+ return;
+
+ pos[i].attach_chain() = 0;
+
+ unsigned int j = (int) i + chain;
+
+ /* Stop if we see the new parent in the chain. */
+ if (j == new_parent)
+ return;
+
+ reverse_cursive_minor_offset (pos, j, direction, new_parent);
+
+ if (HB_DIRECTION_IS_HORIZONTAL (direction))
+ pos[j].y_offset = -pos[i].y_offset;
+ else
+ pos[j].x_offset = -pos[i].x_offset;
+
+ pos[j].attach_chain() = -chain;
+ pos[j].attach_type() = type;
+}
+
+
+struct CursivePosFormat1
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ Offset16To<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of subtable */
+ Array16Of<EntryExitRecord>
+ entryExitRecord; /* Array of EntryExit records--in
+ * Coverage Index order */
+ public:
+ DEFINE_SIZE_ARRAY (6, entryExitRecord);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ + hb_zip (this+coverage, entryExitRecord)
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ | hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); })
+ ;
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+
+ const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
+ if (!this_record.entryAnchor) return_trace (false);
+
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.reset_fast (buffer->idx, 1);
+ unsigned unsafe_from;
+ if (unlikely (!skippy_iter.prev (&unsafe_from)))
+ {
+ buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
+ if (!prev_record.exitAnchor)
+ {
+ buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ unsigned int i = skippy_iter.idx;
+ unsigned int j = buffer->idx;
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "cursive attaching glyph at %u to glyph at %u",
+ i, j);
+ }
+
+ buffer->unsafe_to_break (i, j + 1);
+ float entry_x, entry_y, exit_x, exit_y;
+ (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
+ (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
+
+ hb_glyph_position_t *pos = buffer->pos;
+
+ hb_position_t d;
+ /* Main-direction adjustment */
+ switch (c->direction) {
+ case HB_DIRECTION_LTR:
+ pos[i].x_advance = roundf (exit_x) + pos[i].x_offset;
+
+ d = roundf (entry_x) + pos[j].x_offset;
+ pos[j].x_advance -= d;
+ pos[j].x_offset -= d;
+ break;
+ case HB_DIRECTION_RTL:
+ d = roundf (exit_x) + pos[i].x_offset;
+ pos[i].x_advance -= d;
+ pos[i].x_offset -= d;
+
+ pos[j].x_advance = roundf (entry_x) + pos[j].x_offset;
+ break;
+ case HB_DIRECTION_TTB:
+ pos[i].y_advance = roundf (exit_y) + pos[i].y_offset;
+
+ d = roundf (entry_y) + pos[j].y_offset;
+ pos[j].y_advance -= d;
+ pos[j].y_offset -= d;
+ break;
+ case HB_DIRECTION_BTT:
+ d = roundf (exit_y) + pos[i].y_offset;
+ pos[i].y_advance -= d;
+ pos[i].y_offset -= d;
+
+ pos[j].y_advance = roundf (entry_y);
+ break;
+ case HB_DIRECTION_INVALID:
+ default:
+ break;
+ }
+
+ /* Cross-direction adjustment */
+
+ /* We attach child to parent (think graph theory and rooted trees), where
+ * the root stays on the baseline and each node aligns itself against its
+ * parent.
+ *
+ * Optimize things for the case of RightToLeft, as that's most common in
+ * Arabic. */
+ unsigned int child = i;
+ unsigned int parent = j;
+ hb_position_t x_offset = entry_x - exit_x;
+ hb_position_t y_offset = entry_y - exit_y;
+ if (!(c->lookup_props & LookupFlag::RightToLeft))
+ {
+ unsigned int k = child;
+ child = parent;
+ parent = k;
+ x_offset = -x_offset;
+ y_offset = -y_offset;
+ }
+
+ /* If child was already connected to someone else, walk through its old
+ * chain and reverse the link direction, such that the whole tree of its
+ * previous connection now attaches to the new parent. Watch out for the
+ * case where the new parent is on the path of the old chain...
+ */
+ reverse_cursive_minor_offset (pos, child, c->direction, parent);
+
+ pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
+ pos[child].attach_chain() = (int) parent - (int) child;
+ buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
+ if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
+ pos[child].y_offset = y_offset;
+ else
+ pos[child].x_offset = x_offset;
+
+ /* If parent was attached to child, separate them.
+ * https://github.com/harfbuzz/harfbuzz/issues/2469
+ */
+ if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain()))
+ {
+ pos[parent].attach_chain() = 0;
+ if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
+ pos[parent].y_offset = 0;
+ else
+ pos[parent].x_offset = 0;
+ }
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "cursive attached glyph at %u to glyph at %u",
+ i, j);
+ }
+
+ buffer->idx++;
+ return_trace (true);
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ void serialize (hb_subset_context_t *c,
+ Iterator it,
+ const void *src_base)
+ {
+ if (unlikely (!c->serializer->extend_min ((*this)))) return;
+ this->format = 1;
+ this->entryExitRecord.len = it.len ();
+
+ for (const EntryExitRecord& entry_record : + it
+ | hb_map (hb_second))
+ entry_record.subset (c, src_base);
+
+ auto glyphs =
+ + it
+ | hb_map_retains_sorting (hb_first)
+ ;
+
+ coverage.serialize_serialize (c->serializer, glyphs);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!out)) return_trace (false);
+
+ auto it =
+ + hb_zip (this+coverage, entryExitRecord)
+ | hb_filter (glyphset, hb_first)
+ | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
+ { return hb_pair (glyph_map[p.first], p.second);})
+ ;
+
+ bool ret = bool (it);
+ out->serialize (c, it, this);
+ return_trace (ret);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_CURSIVEPOSFORMAT1_HH */
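
A worked pass through the LTR branch of apply() above, with made-up anchor values: glyph i's exit anchor at x = 500, glyph j's entry anchor at x = 100, both offsets initially 0.

// pos[i].x_advance = round (exit_x) + pos[i].x_offset = 500 + 0 = 500
// d                = round (entry_x) + pos[j].x_offset = 100 + 0 = 100
// pos[j].x_advance -= 100;   pos[j].x_offset -= 100;
// Glyph j is now drawn 100 units to the left of its old origin, so its entry
// anchor coincides with glyph i's exit anchor; the cross-direction step then
// stores the remaining y difference in the child's y_offset and records the
// link in attach_chain ()/attach_type () for propagate_attachment_offsets ().
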
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/ExtensionPos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/ExtensionPos.hh
new file mode 100644
index 0000000000..d1808adab4
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/ExtensionPos.hh
@@ -0,0 +1,17 @@
+#ifndef OT_LAYOUT_GPOS_EXTENSIONPOS_HH
+#define OT_LAYOUT_GPOS_EXTENSIONPOS_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct ExtensionPos : Extension<ExtensionPos>
+{
+ typedef struct PosLookupSubTable SubTable;
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_EXTENSIONPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/GPOS.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/GPOS.hh
new file mode 100644
index 0000000000..f4af98b25f
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/GPOS.hh
@@ -0,0 +1,171 @@
+#ifndef OT_LAYOUT_GPOS_GPOS_HH
+#define OT_LAYOUT_GPOS_GPOS_HH
+
+#include "../../../hb-ot-layout-common.hh"
+#include "../../../hb-ot-layout-gsubgpos.hh"
+#include "Common.hh"
+#include "PosLookup.hh"
+
+namespace OT {
+
+using Layout::GPOS_impl::PosLookup;
+
+namespace Layout {
+
+static void
+propagate_attachment_offsets (hb_glyph_position_t *pos,
+ unsigned int len,
+ unsigned int i,
+ hb_direction_t direction,
+ unsigned nesting_level = HB_MAX_NESTING_LEVEL);
+
+/*
+ * GPOS -- Glyph Positioning
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
+ */
+
+struct GPOS : GSUBGPOS
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
+
+ using Lookup = PosLookup;
+
+ const PosLookup& get_lookup (unsigned int i) const
+ { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }
+
+ static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
+ static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
+ static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ hb_subset_layout_context_t l (c, tableTag);
+ return GSUBGPOS::subset<PosLookup> (&l);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (GSUBGPOS::sanitize<PosLookup> (c));
+ }
+
+ HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
+ hb_face_t *face) const;
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++)
+ {
+ if (!c->gpos_lookups->has (i)) continue;
+ const PosLookup &l = get_lookup (i);
+ l.dispatch (c);
+ }
+ }
+
+ void closure_lookups (hb_face_t *face,
+ const hb_set_t *glyphs,
+ hb_set_t *lookup_indexes /* IN/OUT */) const
+ { GSUBGPOS::closure_lookups<PosLookup> (face, glyphs, lookup_indexes); }
+
+ typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
+};
+
+
+static void
+propagate_attachment_offsets (hb_glyph_position_t *pos,
+ unsigned int len,
+ unsigned int i,
+ hb_direction_t direction,
+ unsigned nesting_level)
+{
+ /* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
+ * the offset of the glyph they are attached to. */
+ int chain = pos[i].attach_chain(), type = pos[i].attach_type();
+ if (likely (!chain))
+ return;
+
+ pos[i].attach_chain() = 0;
+
+ unsigned int j = (int) i + chain;
+
+ if (unlikely (j >= len))
+ return;
+
+ if (unlikely (!nesting_level))
+ return;
+
+ propagate_attachment_offsets (pos, len, j, direction, nesting_level - 1);
+
+ assert (!!(type & GPOS_impl::ATTACH_TYPE_MARK) ^ !!(type & GPOS_impl::ATTACH_TYPE_CURSIVE));
+
+ if (type & GPOS_impl::ATTACH_TYPE_CURSIVE)
+ {
+ if (HB_DIRECTION_IS_HORIZONTAL (direction))
+ pos[i].y_offset += pos[j].y_offset;
+ else
+ pos[i].x_offset += pos[j].x_offset;
+ }
+ else /*if (type & GPOS_impl::ATTACH_TYPE_MARK)*/
+ {
+ pos[i].x_offset += pos[j].x_offset;
+ pos[i].y_offset += pos[j].y_offset;
+
+ assert (j < i);
+ if (HB_DIRECTION_IS_FORWARD (direction))
+ for (unsigned int k = j; k < i; k++) {
+ pos[i].x_offset -= pos[k].x_advance;
+ pos[i].y_offset -= pos[k].y_advance;
+ }
+ else
+ for (unsigned int k = j + 1; k < i + 1; k++) {
+ pos[i].x_offset += pos[k].x_advance;
+ pos[i].y_offset += pos[k].y_advance;
+ }
+ }
+}
+
+void
+GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
+{
+ unsigned int count = buffer->len;
+ for (unsigned int i = 0; i < count; i++)
+ buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
+}
+
+void
+GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
+{
+ //_hb_buffer_assert_gsubgpos_vars (buffer);
+}
+
+void
+GPOS::position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer)
+{
+ _hb_buffer_assert_gsubgpos_vars (buffer);
+
+ unsigned int len;
+ hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
+ hb_direction_t direction = buffer->props.direction;
+
+ /* Handle attachments */
+ if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
+ for (unsigned i = 0; i < len; i++)
+ propagate_attachment_offsets (pos, len, i, direction);
+
+ if (unlikely (font->slant))
+ {
+ for (unsigned i = 0; i < len; i++)
+ if (unlikely (pos[i].y_offset))
+ pos[i].x_offset += roundf (font->slant_xy * pos[i].y_offset);
+ }
+}
+
+}
+
+struct GPOS_accelerator_t : Layout::GPOS::accelerator_t {
+ GPOS_accelerator_t (hb_face_t *face) : Layout::GPOS::accelerator_t (face) {}
+};
+
+}
+
+#endif /* OT_LAYOUT_GPOS_GPOS_HH */
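
A minimal end-to-end sketch of driving the positioning code above through the public shaping entry point; the font path and input text are placeholders.

#include <hb.h>
#include <cstdio>

int main ()
{
  hb_blob_t   *blob   = hb_blob_create_from_file ("font.ttf");  // placeholder path
  hb_face_t   *face   = hb_face_create (blob, 0);
  hb_font_t   *font   = hb_font_create (face);
  hb_buffer_t *buffer = hb_buffer_create ();

  hb_buffer_add_utf8 (buffer, "example", -1, 0, -1);            // placeholder text
  hb_buffer_guess_segment_properties (buffer);
  hb_shape (font, buffer, nullptr, 0);          // runs GSUB, then the GPOS above

  unsigned len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  for (unsigned i = 0; i < len; i++)            // attachments surface as offsets
    std::printf ("%u: advance %d, offset (%d, %d)\n",
                 i, pos[i].x_advance, pos[i].x_offset, pos[i].y_offset);

  hb_buffer_destroy (buffer);
  hb_font_destroy (font);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}
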
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/LigatureArray.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/LigatureArray.hh
new file mode 100644
index 0000000000..a2d807cc32
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/LigatureArray.hh
@@ -0,0 +1,56 @@
+#ifndef OT_LAYOUT_GPOS_LIGATUREARRAY_HH
+#define OT_LAYOUT_GPOS_LIGATUREARRAY_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+
+typedef AnchorMatrix LigatureAttach; /* component-major--
+ * in order of writing direction--,
+ * mark-minor--
+ * ordered by class--zero-based. */
+
+/* Array of LigatureAttach tables ordered by LigatureCoverage Index */
+struct LigatureArray : List16OfOffset16To<LigatureAttach>
+{
+ template <typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ bool subset (hb_subset_context_t *c,
+ Iterator coverage,
+ unsigned class_count,
+ const hb_map_t *klass_mapping) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+
+ auto *out = c->serializer->start_embed (this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ for (const auto _ : + hb_zip (coverage, *this)
+ | hb_filter (glyphset, hb_first))
+ {
+ auto *matrix = out->serialize_append (c->serializer);
+ if (unlikely (!matrix)) return_trace (false);
+
+ const LigatureAttach& src = (this + _.second);
+ auto indexes =
+ + hb_range (src.rows * class_count)
+ | hb_filter ([=] (unsigned index) { return klass_mapping->has (index % class_count); })
+ ;
+ matrix->serialize_subset (c,
+ _.second,
+ this,
+ src.rows,
+ indexes);
+ }
+ return_trace (this->len);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_LIGATUREARRAY_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkArray.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkArray.hh
new file mode 100644
index 0000000000..ff43ffb8c5
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkArray.hh
@@ -0,0 +1,128 @@
+#ifndef OT_LAYOUT_GPOS_MARKARRAY_HH
+#define OT_LAYOUT_GPOS_MARKARRAY_HH
+
+#include "AnchorMatrix.hh"
+#include "MarkRecord.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct MarkArray : Array16Of<MarkRecord> /* Array of MarkRecords--in Coverage order */
+{
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (Array16Of<MarkRecord>::sanitize (c, this));
+ }
+
+ bool apply (hb_ot_apply_context_t *c,
+ unsigned int mark_index, unsigned int glyph_index,
+ const AnchorMatrix &anchors, unsigned int class_count,
+ unsigned int glyph_pos) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ const MarkRecord &record = Array16Of<MarkRecord>::operator[](mark_index);
+ unsigned int mark_class = record.klass;
+
+ const Anchor& mark_anchor = this + record.markAnchor;
+ bool found;
+ const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
+ /* If this subtable doesn't have an anchor for this base and this class,
+ * return false so that the subsequent subtables get a chance at it. */
+ if (unlikely (!found)) return_trace (false);
+
+ float mark_x, mark_y, base_x, base_y;
+
+ buffer->unsafe_to_break (glyph_pos, buffer->idx + 1);
+ mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
+ glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "attaching mark glyph at %u to glyph at %u",
+ c->buffer->idx, glyph_pos);
+ }
+
+ hb_glyph_position_t &o = buffer->cur_pos();
+ o.x_offset = roundf (base_x - mark_x);
+ o.y_offset = roundf (base_y - mark_y);
+ o.attach_type() = ATTACH_TYPE_MARK;
+ o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
+ buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "attached mark glyph at %u to glyph at %u",
+ c->buffer->idx, glyph_pos);
+ }
+
+ buffer->idx++;
+ return_trace (true);
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ bool subset (hb_subset_context_t *c,
+ Iterator coverage,
+ const hb_map_t *klass_mapping) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+
+ auto* out = c->serializer->start_embed (this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ auto mark_iter =
+ + hb_zip (coverage, this->iter ())
+ | hb_filter (glyphset, hb_first)
+ | hb_map (hb_second)
+ ;
+
+ unsigned new_length = 0;
+ for (const auto& mark_record : mark_iter) {
+ if (unlikely (!mark_record.subset (c, this, klass_mapping)))
+ return_trace (false);
+ new_length++;
+ }
+
+ if (unlikely (!c->serializer->check_assign (out->len, new_length,
+ HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)))
+ return_trace (false);
+
+ return_trace (true);
+ }
+};
+
+HB_INTERNAL inline
+void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage,
+ const MarkArray &mark_array,
+ const hb_set_t &glyphset,
+ hb_map_t* klass_mapping /* INOUT */)
+{
+ hb_set_t orig_classes;
+
+ + hb_zip (mark_coverage, mark_array)
+ | hb_filter (glyphset, hb_first)
+ | hb_map (hb_second)
+ | hb_map (&MarkRecord::get_class)
+ | hb_sink (orig_classes)
+ ;
+
+ unsigned idx = 0;
+ for (auto klass : orig_classes.iter ())
+ {
+ if (klass_mapping->has (klass)) continue;
+ klass_mapping->set (klass, idx);
+ idx++;
+ }
+}
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKARRAY_HH */
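
The Markclass_closure_and_remap_indexes helper above collects the mark classes still referenced by the retained glyphs and renumbers them densely from zero, in ascending order of the original class values. A minimal standalone sketch of that remapping, with std::set and std::map standing in for hb_set_t and hb_map_t (the names below are illustrative, not HarfBuzz API):

    #include <cstdio>
    #include <map>
    #include <set>

    // Densely renumber the surviving mark classes, preserving ascending order,
    // mirroring the loop at the end of Markclass_closure_and_remap_indexes.
    static std::map<unsigned, unsigned>
    remap_classes (const std::set<unsigned> &surviving_classes)
    {
      std::map<unsigned, unsigned> klass_mapping;
      unsigned idx = 0;
      for (unsigned klass : surviving_classes)
        klass_mapping[klass] = idx++;
      return klass_mapping;
    }

    int main ()
    {
      // Suppose only mark classes 0, 3 and 7 survive subsetting.
      std::set<unsigned> surviving {0, 3, 7};
      for (const auto &p : remap_classes (surviving))
        std::printf ("old class %u -> new class %u\n", p.first, p.second);
      // Prints 0->0, 3->1, 7->2; the subset table's classCount becomes 3.
      return 0;
    }
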
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh
new file mode 100644
index 0000000000..cd2fc7ccfd
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePos.hh
@@ -0,0 +1,41 @@
+#ifndef OT_LAYOUT_GPOS_MARKBASEPOS_HH
+#define OT_LAYOUT_GPOS_MARKBASEPOS_HH
+
+#include "MarkBasePosFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct MarkBasePos
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ MarkBasePosFormat1_2<SmallTypes> format1;
+#ifndef HB_NO_BEYOND_64K
+ MarkBasePosFormat1_2<MediumTypes> format2;
+#endif
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKBASEPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh
new file mode 100644
index 0000000000..eb4712049b
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkBasePosFormat1.hh
@@ -0,0 +1,244 @@
+#ifndef OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH
+#define OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH
+
+#include "MarkArray.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+typedef AnchorMatrix BaseArray; /* base-major--
+ * in order of BaseCoverage Index--,
+ * mark-minor--
+ * ordered by class--zero-based. */
+
+template <typename Types>
+struct MarkBasePosFormat1_2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ markCoverage; /* Offset to MarkCoverage table--from
+ * beginning of MarkBasePos subtable */
+ typename Types::template OffsetTo<Coverage>
+ baseCoverage; /* Offset to BaseCoverage table--from
+ * beginning of MarkBasePos subtable */
+ HBUINT16 classCount; /* Number of classes defined for marks */
+ typename Types::template OffsetTo<MarkArray>
+ markArray; /* Offset to MarkArray table--from
+ * beginning of MarkBasePos subtable */
+ typename Types::template OffsetTo<BaseArray>
+ baseArray; /* Offset to BaseArray table--from
+ * beginning of MarkBasePos subtable */
+
+ public:
+ DEFINE_SIZE_STATIC (4 + 4 * Types::size);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ markCoverage.sanitize (c, this) &&
+ baseCoverage.sanitize (c, this) &&
+ markArray.sanitize (c, this) &&
+ baseArray.sanitize (c, this, (unsigned int) classCount));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ return (this+markCoverage).intersects (glyphs) &&
+ (this+baseCoverage).intersects (glyphs);
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ + hb_zip (this+markCoverage, this+markArray)
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
+ ;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
+
+ unsigned basecount = (this+baseArray).rows;
+ auto base_iter =
+ + hb_zip (this+baseCoverage, hb_range (basecount))
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ ;
+
+ hb_sorted_vector_t<unsigned> base_indexes;
+ for (const unsigned row : base_iter)
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (base_indexes)
+ ;
+ }
+ (this+baseArray).collect_variation_indices (c, base_indexes.iter ());
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
+ }
+
+ const Coverage &get_coverage () const { return this+markCoverage; }
+
+ static inline bool accept (hb_buffer_t *buffer, unsigned idx)
+ {
+ /* We only want to attach to the first of a MultipleSubst sequence.
+ * https://github.com/harfbuzz/harfbuzz/issues/740
+ * Reject others...
+ * ...but stop if we find a mark in the MultipleSubst sequence:
+ * https://github.com/harfbuzz/harfbuzz/issues/1020 */
+ return !_hb_glyph_info_multiplied (&buffer->info[idx]) ||
+ 0 == _hb_glyph_info_get_lig_comp (&buffer->info[idx]) ||
+ (idx == 0 ||
+ _hb_glyph_info_is_mark (&buffer->info[idx - 1]) ||
+ !_hb_glyph_info_multiplied (&buffer->info[idx - 1]) ||
+ _hb_glyph_info_get_lig_id (&buffer->info[idx]) !=
+ _hb_glyph_info_get_lig_id (&buffer->info[idx - 1]) ||
+ _hb_glyph_info_get_lig_comp (&buffer->info[idx]) !=
+ _hb_glyph_info_get_lig_comp (&buffer->info[idx - 1]) + 1
+ );
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
+ if (likely (mark_index == NOT_COVERED)) return_trace (false);
+
+ /* Now we search backwards for a non-mark glyph.
+ * We don't use skippy_iter.prev() to avoid O(n^2) behavior. */
+
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
+
+ if (c->last_base_until > buffer->idx)
+ {
+ c->last_base_until = 0;
+ c->last_base = -1;
+ }
+ unsigned j;
+ for (j = buffer->idx; j > c->last_base_until; j--)
+ {
+ auto match = skippy_iter.match (buffer->info[j - 1]);
+ if (match == skippy_iter.MATCH)
+ {
+ // https://github.com/harfbuzz/harfbuzz/issues/4124
+ if (!accept (buffer, j - 1) &&
+ NOT_COVERED == (this+baseCoverage).get_coverage (buffer->info[j - 1].codepoint))
+ match = skippy_iter.SKIP;
+ }
+ if (match == skippy_iter.MATCH)
+ {
+ c->last_base = (signed) j - 1;
+ break;
+ }
+ }
+ c->last_base_until = buffer->idx;
+ if (c->last_base == -1)
+ {
+ buffer->unsafe_to_concat_from_outbuffer (0, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ unsigned idx = (unsigned) c->last_base;
+
+ /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
+ //if (!_hb_glyph_info_is_base_glyph (&buffer->info[idx])) { return_trace (false); }
+
+ unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[idx].codepoint);
+ if (base_index == NOT_COVERED)
+ {
+ buffer->unsafe_to_concat_from_outbuffer (idx, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, idx));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
+
+ if (!klass_mapping.get_population ()) return_trace (false);
+ out->classCount = klass_mapping.get_population ();
+
+ auto mark_iter =
+ + hb_zip (this+markCoverage, this+markArray)
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + mark_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->markCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ out->markArray.serialize_subset (c, markArray, this,
+ (this+markCoverage).iter (),
+ &klass_mapping);
+
+ unsigned basecount = (this+baseArray).rows;
+ auto base_iter =
+ + hb_zip (this+baseCoverage, hb_range (basecount))
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ new_coverage.reset ();
+ + base_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->baseCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ hb_sorted_vector_t<unsigned> base_indexes;
+ for (const unsigned row : + base_iter
+ | hb_map (hb_second))
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (base_indexes)
+ ;
+ }
+
+ out->baseArray.serialize_subset (c, baseArray, this,
+ base_iter.len (),
+ base_indexes.iter ());
+
+ return_trace (true);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKBASEPOSFORMAT1_HH */
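
In MarkBasePosFormat1, BaseArray is an AnchorMatrix laid out row-major: one row per base glyph in BaseCoverage order and one column per mark class, so the flat index of an anchor cell is row * classCount + col. The subset and collect_variation_indices paths above build their index lists exactly this way; here is the arithmetic in isolation (std::vector stands in for hb_sorted_vector_t, and the sample numbers are invented):

    #include <cstdio>
    #include <vector>

    // Flat index of an anchor cell in a row-major AnchorMatrix.
    static unsigned anchor_cell (unsigned row, unsigned col, unsigned class_count)
    { return row * class_count + col; }

    int main ()
    {
      unsigned class_count = 4;                    // classCount in the subtable
      std::vector<unsigned> retained_rows {0, 2};  // surviving BaseCoverage indexes
      std::vector<unsigned> retained_cols {1, 3};  // surviving (remapped-from) mark classes

      std::vector<unsigned> base_indexes;
      for (unsigned row : retained_rows)
        for (unsigned col : retained_cols)
          base_indexes.push_back (anchor_cell (row, col, class_count));

      for (unsigned i : base_indexes) std::printf ("%u ", i);  // 1 3 9 11
      std::printf ("\n");
      return 0;
    }
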
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh
new file mode 100644
index 0000000000..739c325411
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPos.hh
@@ -0,0 +1,41 @@
+#ifndef OT_LAYOUT_GPOS_MARKLIGPOS_HH
+#define OT_LAYOUT_GPOS_MARKLIGPOS_HH
+
+#include "MarkLigPosFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct MarkLigPos
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ MarkLigPosFormat1_2<SmallTypes> format1;
+#ifndef HB_NO_BEYOND_64K
+ MarkLigPosFormat1_2<MediumTypes> format2;
+#endif
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKLIGPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPosFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPosFormat1.hh
new file mode 100644
index 0000000000..92e83a0e99
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkLigPosFormat1.hh
@@ -0,0 +1,223 @@
+#ifndef OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH
+#define OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH
+
+#include "LigatureArray.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+
+template <typename Types>
+struct MarkLigPosFormat1_2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ markCoverage; /* Offset to Mark Coverage table--from
+ * beginning of MarkLigPos subtable */
+ typename Types::template OffsetTo<Coverage>
+ ligatureCoverage; /* Offset to Ligature Coverage
+ * table--from beginning of MarkLigPos
+ * subtable */
+ HBUINT16 classCount; /* Number of defined mark classes */
+ typename Types::template OffsetTo<MarkArray>
+ markArray; /* Offset to MarkArray table--from
+ * beginning of MarkLigPos subtable */
+ typename Types::template OffsetTo<LigatureArray>
+ ligatureArray; /* Offset to LigatureArray table--from
+ * beginning of MarkLigPos subtable */
+ public:
+ DEFINE_SIZE_STATIC (4 + 4 * Types::size);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ markCoverage.sanitize (c, this) &&
+ ligatureCoverage.sanitize (c, this) &&
+ markArray.sanitize (c, this) &&
+ ligatureArray.sanitize (c, this, (unsigned int) classCount));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ return (this+markCoverage).intersects (glyphs) &&
+ (this+ligatureCoverage).intersects (glyphs);
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ + hb_zip (this+markCoverage, this+markArray)
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
+ ;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
+
+ unsigned ligcount = (this+ligatureArray).len;
+ auto lig_iter =
+ + hb_zip (this+ligatureCoverage, hb_range (ligcount))
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ ;
+
+ const LigatureArray& lig_array = this+ligatureArray;
+ for (const unsigned i : lig_iter)
+ {
+ hb_sorted_vector_t<unsigned> lig_indexes;
+ unsigned row_count = lig_array[i].rows;
+ for (unsigned row : + hb_range (row_count))
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (lig_indexes)
+ ;
+ }
+
+ lig_array[i].collect_variation_indices (c, lig_indexes.iter ());
+ }
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
+ }
+
+ const Coverage &get_coverage () const { return this+markCoverage; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
+ if (likely (mark_index == NOT_COVERED)) return_trace (false);
+
+ /* Now we search backwards for a non-mark glyph */
+
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
+
+ if (c->last_base_until > buffer->idx)
+ {
+ c->last_base_until = 0;
+ c->last_base = -1;
+ }
+ unsigned j;
+ for (j = buffer->idx; j > c->last_base_until; j--)
+ {
+ auto match = skippy_iter.match (buffer->info[j - 1]);
+ if (match == skippy_iter.MATCH)
+ {
+ c->last_base = (signed) j - 1;
+ break;
+ }
+ }
+ c->last_base_until = buffer->idx;
+ if (c->last_base == -1)
+ {
+ buffer->unsafe_to_concat_from_outbuffer (0, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ unsigned idx = (unsigned) c->last_base;
+
+ /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
+ //if (!_hb_glyph_info_is_ligature (&buffer->info[idx])) { return_trace (false); }
+
+ unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[idx].codepoint);
+ if (lig_index == NOT_COVERED)
+ {
+ buffer->unsafe_to_concat_from_outbuffer (idx, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ const LigatureArray& lig_array = this+ligatureArray;
+ const LigatureAttach& lig_attach = lig_array[lig_index];
+
+ /* Find component to attach to */
+ unsigned int comp_count = lig_attach.rows;
+ if (unlikely (!comp_count))
+ {
+ buffer->unsafe_to_concat_from_outbuffer (idx, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ /* We must now check whether the ligature ID of the current mark glyph
+ * is identical to the ligature ID of the found ligature. If yes, we
+ * can directly use the component index. If not, we attach the mark
+ * glyph to the last component of the ligature. */
+ unsigned int comp_index;
+ unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[idx]);
+ unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
+ unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
+ if (lig_id && lig_id == mark_id && mark_comp > 0)
+ comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
+ else
+ comp_index = comp_count - 1;
+
+ return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, idx));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
+
+ if (!klass_mapping.get_population ()) return_trace (false);
+ out->classCount = klass_mapping.get_population ();
+
+ auto mark_iter =
+ + hb_zip (this+markCoverage, this+markArray)
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ auto new_mark_coverage =
+ + mark_iter
+ | hb_map_retains_sorting (hb_first)
+ | hb_map_retains_sorting (glyph_map)
+ ;
+
+ if (!out->markCoverage.serialize_serialize (c->serializer, new_mark_coverage))
+ return_trace (false);
+
+ out->markArray.serialize_subset (c, markArray, this,
+ (this+markCoverage).iter (),
+ &klass_mapping);
+
+ auto new_ligature_coverage =
+ + hb_iter (this + ligatureCoverage)
+ | hb_filter (glyphset)
+ | hb_map_retains_sorting (glyph_map)
+ ;
+
+ if (!out->ligatureCoverage.serialize_serialize (c->serializer, new_ligature_coverage))
+ return_trace (false);
+
+ out->ligatureArray.serialize_subset (c, ligatureArray, this,
+ hb_iter (this+ligatureCoverage), classCount, &klass_mapping);
+
+ return_trace (true);
+ }
+
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKLIGPOSFORMAT1_HH */
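
The component-selection rule in MarkLigPosFormat1::apply reduces to: if the mark carries the same ligature id as the matched ligature and has a nonzero component, attach to that component (clamped to the component count); otherwise attach to the last component. The same rule as a standalone function, with plain unsigned parameters in place of the buffer's glyph-info accessors:

    #include <algorithm>
    #include <cassert>

    // Mirrors the comp_index computation in MarkLigPosFormat1_2::apply.
    static unsigned
    pick_component (unsigned comp_count,  // rows of the LigatureAttach table, nonzero
                    unsigned lig_id,      // ligature id of the matched ligature glyph
                    unsigned mark_id,     // ligature id carried by the mark
                    unsigned mark_comp)   // component carried by the mark (1-based, 0 = none)
    {
      if (lig_id && lig_id == mark_id && mark_comp > 0)
        return std::min (comp_count, mark_comp) - 1;  // attach to the matching component
      return comp_count - 1;                          // otherwise: the last component
    }

    int main ()
    {
      assert (pick_component (3, 5, 5, 2) == 1);  // same ligature, second component
      assert (pick_component (3, 5, 5, 9) == 2);  // out-of-range component is clamped
      assert (pick_component (3, 5, 7, 2) == 2);  // different ligature id: last component
      return 0;
    }
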
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh
new file mode 100644
index 0000000000..cddd2a3d50
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPos.hh
@@ -0,0 +1,42 @@
+#ifndef OT_LAYOUT_GPOS_MARKMARKPOS_HH
+#define OT_LAYOUT_GPOS_MARKMARKPOS_HH
+
+#include "MarkMarkPosFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct MarkMarkPos
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ MarkMarkPosFormat1_2<SmallTypes> format1;
+#ifndef HB_NO_BEYOND_64K
+ MarkMarkPosFormat1_2<MediumTypes> format2;
+#endif
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKMARKPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh
new file mode 100644
index 0000000000..9dae5ce5da
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkMarkPosFormat1.hh
@@ -0,0 +1,228 @@
+#ifndef OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH
+#define OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH
+
+#include "MarkMarkPosFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+typedef AnchorMatrix Mark2Array; /* mark2-major--
+ * in order of Mark2Coverage Index--,
+ * mark1-minor--
+ * ordered by class--zero-based. */
+
+template <typename Types>
+struct MarkMarkPosFormat1_2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ mark1Coverage; /* Offset to Combining Mark1 Coverage
+ * table--from beginning of MarkMarkPos
+ * subtable */
+ typename Types::template OffsetTo<Coverage>
+ mark2Coverage; /* Offset to Combining Mark2 Coverage
+ * table--from beginning of MarkMarkPos
+ * subtable */
+ HBUINT16 classCount; /* Number of defined mark classes */
+ typename Types::template OffsetTo<MarkArray>
+ mark1Array; /* Offset to Mark1Array table--from
+ * beginning of MarkMarkPos subtable */
+ typename Types::template OffsetTo<Mark2Array>
+ mark2Array; /* Offset to Mark2Array table--from
+ * beginning of MarkMarkPos subtable */
+ public:
+ DEFINE_SIZE_STATIC (4 + 4 * Types::size);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ mark1Coverage.sanitize (c, this) &&
+ mark2Coverage.sanitize (c, this) &&
+ mark1Array.sanitize (c, this) &&
+ mark2Array.sanitize (c, this, (unsigned int) classCount));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ return (this+mark1Coverage).intersects (glyphs) &&
+ (this+mark2Coverage).intersects (glyphs);
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ + hb_zip (this+mark1Coverage, this+mark1Array)
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
+ ;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);
+
+ unsigned mark2_count = (this+mark2Array).rows;
+ auto mark2_iter =
+ + hb_zip (this+mark2Coverage, hb_range (mark2_count))
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ ;
+
+ hb_sorted_vector_t<unsigned> mark2_indexes;
+ for (const unsigned row : mark2_iter)
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (mark2_indexes)
+ ;
+ }
+ (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
+ }
+
+ const Coverage &get_coverage () const { return this+mark1Coverage; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
+ if (likely (mark1_index == NOT_COVERED)) return_trace (false);
+
+    /* Now we search backwards for a suitable mark glyph, stopping at a non-mark glyph. */
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.reset_fast (buffer->idx, 1);
+ skippy_iter.set_lookup_props (c->lookup_props & ~(uint32_t)LookupFlag::IgnoreFlags);
+ unsigned unsafe_from;
+ if (unlikely (!skippy_iter.prev (&unsafe_from)))
+ {
+ buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ if (likely (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])))
+ {
+ buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ unsigned int j = skippy_iter.idx;
+
+ unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
+ unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
+ unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
+ unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
+
+ if (likely (id1 == id2))
+ {
+ if (id1 == 0) /* Marks belonging to the same base. */
+ goto good;
+ else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
+ goto good;
+ }
+ else
+ {
+    /* If the ligature ids don't match, one of the marks may itself be
+     * a ligature, in which case we match. */
+ if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
+ goto good;
+ }
+
+ /* Didn't match. */
+ buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
+ return_trace (false);
+
+ good:
+ unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
+ if (mark2_index == NOT_COVERED)
+ {
+ buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
+ return_trace (false);
+ }
+
+ return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_map_t klass_mapping;
+ Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
+
+ if (!klass_mapping.get_population ()) return_trace (false);
+ out->classCount = klass_mapping.get_population ();
+
+ auto mark1_iter =
+ + hb_zip (this+mark1Coverage, this+mark1Array)
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + mark1_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ out->mark1Array.serialize_subset (c, mark1Array, this,
+ (this+mark1Coverage).iter (),
+ &klass_mapping);
+
+ unsigned mark2count = (this+mark2Array).rows;
+ auto mark2_iter =
+ + hb_zip (this+mark2Coverage, hb_range (mark2count))
+ | hb_filter (glyphset, hb_first)
+ ;
+
+ new_coverage.reset ();
+ + mark2_iter
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ if (!out->mark2Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
+ return_trace (false);
+
+ hb_sorted_vector_t<unsigned> mark2_indexes;
+ for (const unsigned row : + mark2_iter
+ | hb_map (hb_second))
+ {
+ + hb_range ((unsigned) classCount)
+ | hb_filter (klass_mapping)
+ | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
+ | hb_sink (mark2_indexes)
+ ;
+ }
+
+ out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());
+
+ return_trace (true);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKMARKPOSFORMAT1_HH */
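
The good/bail decision in MarkMarkPosFormat1::apply accepts the attachment when both marks sit on the same base (both ligature ids zero), when they sit on the same component of the same ligature, or when one of the two marks is itself a ligature (nonzero ligature id with component zero). The predicate in isolation:

    #include <cassert>

    // Mirrors the matching logic in MarkMarkPosFormat1_2::apply.
    static bool
    marks_may_attach (unsigned id1, unsigned comp1,  // current mark: lig id / component
                      unsigned id2, unsigned comp2)  // previous mark: lig id / component
    {
      if (id1 == id2)
        return id1 == 0          /* both attached to the same base */
            || comp1 == comp2;   /* same component of the same ligature */
      /* Different ligature ids: accept if one of the marks is itself a ligature. */
      return (id1 > 0 && !comp1) || (id2 > 0 && !comp2);
    }

    int main ()
    {
      assert ( marks_may_attach (0, 0, 0, 0));  // two marks on the same base
      assert ( marks_may_attach (4, 2, 4, 2));  // same ligature, same component
      assert (!marks_may_attach (4, 1, 4, 2));  // same ligature, different components
      assert ( marks_may_attach (3, 0, 5, 2));  // first mark is itself a ligature
      return 0;
    }
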
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/MarkRecord.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkRecord.hh
new file mode 100644
index 0000000000..a7d489d2a5
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/MarkRecord.hh
@@ -0,0 +1,52 @@
+#ifndef OT_LAYOUT_GPOS_MARKRECORD_HH
+#define OT_LAYOUT_GPOS_MARKRECORD_HH
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct MarkRecord
+{
+ friend struct MarkArray;
+
+ public:
+ HBUINT16 klass; /* Class defined for this mark */
+ Offset16To<Anchor>
+ markAnchor; /* Offset to Anchor table--from
+ * beginning of MarkArray table */
+ public:
+ DEFINE_SIZE_STATIC (4);
+
+ unsigned get_class () const { return (unsigned) klass; }
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
+ }
+
+ MarkRecord *subset (hb_subset_context_t *c,
+ const void *src_base,
+ const hb_map_t *klass_mapping) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->embed (this);
+ if (unlikely (!out)) return_trace (nullptr);
+
+ out->klass = klass_mapping->get (klass);
+ out->markAnchor.serialize_subset (c, markAnchor, src_base);
+ return_trace (out);
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c,
+ const void *src_base) const
+ {
+ (src_base+markAnchor).collect_variation_indices (c);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_MARKRECORD_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PairPos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PairPos.hh
new file mode 100644
index 0000000000..c13d4f4894
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PairPos.hh
@@ -0,0 +1,46 @@
+#ifndef OT_LAYOUT_GPOS_PAIRPOS_HH
+#define OT_LAYOUT_GPOS_PAIRPOS_HH
+
+#include "PairPosFormat1.hh"
+#include "PairPosFormat2.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct PairPos
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ PairPosFormat1_3<SmallTypes> format1;
+ PairPosFormat2_4<SmallTypes> format2;
+#ifndef HB_NO_BEYOND_64K
+ PairPosFormat1_3<MediumTypes> format3;
+ PairPosFormat2_4<MediumTypes> format4;
+#endif
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
+ case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_PAIRPOS_HH
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh
new file mode 100644
index 0000000000..714b4bec72
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat1.hh
@@ -0,0 +1,217 @@
+#ifndef OT_LAYOUT_GPOS_PAIRPOSFORMAT1_HH
+#define OT_LAYOUT_GPOS_PAIRPOSFORMAT1_HH
+
+#include "PairSet.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+
+template <typename Types>
+struct PairPosFormat1_3
+{
+ using PairSet = GPOS_impl::PairSet<Types>;
+ using PairValueRecord = GPOS_impl::PairValueRecord<Types>;
+
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of subtable */
+ ValueFormat valueFormat[2]; /* [0] Defines the types of data in
+ * ValueRecord1--for the first glyph
+ * in the pair--may be zero (0) */
+ /* [1] Defines the types of data in
+ * ValueRecord2--for the second glyph
+ * in the pair--may be zero (0) */
+ Array16Of<typename Types::template OffsetTo<PairSet>>
+ pairSet; /* Array of PairSet tables
+ * ordered by Coverage Index */
+ public:
+ DEFINE_SIZE_ARRAY (8 + Types::size, pairSet);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+
+ if (!c->check_struct (this)) return_trace (false);
+
+ unsigned int len1 = valueFormat[0].get_len ();
+ unsigned int len2 = valueFormat[1].get_len ();
+ typename PairSet::sanitize_closure_t closure =
+ {
+ valueFormat,
+ len1,
+ PairSet::get_size (len1, len2)
+ };
+
+ return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ auto &cov = this+coverage;
+
+ if (pairSet.len > glyphs->get_population () * hb_bit_storage ((unsigned) pairSet.len) / 4)
+ {
+ for (hb_codepoint_t g : glyphs->iter())
+ {
+ unsigned i = cov.get_coverage (g);
+ if ((this+pairSet[i]).intersects (glyphs, valueFormat))
+ return true;
+ }
+ return false;
+ }
+
+ return
+ + hb_zip (cov, pairSet)
+ | hb_filter (*glyphs, hb_first)
+ | hb_map (hb_second)
+ | hb_map ([glyphs, this] (const typename Types::template OffsetTo<PairSet> &_)
+ { return (this+_).intersects (glyphs, valueFormat); })
+ | hb_any
+ ;
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return;
+
+ auto it =
+ + hb_zip (this+coverage, pairSet)
+ | hb_filter (c->glyph_set, hb_first)
+ | hb_map (hb_second)
+ ;
+
+ if (!it) return;
+ + it
+ | hb_map (hb_add (this))
+ | hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); })
+ ;
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ unsigned int count = pairSet.len;
+ for (unsigned int i = 0; i < count; i++)
+ (this+pairSet[i]).collect_glyphs (c, valueFormat);
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.reset_fast (buffer->idx, 1);
+ unsigned unsafe_to;
+ if (unlikely (!skippy_iter.next (&unsafe_to)))
+ {
+ buffer->unsafe_to_concat (buffer->idx, unsafe_to);
+ return_trace (false);
+ }
+
+ return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+ out->valueFormat[0] = valueFormat[0];
+ out->valueFormat[1] = valueFormat[1];
+ if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ {
+ hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
+ out->valueFormat[0] = newFormats.first;
+ out->valueFormat[1] = newFormats.second;
+ }
+
+ if (c->plan->all_axes_pinned)
+ {
+ out->valueFormat[0] = out->valueFormat[0].drop_device_table_flags ();
+ out->valueFormat[1] = out->valueFormat[1].drop_device_table_flags ();
+ }
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+
+ + hb_zip (this+coverage, pairSet)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter ([this, c, out] (const typename Types::template OffsetTo<PairSet>& _)
+ {
+ auto snap = c->serializer->snapshot ();
+ auto *o = out->pairSet.serialize_append (c->serializer);
+ if (unlikely (!o)) return false;
+ bool ret = o->serialize_subset (c, _, this, valueFormat, out->valueFormat);
+ if (!ret)
+ {
+ out->pairSet.pop ();
+ c->serializer->revert (snap);
+ }
+ return ret;
+ },
+ hb_second)
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+
+ out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
+
+ return_trace (bool (new_coverage));
+ }
+
+
+ hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
+ {
+ unsigned record_size = PairSet::get_size (valueFormat);
+
+ unsigned format1 = 0;
+ unsigned format2 = 0;
+ for (const auto & _ :
+ + hb_zip (this+coverage, pairSet)
+ | hb_filter (glyphset, hb_first)
+ | hb_map (hb_second)
+ )
+ {
+ const PairSet& set = (this + _);
+ const PairValueRecord *record = &set.firstPairValueRecord;
+
+ unsigned count = set.len;
+ for (unsigned i = 0; i < count; i++)
+ {
+ if (record->intersects (glyphset))
+ {
+ format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
+ format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
+ }
+ record = &StructAtOffset<const PairValueRecord> (record, record_size);
+ }
+
+ if (format1 == valueFormat[0] && format2 == valueFormat[1])
+ break;
+ }
+
+ return hb_pair (format1, format2);
+ }
+};
+
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_PAIRPOSFORMAT1_HH
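
compute_effective_value_formats above walks every surviving pair record, ORs together the value-format bits that record actually needs, and stops early once the accumulated bits equal the original format; this is what lets the HB_SUBSET_FLAGS_NO_HINTING path shrink the records. The accumulation pattern in isolation (0x0001 and 0x0004 are the real ValueFormat bits for xPlacement and xAdvance; the rest is a made-up stand-in):

    #include <cstdio>
    #include <vector>

    // OR together the format bits each record needs; stop once nothing new can appear.
    static unsigned
    effective_format (unsigned original_format,
                      const std::vector<unsigned> &per_record_bits)
    {
      unsigned format = 0;
      for (unsigned bits : per_record_bits)
      {
        format |= bits & original_format;
        if (format == original_format) break;  // early exit, as in the subtable code
      }
      return format;
    }

    int main ()
    {
      // Original ValueFormat has xPlacement (0x0001) and xAdvance (0x0004) set,
      // but no surviving record actually uses xPlacement.
      unsigned original = 0x0005;
      std::vector<unsigned> records { 0x0004, 0x0004, 0x0000 };
      std::printf ("effective format: 0x%04x\n",
                   effective_format (original, records));  // 0x0004
      return 0;
    }
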
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh
new file mode 100644
index 0000000000..31329dfcb5
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PairPosFormat2.hh
@@ -0,0 +1,356 @@
+#ifndef OT_LAYOUT_GPOS_PAIRPOSFORMAT2_HH
+#define OT_LAYOUT_GPOS_PAIRPOSFORMAT2_HH
+
+#include "ValueFormat.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+template <typename Types>
+struct PairPosFormat2_4
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 2 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of subtable */
+ ValueFormat valueFormat1; /* ValueRecord definition--for the
+ * first glyph of the pair--may be zero
+ * (0) */
+ ValueFormat valueFormat2; /* ValueRecord definition--for the
+ * second glyph of the pair--may be
+ * zero (0) */
+ typename Types::template OffsetTo<ClassDef>
+ classDef1; /* Offset to ClassDef table--from
+ * beginning of PairPos subtable--for
+ * the first glyph of the pair */
+ typename Types::template OffsetTo<ClassDef>
+ classDef2; /* Offset to ClassDef table--from
+ * beginning of PairPos subtable--for
+ * the second glyph of the pair */
+ HBUINT16 class1Count; /* Number of classes in ClassDef1
+ * table--includes Class0 */
+ HBUINT16 class2Count; /* Number of classes in ClassDef2
+ * table--includes Class0 */
+ ValueRecord values; /* Matrix of value pairs:
+ * class1-major, class2-minor,
+ * Each entry has value1 and value2 */
+ public:
+ DEFINE_SIZE_ARRAY (10 + 3 * Types::size, values);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (!(c->check_struct (this)
+ && coverage.sanitize (c, this)
+ && classDef1.sanitize (c, this)
+ && classDef2.sanitize (c, this))) return_trace (false);
+
+ unsigned int len1 = valueFormat1.get_len ();
+ unsigned int len2 = valueFormat2.get_len ();
+ unsigned int stride = HBUINT16::static_size * (len1 + len2);
+ unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
+ return_trace (c->check_range ((const void *) values,
+ count,
+ stride) &&
+ valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
+ valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ return (this+coverage).intersects (glyphs) &&
+ (this+classDef2).intersects (glyphs);
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ if (!intersects (c->glyph_set)) return;
+ if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return;
+
+ hb_set_t klass1_glyphs, klass2_glyphs;
+ if (!(this+classDef1).collect_coverage (&klass1_glyphs)) return;
+ if (!(this+classDef2).collect_coverage (&klass2_glyphs)) return;
+
+ hb_set_t class1_set, class2_set;
+ for (const unsigned cp : + c->glyph_set->iter () | hb_filter (this + coverage))
+ {
+ if (!klass1_glyphs.has (cp)) class1_set.add (0);
+ else
+ {
+ unsigned klass1 = (this+classDef1).get (cp);
+ class1_set.add (klass1);
+ }
+ }
+
+ class2_set.add (0);
+ for (const unsigned cp : + c->glyph_set->iter () | hb_filter (klass2_glyphs))
+ {
+ unsigned klass2 = (this+classDef2).get (cp);
+ class2_set.add (klass2);
+ }
+
+ if (class1_set.is_empty ()
+ || class2_set.is_empty ()
+ || (class2_set.get_population() == 1 && class2_set.has(0)))
+ return;
+
+ unsigned len1 = valueFormat1.get_len ();
+ unsigned len2 = valueFormat2.get_len ();
+ const hb_array_t<const Value> values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2));
+ for (const unsigned class1_idx : class1_set.iter ())
+ {
+ for (const unsigned class2_idx : class2_set.iter ())
+ {
+ unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
+ if (valueFormat1.has_device ())
+ valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1));
+
+ if (valueFormat2.has_device ())
+ valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2));
+ }
+ }
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.reset_fast (buffer->idx, 1);
+ unsigned unsafe_to;
+ if (unlikely (!skippy_iter.next (&unsafe_to)))
+ {
+ buffer->unsafe_to_concat (buffer->idx, unsafe_to);
+ return_trace (false);
+ }
+
+ unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
+ if (!klass2)
+ {
+ buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
+ return_trace (false);
+ }
+
+ unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
+ if (unlikely (klass1 >= class1Count || klass2 >= class2Count))
+ {
+ buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
+ return_trace (false);
+ }
+
+ unsigned int len1 = valueFormat1.get_len ();
+ unsigned int len2 = valueFormat2.get_len ();
+ unsigned int record_len = len1 + len2;
+
+ const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
+
+ bool applied_first = false, applied_second = false;
+
+
+ /* Isolate simple kerning and apply it half to each side.
+   * Results in better cursor positioning / underline drawing.
+ *
+ * Disabled, because causes issues... :-(
+ * https://github.com/harfbuzz/harfbuzz/issues/3408
+ * https://github.com/harfbuzz/harfbuzz/pull/3235#issuecomment-1029814978
+ */
+#ifndef HB_SPLIT_KERN
+ if (false)
+#endif
+ {
+ if (!len2)
+ {
+ const hb_direction_t dir = buffer->props.direction;
+ const bool horizontal = HB_DIRECTION_IS_HORIZONTAL (dir);
+ const bool backward = HB_DIRECTION_IS_BACKWARD (dir);
+ unsigned mask = horizontal ? ValueFormat::xAdvance : ValueFormat::yAdvance;
+ if (backward)
+ mask |= mask >> 2; /* Add eg. xPlacement in RTL. */
+ /* Add Devices. */
+ mask |= mask << 4;
+
+ if (valueFormat1 & ~mask)
+ goto bail;
+
+ /* Is simple kern. Apply value on an empty position slot,
+ * then split it between sides. */
+
+ hb_glyph_position_t pos{};
+ if (valueFormat1.apply_value (c, this, v, pos))
+ {
+ hb_position_t *src = &pos.x_advance;
+ hb_position_t *dst1 = &buffer->cur_pos().x_advance;
+ hb_position_t *dst2 = &buffer->pos[skippy_iter.idx].x_advance;
+ unsigned i = horizontal ? 0 : 1;
+
+ hb_position_t kern = src[i];
+ hb_position_t kern1 = kern >> 1;
+ hb_position_t kern2 = kern - kern1;
+
+ if (!backward)
+ {
+ dst1[i] += kern1;
+ dst2[i] += kern2;
+ dst2[i + 2] += kern2;
+ }
+ else
+ {
+ dst1[i] += kern1;
+ dst1[i + 2] += src[i + 2] - kern2;
+ dst2[i] += kern2;
+ }
+
+ applied_first = applied_second = kern != 0;
+ goto success;
+ }
+ goto boring;
+ }
+ }
+ bail:
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "try kerning glyphs at %u,%u",
+ c->buffer->idx, skippy_iter.idx);
+ }
+
+ applied_first = len1 && valueFormat1.apply_value (c, this, v, buffer->cur_pos());
+ applied_second = len2 && valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
+
+ if (applied_first || applied_second)
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "kerned glyphs at %u,%u",
+ c->buffer->idx, skippy_iter.idx);
+ }
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "tried kerning glyphs at %u,%u",
+ c->buffer->idx, skippy_iter.idx);
+ }
+
+ success:
+ if (applied_first || applied_second)
+ buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
+ else
+ boring:
+ buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
+
+ if (len2)
+ {
+ skippy_iter.idx++;
+ // https://github.com/harfbuzz/harfbuzz/issues/3824
+ // https://github.com/harfbuzz/harfbuzz/issues/3888#issuecomment-1326781116
+ buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
+ }
+
+ buffer->idx = skippy_iter.idx;
+
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_map_t klass1_map;
+ out->classDef1.serialize_subset (c, classDef1, this, &klass1_map, true, true, &(this + coverage));
+ out->class1Count = klass1_map.get_population ();
+
+ hb_map_t klass2_map;
+ out->classDef2.serialize_subset (c, classDef2, this, &klass2_map, true, false);
+ out->class2Count = klass2_map.get_population ();
+
+ unsigned len1 = valueFormat1.get_len ();
+ unsigned len2 = valueFormat2.get_len ();
+
+ hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
+ if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ newFormats = compute_effective_value_formats (klass1_map, klass2_map);
+
+ out->valueFormat1 = newFormats.first;
+ out->valueFormat2 = newFormats.second;
+
+ if (c->plan->all_axes_pinned)
+ {
+ out->valueFormat1 = out->valueFormat1.drop_device_table_flags ();
+ out->valueFormat2 = out->valueFormat2.drop_device_table_flags ();
+ }
+
+ for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
+ {
+ for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
+ {
+ unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
+ valueFormat1.copy_values (c->serializer, out->valueFormat1, this, &values[idx], &c->plan->layout_variation_idx_delta_map);
+ valueFormat2.copy_values (c->serializer, out->valueFormat2, this, &values[idx + len1], &c->plan->layout_variation_idx_delta_map);
+ }
+ }
+
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto it =
+ + hb_iter (this+coverage)
+ | hb_filter (glyphset)
+ | hb_map_retains_sorting (glyph_map)
+ ;
+
+ out->coverage.serialize_serialize (c->serializer, it);
+ return_trace (out->class1Count && out->class2Count && bool (it));
+ }
+
+
+ hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
+ const hb_map_t& klass2_map) const
+ {
+ unsigned len1 = valueFormat1.get_len ();
+ unsigned len2 = valueFormat2.get_len ();
+ unsigned record_size = len1 + len2;
+
+ unsigned format1 = 0;
+ unsigned format2 = 0;
+
+ for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
+ {
+ for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
+ {
+ unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_size;
+ format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
+ format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
+ }
+
+ if (format1 == valueFormat1 && format2 == valueFormat2)
+ break;
+ }
+
+ return hb_pair (format1, format2);
+ }
+};
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_PAIRPOSFORMAT2_HH
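
PairPosFormat2 stores its value pairs as a class1-major matrix: the record for a (class1, class2) pair begins (class1 * class2Count + class2) * (len1 + len2) values into the array, with the first len1 values applying to the first glyph and the next len2 to the second, exactly as apply() and subset() index it above. The indexing with plain ints in place of ValueRecord (sample numbers invented):

    #include <cstdio>
    #include <vector>

    // Offset, in values, of the record for a given class pair.
    static unsigned
    pair_record_offset (unsigned klass1, unsigned klass2,
                        unsigned class2_count, unsigned len1, unsigned len2)
    { return (klass1 * class2_count + klass2) * (len1 + len2); }

    int main ()
    {
      unsigned class2_count = 3, len1 = 1, len2 = 1;  // e.g. one xAdvance per glyph
      // 2 x 3 class matrix, flattened; each cell holds {value1, value2}.
      std::vector<int> values { 0, 0,  -50, 0,  -80, 0,
                                0, 0,  -20, 0,  -40, 0 };

      unsigned off = pair_record_offset (1, 2, class2_count, len1, len2);
      std::printf ("class pair (1,2): value1 = %d, value2 = %d\n",
                   values[off], values[off + len1]);  // -40, 0
      return 0;
    }
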
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PairSet.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PairSet.hh
new file mode 100644
index 0000000000..9faff49909
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PairSet.hh
@@ -0,0 +1,207 @@
+#ifndef OT_LAYOUT_GPOS_PAIRSET_HH
+#define OT_LAYOUT_GPOS_PAIRSET_HH
+
+#include "PairValueRecord.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+
+template <typename Types>
+struct PairSet
+{
+ template <typename Types2>
+ friend struct PairPosFormat1_3;
+
+ using PairValueRecord = GPOS_impl::PairValueRecord<Types>;
+
+ protected:
+ HBUINT16 len; /* Number of PairValueRecords */
+ PairValueRecord firstPairValueRecord;
+ /* Array of PairValueRecords--ordered
+ * by GlyphID of the second glyph */
+ public:
+ DEFINE_SIZE_MIN (2);
+
+ static unsigned get_size (unsigned len1, unsigned len2)
+ {
+ return Types::HBGlyphID::static_size + Value::static_size * (len1 + len2);
+ }
+ static unsigned get_size (const ValueFormat valueFormats[2])
+ {
+ unsigned len1 = valueFormats[0].get_len ();
+ unsigned len2 = valueFormats[1].get_len ();
+ return get_size (len1, len2);
+ }
+
+ struct sanitize_closure_t
+ {
+ const ValueFormat *valueFormats;
+ unsigned int len1; /* valueFormats[0].get_len() */
+ unsigned int stride; /* bytes */
+ };
+
+ bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
+ {
+ TRACE_SANITIZE (this);
+ if (!(c->check_struct (this)
+ && c->check_range (&firstPairValueRecord,
+ len,
+ closure->stride))) return_trace (false);
+
+ unsigned int count = len;
+ const PairValueRecord *record = &firstPairValueRecord;
+ return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
+ closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
+ }
+
+ bool intersects (const hb_set_t *glyphs,
+ const ValueFormat *valueFormats) const
+ {
+ unsigned record_size = get_size (valueFormats);
+
+ const PairValueRecord *record = &firstPairValueRecord;
+ unsigned int count = len;
+ for (unsigned int i = 0; i < count; i++)
+ {
+ if (glyphs->has (record->secondGlyph))
+ return true;
+ record = &StructAtOffset<const PairValueRecord> (record, record_size);
+ }
+ return false;
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c,
+ const ValueFormat *valueFormats) const
+ {
+ unsigned record_size = get_size (valueFormats);
+
+ const PairValueRecord *record = &firstPairValueRecord;
+ c->input->add_array (&record->secondGlyph, len, record_size);
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c,
+ const ValueFormat *valueFormats) const
+ {
+ unsigned record_size = get_size (valueFormats);
+
+ const PairValueRecord *record = &firstPairValueRecord;
+ unsigned count = len;
+ for (unsigned i = 0; i < count; i++)
+ {
+ if (c->glyph_set->has (record->secondGlyph))
+ { record->collect_variation_indices (c, valueFormats, this); }
+
+ record = &StructAtOffset<const PairValueRecord> (record, record_size);
+ }
+ }
+
+ bool apply (hb_ot_apply_context_t *c,
+ const ValueFormat *valueFormats,
+ unsigned int pos) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int len1 = valueFormats[0].get_len ();
+ unsigned int len2 = valueFormats[1].get_len ();
+ unsigned record_size = get_size (len1, len2);
+
+ const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
+ &firstPairValueRecord,
+ len,
+ record_size);
+ if (record)
+ {
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "try kerning glyphs at %u,%u",
+ c->buffer->idx, pos);
+ }
+
+ bool applied_first = len1 && valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
+ bool applied_second = len2 && valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
+
+ if (applied_first || applied_second)
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "kerned glyphs at %u,%u",
+ c->buffer->idx, pos);
+ }
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "tried kerning glyphs at %u,%u",
+ c->buffer->idx, pos);
+ }
+
+ if (applied_first || applied_second)
+ buffer->unsafe_to_break (buffer->idx, pos + 1);
+
+ if (len2)
+ {
+ pos++;
+ // https://github.com/harfbuzz/harfbuzz/issues/3824
+ // https://github.com/harfbuzz/harfbuzz/issues/3888#issuecomment-1326781116
+ buffer->unsafe_to_break (buffer->idx, pos + 1);
+ }
+
+ buffer->idx = pos;
+ return_trace (true);
+ }
+ buffer->unsafe_to_concat (buffer->idx, pos + 1);
+ return_trace (false);
+ }
+
+ bool subset (hb_subset_context_t *c,
+ const ValueFormat valueFormats[2],
+ const ValueFormat newFormats[2]) const
+ {
+ TRACE_SUBSET (this);
+ auto snap = c->serializer->snapshot ();
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->len = 0;
+
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ unsigned len1 = valueFormats[0].get_len ();
+ unsigned len2 = valueFormats[1].get_len ();
+ unsigned record_size = get_size (len1, len2);
+
+ typename PairValueRecord::context_t context =
+ {
+ this,
+ valueFormats,
+ newFormats,
+ len1,
+ &glyph_map,
+ &c->plan->layout_variation_idx_delta_map
+ };
+
+ const PairValueRecord *record = &firstPairValueRecord;
+ unsigned count = len, num = 0;
+ for (unsigned i = 0; i < count; i++)
+ {
+ if (glyphset.has (record->secondGlyph)
+ && record->subset (c, &context)) num++;
+ record = &StructAtOffset<const PairValueRecord> (record, record_size);
+ }
+
+ out->len = num;
+ if (!num) c->serializer->revert (snap);
+ return_trace (num);
+ }
+};
+
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_PAIRSET_HH
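
PairSet keeps its PairValueRecords as one packed array with no per-record offsets: each record is a second-glyph id followed by len1 + len2 16-bit values, so the sanitizer and apply() step through it by byte stride, and apply() binary-searches it by that stride (hb_bsearch with an explicit record size). A standalone sketch of that stride-aware search, using host-endian uint16_t in place of HarfBuzz's big-endian types:

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Find the record whose first field equals second_glyph, stepping by stride bytes.
    static const uint16_t *
    find_pair_record (const uint16_t *first, unsigned count, unsigned stride,
                      uint16_t second_glyph)
    {
      const uint8_t *base = reinterpret_cast<const uint8_t *> (first);
      unsigned lo = 0, hi = count;
      while (lo < hi)
      {
        unsigned mid = (lo + hi) / 2;
        const uint16_t *rec = reinterpret_cast<const uint16_t *> (base + mid * stride);
        if      (rec[0] < second_glyph) lo = mid + 1;
        else if (rec[0] > second_glyph) hi = mid;
        else return rec;
      }
      return nullptr;
    }

    int main ()
    {
      unsigned len1 = 1, len2 = 0;  // one value (say xAdvance) for the first glyph only
      unsigned stride = sizeof (uint16_t) * (1 + len1 + len2);
      // Records ordered by secondGlyph: {secondGlyph, xAdvance}.
      std::vector<uint16_t> data { 10, 100,  20, 50,  30, 25 };

      const uint16_t *rec = find_pair_record (data.data (), 3, stride, 20);
      if (rec)
        std::printf ("secondGlyph %u -> xAdvance %u\n",
                     (unsigned) rec[0], (unsigned) rec[1]);  // 20 -> 50
      return 0;
    }
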
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PairValueRecord.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PairValueRecord.hh
new file mode 100644
index 0000000000..3222477764
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PairValueRecord.hh
@@ -0,0 +1,99 @@
+#ifndef OT_LAYOUT_GPOS_PAIRVALUERECORD_HH
+#define OT_LAYOUT_GPOS_PAIRVALUERECORD_HH
+
+#include "ValueFormat.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+
+template <typename Types>
+struct PairValueRecord
+{
+ template <typename Types2>
+ friend struct PairSet;
+
+ protected:
+ typename Types::HBGlyphID
+ secondGlyph; /* GlyphID of second glyph in the
+ * pair--first glyph is listed in the
+ * Coverage table */
+  ValueRecord   values;         /* Positioning data for the first glyph,
+                                 * followed by the data for the second glyph */
+ public:
+ DEFINE_SIZE_ARRAY (Types::size, values);
+
+ int cmp (hb_codepoint_t k) const
+ { return secondGlyph.cmp (k); }
+
+ struct context_t
+ {
+ const void *base;
+ const ValueFormat *valueFormats;
+ const ValueFormat *newFormats;
+ unsigned len1; /* valueFormats[0].get_len() */
+ const hb_map_t *glyph_map;
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map;
+ };
+
+ bool subset (hb_subset_context_t *c,
+ context_t *closure) const
+ {
+ TRACE_SERIALIZE (this);
+ auto *s = c->serializer;
+ auto *out = s->start_embed (*this);
+ if (unlikely (!s->extend_min (out))) return_trace (false);
+
+ out->secondGlyph = (*closure->glyph_map)[secondGlyph];
+
+ closure->valueFormats[0].copy_values (s,
+ closure->newFormats[0],
+ closure->base, &values[0],
+ closure->layout_variation_idx_delta_map);
+ closure->valueFormats[1].copy_values (s,
+ closure->newFormats[1],
+ closure->base,
+ &values[closure->len1],
+ closure->layout_variation_idx_delta_map);
+
+ return_trace (true);
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c,
+ const ValueFormat *valueFormats,
+ const void *base) const
+ {
+ unsigned record1_len = valueFormats[0].get_len ();
+ unsigned record2_len = valueFormats[1].get_len ();
+ const hb_array_t<const Value> values_array = values.as_array (record1_len + record2_len);
+
+ if (valueFormats[0].has_device ())
+ valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len));
+
+ if (valueFormats[1].has_device ())
+ valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len));
+ }
+
+ bool intersects (const hb_set_t& glyphset) const
+ {
+ return glyphset.has(secondGlyph);
+ }
+
+ const Value* get_values_1 () const
+ {
+ return &values[0];
+ }
+
+ const Value* get_values_2 (ValueFormat format1) const
+ {
+ return &values[format1.get_len ()];
+ }
+};
+
+
+}
+}
+}
+
+#endif // OT_LAYOUT_GPOS_PAIRVALUERECORD_HH
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PosLookup.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PosLookup.hh
new file mode 100644
index 0000000000..c4e57bb543
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PosLookup.hh
@@ -0,0 +1,79 @@
+#ifndef OT_LAYOUT_GPOS_POSLOOKUP_HH
+#define OT_LAYOUT_GPOS_POSLOOKUP_HH
+
+#include "PosLookupSubTable.hh"
+#include "../../../hb-ot-layout-common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct PosLookup : Lookup
+{
+ using SubTable = PosLookupSubTable;
+
+ const SubTable& get_subtable (unsigned int i) const
+ { return Lookup::get_subtable<SubTable> (i); }
+
+ bool is_reverse () const
+ {
+ return false;
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ return_trace (dispatch (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ hb_intersects_context_t c (glyphs);
+ return dispatch (&c);
+ }
+
+ hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
+ { return dispatch (c); }
+
+ hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
+ {
+ if (c->is_lookup_visited (this_index))
+ return hb_closure_lookups_context_t::default_return_value ();
+
+ c->set_lookup_visited (this_index);
+ if (!intersects (c->glyphs))
+ {
+ c->set_lookup_inactive (this_index);
+ return hb_closure_lookups_context_t::default_return_value ();
+ }
+
+ hb_closure_lookups_context_t::return_t ret = dispatch (c);
+ return ret;
+ }
+
+ template <typename set_t>
+ void collect_coverage (set_t *glyphs) const
+ {
+ hb_collect_coverage_context_t<set_t> c (glyphs);
+ dispatch (&c);
+ }
+
+ template <typename context_t>
+ static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ { return Lookup::dispatch<SubTable> (c, std::forward<Ts> (ds)...); }
+
+ bool subset (hb_subset_context_t *c) const
+ { return Lookup::subset<SubTable> (c); }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ { return Lookup::sanitize<SubTable> (c); }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_POSLOOKUP_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/PosLookupSubTable.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/PosLookupSubTable.hh
new file mode 100644
index 0000000000..c19fbc323f
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/PosLookupSubTable.hh
@@ -0,0 +1,79 @@
+#ifndef OT_LAYOUT_GPOS_POSLOOKUPSUBTABLE_HH
+#define OT_LAYOUT_GPOS_POSLOOKUPSUBTABLE_HH
+
+#include "SinglePos.hh"
+#include "PairPos.hh"
+#include "CursivePos.hh"
+#include "MarkBasePos.hh"
+#include "MarkLigPos.hh"
+#include "MarkMarkPos.hh"
+#include "ContextPos.hh"
+#include "ChainContextPos.hh"
+#include "ExtensionPos.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct PosLookupSubTable
+{
+ friend struct ::OT::Lookup;
+ friend struct PosLookup;
+
+ enum Type {
+ Single = 1,
+ Pair = 2,
+ Cursive = 3,
+ MarkBase = 4,
+ MarkLig = 5,
+ MarkMark = 6,
+ Context = 7,
+ ChainContext = 8,
+ Extension = 9
+ };
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
+ {
+ TRACE_DISPATCH (this, lookup_type);
+ switch (lookup_type) {
+ case Single: return_trace (u.single.dispatch (c, std::forward<Ts> (ds)...));
+ case Pair: return_trace (u.pair.dispatch (c, std::forward<Ts> (ds)...));
+ case Cursive: return_trace (u.cursive.dispatch (c, std::forward<Ts> (ds)...));
+ case MarkBase: return_trace (u.markBase.dispatch (c, std::forward<Ts> (ds)...));
+ case MarkLig: return_trace (u.markLig.dispatch (c, std::forward<Ts> (ds)...));
+ case MarkMark: return_trace (u.markMark.dispatch (c, std::forward<Ts> (ds)...));
+ case Context: return_trace (u.context.dispatch (c, std::forward<Ts> (ds)...));
+ case ChainContext: return_trace (u.chainContext.dispatch (c, std::forward<Ts> (ds)...));
+ case Extension: return_trace (u.extension.dispatch (c, std::forward<Ts> (ds)...));
+ default: return_trace (c->default_return_value ());
+ }
+ }
+
+ bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
+ {
+ hb_intersects_context_t c (glyphs);
+ return dispatch (&c, lookup_type);
+ }
+
+ protected:
+ union {
+ SinglePos single;
+ PairPos pair;
+ CursivePos cursive;
+ MarkBasePos markBase;
+ MarkLigPos markLig;
+ MarkMarkPos markMark;
+ ContextPos context;
+ ChainContextPos chainContext;
+ ExtensionPos extension;
+ } u;
+ public:
+ DEFINE_SIZE_MIN (0);
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_POSLOOKUPSUBTABLE_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePos.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePos.hh
new file mode 100644
index 0000000000..3af6c49965
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePos.hh
@@ -0,0 +1,100 @@
+#ifndef OT_LAYOUT_GPOS_SINGLEPOS_HH
+#define OT_LAYOUT_GPOS_SINGLEPOS_HH
+
+#include "SinglePosFormat1.hh"
+#include "SinglePosFormat2.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct SinglePos
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ SinglePosFormat1 format1;
+ SinglePosFormat2 format2;
+ } u;
+
+ public:
+ template<typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ unsigned get_format (Iterator glyph_val_iter_pairs)
+ {
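+    /* Format 1 shares a single value record across all covered glyphs; it can
+     * only be used when every glyph's record is identical to the first one. */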
+ hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);
+
+ for (const auto iter : glyph_val_iter_pairs)
+ for (const auto _ : hb_zip (iter.second, first_val_iter))
+ if (_.first != _.second)
+ return 2;
+
+ return 1;
+ }
+
+ template<typename Iterator,
+ typename SrcLookup,
+ hb_requires (hb_is_iterator (Iterator))>
+ void serialize (hb_serialize_context_t *c,
+ const SrcLookup* src,
+ Iterator glyph_val_iter_pairs,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
+ bool all_axes_pinned)
+ {
+ if (unlikely (!c->extend_min (u.format))) return;
+ unsigned format = 2;
+ ValueFormat new_format = src->get_value_format ();
+
+ if (all_axes_pinned)
+ new_format = new_format.drop_device_table_flags ();
+
+ if (glyph_val_iter_pairs)
+ format = get_format (glyph_val_iter_pairs);
+
+ u.format = format;
+ switch (u.format) {
+ case 1: u.format1.serialize (c,
+ src,
+ glyph_val_iter_pairs,
+ new_format,
+ layout_variation_idx_delta_map);
+ return;
+ case 2: u.format2.serialize (c,
+ src,
+ glyph_val_iter_pairs,
+ new_format,
+ layout_variation_idx_delta_map);
+ return;
+ default:return;
+ }
+ }
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+
+template<typename Iterator, typename SrcLookup>
+static void
+SinglePos_serialize (hb_serialize_context_t *c,
+ const SrcLookup *src,
+ Iterator it,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
+ bool all_axes_pinned)
+{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_delta_map, all_axes_pinned); }
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_SINGLEPOS_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh
new file mode 100644
index 0000000000..623e4e66b2
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat1.hh
@@ -0,0 +1,164 @@
+#ifndef OT_LAYOUT_GPOS_SINGLEPOSFORMAT1_HH
+#define OT_LAYOUT_GPOS_SINGLEPOSFORMAT1_HH
+
+#include "Common.hh"
+#include "ValueFormat.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct SinglePosFormat1
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ Offset16To<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of subtable */
+ ValueFormat valueFormat; /* Defines the types of data in the
+ * ValueRecord */
+ ValueRecord values; /* Defines positioning
+ * value(s)--applied to all glyphs in
+ * the Coverage table */
+ public:
+ DEFINE_SIZE_ARRAY (6, values);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ coverage.sanitize (c, this) &&
+ /* The coverage table may use a range to represent a set
+ * of glyphs, which means a small number of bytes can
+ * generate a large glyph set. Manually modify the
+ * sanitizer max ops to take this into account.
+ *
+ * Note: This check *must* be right after coverage sanitize. */
+ c->check_ops ((this + coverage).get_population () >> 1) &&
+ valueFormat.sanitize_value (c, this, values));
+
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ if (!valueFormat.has_device ()) return;
+
+ hb_set_t intersection;
+ (this+coverage).intersect_set (*c->glyph_set, intersection);
+ if (!intersection) return;
+
+ valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ()));
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ ValueFormat get_value_format () const { return valueFormat; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "positioning glyph at %u",
+ c->buffer->idx);
+ }
+
+ valueFormat.apply_value (c, this, values, buffer->cur_pos());
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "positioned glyph at %u",
+ c->buffer->idx);
+ }
+
+ buffer->idx++;
+ return_trace (true);
+ }
+
+ bool
+ position_single (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t gid,
+ hb_glyph_position_t &pos) const
+ {
+ unsigned int index = (this+coverage).get_coverage (gid);
+ if (likely (index == NOT_COVERED)) return false;
+
+ /* This is ugly... */
+ hb_buffer_t buffer;
+ buffer.props.direction = direction;
+ OT::hb_ot_apply_context_t c (1, font, &buffer);
+
+ valueFormat.apply_value (&c, this, values, pos);
+ return true;
+ }
+
+ template<typename Iterator,
+ typename SrcLookup,
+ hb_requires (hb_is_iterator (Iterator))>
+ void serialize (hb_serialize_context_t *c,
+ const SrcLookup *src,
+ Iterator it,
+ ValueFormat newFormat,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map)
+ {
+ if (unlikely (!c->extend_min (this))) return;
+ if (unlikely (!c->check_assign (valueFormat,
+ newFormat,
+ HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
+
+ for (const hb_array_t<const Value>& _ : + it | hb_map (hb_second))
+ {
+ src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_delta_map);
+ // Only serialize the first entry in the iterator, the rest are assumed to
+ // be the same.
+ break;
+ }
+
+ auto glyphs =
+ + it
+ | hb_map_retains_sorting (hb_first)
+ ;
+
+ coverage.serialize_serialize (c, glyphs);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ hb_set_t intersection;
+ (this+coverage).intersect_set (glyphset, intersection);
+
+ auto it =
+ + hb_iter (intersection)
+ | hb_map_retains_sorting (glyph_map)
+ | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
+ ;
+
+ bool ret = bool (it);
+ SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
+ return_trace (ret);
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_SINGLEPOSFORMAT1_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh
new file mode 100644
index 0000000000..e8f2d7c2c6
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/SinglePosFormat2.hh
@@ -0,0 +1,176 @@
+#ifndef OT_LAYOUT_GPOS_SINGLEPOSFORMAT2_HH
+#define OT_LAYOUT_GPOS_SINGLEPOSFORMAT2_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+struct SinglePosFormat2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 2 */
+ Offset16To<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of subtable */
+ ValueFormat valueFormat; /* Defines the types of data in the
+ * ValueRecord */
+ HBUINT16 valueCount; /* Number of ValueRecords */
+ ValueRecord values; /* Array of ValueRecords--positioning
+ * values applied to glyphs */
+ public:
+ DEFINE_SIZE_ARRAY (8, values);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ coverage.sanitize (c, this) &&
+ valueFormat.sanitize_values (c, this, values, valueCount));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
+ {
+ if (!valueFormat.has_device ()) return;
+
+ auto it =
+ + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
+ | hb_filter (c->glyph_set, hb_first)
+ ;
+
+ if (!it) return;
+
+ unsigned sub_length = valueFormat.get_len ();
+ const hb_array_t<const Value> values_array = values.as_array (valueCount * sub_length);
+
+ for (unsigned i : + it
+ | hb_map (hb_second))
+ valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length));
+
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ ValueFormat get_value_format () const { return valueFormat; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_buffer_t *buffer = c->buffer;
+ unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ if (unlikely (index >= valueCount)) return_trace (false);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "positioning glyph at %u",
+ c->buffer->idx);
+ }
+
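+    /* Format 2 stores one value record per covered glyph; apply the record
+     * at this glyph's coverage index. */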
+ valueFormat.apply_value (c, this,
+ &values[index * valueFormat.get_len ()],
+ buffer->cur_pos());
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "positioned glyph at %u",
+ c->buffer->idx);
+ }
+
+ buffer->idx++;
+ return_trace (true);
+ }
+
+ bool
+ position_single (hb_font_t *font,
+ hb_direction_t direction,
+ hb_codepoint_t gid,
+ hb_glyph_position_t &pos) const
+ {
+ unsigned int index = (this+coverage).get_coverage (gid);
+ if (likely (index == NOT_COVERED)) return false;
+ if (unlikely (index >= valueCount)) return false;
+
+ /* This is ugly... */
+ hb_buffer_t buffer;
+ buffer.props.direction = direction;
+ OT::hb_ot_apply_context_t c (1, font, &buffer);
+
+ valueFormat.apply_value (&c, this,
+ &values[index * valueFormat.get_len ()],
+ pos);
+ return true;
+ }
+
+
+ template<typename Iterator,
+ typename SrcLookup,
+ hb_requires (hb_is_iterator (Iterator))>
+ void serialize (hb_serialize_context_t *c,
+ const SrcLookup *src,
+ Iterator it,
+ ValueFormat newFormat,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map)
+ {
+ auto out = c->extend_min (this);
+ if (unlikely (!out)) return;
+ if (unlikely (!c->check_assign (valueFormat, newFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
+ if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return;
+
+ + it
+ | hb_map (hb_second)
+ | hb_apply ([&] (hb_array_t<const Value> _)
+ { src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_delta_map); })
+ ;
+
+ auto glyphs =
+ + it
+ | hb_map_retains_sorting (hb_first)
+ ;
+
+ coverage.serialize_serialize (c, glyphs);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ unsigned sub_length = valueFormat.get_len ();
+ auto values_array = values.as_array (valueCount * sub_length);
+
+ auto it =
+ + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
+ | hb_filter (glyphset, hb_first)
+ | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
+ {
+ return hb_pair (glyph_map[_.first],
+ values_array.sub_array (_.second * sub_length,
+ sub_length));
+ })
+ ;
+
+ bool ret = bool (it);
+ SinglePos_serialize (c->serializer, this, it, &c->plan->layout_variation_idx_delta_map, c->plan->all_axes_pinned);
+ return_trace (ret);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GPOS_SINGLEPOSFORMAT2_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GPOS/ValueFormat.hh b/gfx/harfbuzz/src/OT/Layout/GPOS/ValueFormat.hh
new file mode 100644
index 0000000000..1aa451abcc
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GPOS/ValueFormat.hh
@@ -0,0 +1,394 @@
+#ifndef OT_LAYOUT_GPOS_VALUEFORMAT_HH
+#define OT_LAYOUT_GPOS_VALUEFORMAT_HH
+
+#include "../../../hb-ot-layout-gsubgpos.hh"
+
+namespace OT {
+namespace Layout {
+namespace GPOS_impl {
+
+typedef HBUINT16 Value;
+
+typedef UnsizedArrayOf<Value> ValueRecord;
+
+struct ValueFormat : HBUINT16
+{
+ enum Flags {
+ xPlacement = 0x0001u, /* Includes horizontal adjustment for placement */
+ yPlacement = 0x0002u, /* Includes vertical adjustment for placement */
+ xAdvance = 0x0004u, /* Includes horizontal adjustment for advance */
+ yAdvance = 0x0008u, /* Includes vertical adjustment for advance */
+ xPlaDevice = 0x0010u, /* Includes horizontal Device table for placement */
+ yPlaDevice = 0x0020u, /* Includes vertical Device table for placement */
+ xAdvDevice = 0x0040u, /* Includes horizontal Device table for advance */
+ yAdvDevice = 0x0080u, /* Includes vertical Device table for advance */
+ ignored = 0x0F00u, /* Was used in TrueType Open for MM fonts */
+ reserved = 0xF000u, /* For future use */
+
+ devices = 0x00F0u /* Mask for having any Device table */
+ };
+
+/* All fields are optional. Only those present advance the value pointer. */
+#if 0
+ HBINT16 xPlacement; /* Horizontal adjustment for
+ * placement--in design units */
+ HBINT16 yPlacement; /* Vertical adjustment for
+ * placement--in design units */
+ HBINT16 xAdvance; /* Horizontal adjustment for
+ * advance--in design units (only used
+ * for horizontal writing) */
+ HBINT16 yAdvance; /* Vertical adjustment for advance--in
+ * design units (only used for vertical
+ * writing) */
+ Offset16To<Device> xPlaDevice; /* Offset to Device table for
+ * horizontal placement--measured from
+ * beginning of PosTable (may be NULL) */
+ Offset16To<Device> yPlaDevice; /* Offset to Device table for vertical
+ * placement--measured from beginning
+ * of PosTable (may be NULL) */
+ Offset16To<Device> xAdvDevice; /* Offset to Device table for
+ * horizontal advance--measured from
+ * beginning of PosTable (may be NULL) */
+ Offset16To<Device> yAdvDevice; /* Offset to Device table for vertical
+ * advance--measured from beginning of
+ * PosTable (may be NULL) */
+#endif
+
+ IntType& operator = (uint16_t i) { v = i; return *this; }
+
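+  /* Each flag set in the format contributes exactly one 16-bit Value to a
+   * record, so the record length is the popcount of the format. */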
+ unsigned int get_len () const { return hb_popcount ((unsigned int) *this); }
+ unsigned int get_size () const { return get_len () * Value::static_size; }
+
+ hb_vector_t<unsigned> get_device_table_indices () const {
+ unsigned i = 0;
+ hb_vector_t<unsigned> result;
+ unsigned format = *this;
+
+ if (format & xPlacement) i++;
+ if (format & yPlacement) i++;
+ if (format & xAdvance) i++;
+ if (format & yAdvance) i++;
+
+ if (format & xPlaDevice) result.push (i++);
+ if (format & yPlaDevice) result.push (i++);
+ if (format & xAdvDevice) result.push (i++);
+ if (format & yAdvDevice) result.push (i++);
+
+ return result;
+ }
+
+ bool apply_value (hb_ot_apply_context_t *c,
+ const void *base,
+ const Value *values,
+ hb_glyph_position_t &glyph_pos) const
+ {
+ bool ret = false;
+ unsigned int format = *this;
+ if (!format) return ret;
+
+ hb_font_t *font = c->font;
+ bool horizontal =
+#ifndef HB_NO_VERTICAL
+ HB_DIRECTION_IS_HORIZONTAL (c->direction)
+#else
+ true
+#endif
+ ;
+
+ if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++, &ret));
+ if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++, &ret));
+ if (format & xAdvance) {
+ if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
+ values++;
+ }
+ /* y_advance values grow downward but font-space grows upward, hence negation */
+ if (format & yAdvance) {
+ if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
+ values++;
+ }
+
+ if (!has_device ()) return ret;
+
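+    /* Device adjustments only apply when rendering at a pixel size or when
+     * variation coordinates are set. */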
+ bool use_x_device = font->x_ppem || font->num_coords;
+ bool use_y_device = font->y_ppem || font->num_coords;
+
+ if (!use_x_device && !use_y_device) return ret;
+
+ const VariationStore &store = c->var_store;
+ auto *cache = c->var_store_cache;
+
+ /* pixel -> fractional pixel */
+ if (format & xPlaDevice) {
+ if (use_x_device) glyph_pos.x_offset += (base + get_device (values, &ret)).get_x_delta (font, store, cache);
+ values++;
+ }
+ if (format & yPlaDevice) {
+ if (use_y_device) glyph_pos.y_offset += (base + get_device (values, &ret)).get_y_delta (font, store, cache);
+ values++;
+ }
+ if (format & xAdvDevice) {
+ if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store, cache);
+ values++;
+ }
+ if (format & yAdvDevice) {
+ /* y_advance values grow downward but font-space grows upward, hence negation */
+ if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store, cache);
+ values++;
+ }
+ return ret;
+ }
+
+ unsigned int get_effective_format (const Value *values) const
+ {
+ unsigned int format = *this;
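+    /* Drop any flag whose corresponding value in this record is zero, since
+     * it encodes no adjustment. */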
+ for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
+ if (format & flag) should_drop (*values++, (Flags) flag, &format);
+ }
+
+ return format;
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ unsigned int get_effective_format (Iterator it) const {
+ unsigned int new_format = 0;
+
+ for (const hb_array_t<const Value>& values : it)
+ new_format = new_format | get_effective_format (&values);
+
+ return new_format;
+ }
+
+ void copy_values (hb_serialize_context_t *c,
+ unsigned int new_format,
+ const void *base,
+ const Value *values,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
+ {
+ unsigned int format = *this;
+ if (!format) return;
+
+ HBINT16 *x_placement = nullptr, *y_placement = nullptr, *x_adv = nullptr, *y_adv = nullptr;
+ if (format & xPlacement) x_placement = copy_value (c, new_format, xPlacement, *values++);
+ if (format & yPlacement) y_placement = copy_value (c, new_format, yPlacement, *values++);
+ if (format & xAdvance) x_adv = copy_value (c, new_format, xAdvance, *values++);
+ if (format & yAdvance) y_adv = copy_value (c, new_format, yAdvance, *values++);
+
+ if (format & xPlaDevice)
+ {
+ add_delta_to_value (x_placement, base, values, layout_variation_idx_delta_map);
+ copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, xPlaDevice);
+ }
+
+ if (format & yPlaDevice)
+ {
+ add_delta_to_value (y_placement, base, values, layout_variation_idx_delta_map);
+ copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, yPlaDevice);
+ }
+
+ if (format & xAdvDevice)
+ {
+ add_delta_to_value (x_adv, base, values, layout_variation_idx_delta_map);
+ copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, xAdvDevice);
+ }
+
+ if (format & yAdvDevice)
+ {
+ add_delta_to_value (y_adv, base, values, layout_variation_idx_delta_map);
+ copy_device (c, base, values++, layout_variation_idx_delta_map, new_format, yAdvDevice);
+ }
+ }
+
+ HBINT16* copy_value (hb_serialize_context_t *c,
+ unsigned int new_format,
+ Flags flag,
+ Value value) const
+ {
+ // Filter by new format.
+ if (!(new_format & flag)) return nullptr;
+ return reinterpret_cast<HBINT16 *> (c->copy (value));
+ }
+
+ void collect_variation_indices (hb_collect_variation_indices_context_t *c,
+ const void *base,
+ const hb_array_t<const Value>& values) const
+ {
+ unsigned format = *this;
+ unsigned i = 0;
+ if (format & xPlacement) i++;
+ if (format & yPlacement) i++;
+ if (format & xAdvance) i++;
+ if (format & yAdvance) i++;
+ if (format & xPlaDevice)
+ {
+ (base + get_device (&(values[i]))).collect_variation_indices (c);
+ i++;
+ }
+
+ if (format & ValueFormat::yPlaDevice)
+ {
+ (base + get_device (&(values[i]))).collect_variation_indices (c);
+ i++;
+ }
+
+ if (format & ValueFormat::xAdvDevice)
+ {
+ (base + get_device (&(values[i]))).collect_variation_indices (c);
+ i++;
+ }
+
+ if (format & ValueFormat::yAdvDevice)
+ {
+ (base + get_device (&(values[i]))).collect_variation_indices (c);
+ i++;
+ }
+ }
+
+ unsigned drop_device_table_flags () const
+ {
+ unsigned format = *this;
+ for (unsigned flag = xPlaDevice; flag <= yAdvDevice; flag = flag << 1)
+ format = format & ~flag;
+
+ return format;
+ }
+
+ private:
+ bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
+ {
+ unsigned int format = *this;
+
+ if (format & xPlacement) values++;
+ if (format & yPlacement) values++;
+ if (format & xAdvance) values++;
+ if (format & yAdvance) values++;
+
+ if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
+ if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
+ if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
+ if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
+
+ return true;
+ }
+
+ static inline Offset16To<Device>& get_device (Value* value)
+ {
+ return *static_cast<Offset16To<Device> *> (value);
+ }
+ static inline const Offset16To<Device>& get_device (const Value* value, bool *worked=nullptr)
+ {
+ if (worked) *worked |= bool (*value);
+ return *static_cast<const Offset16To<Device> *> (value);
+ }
+
+ void add_delta_to_value (HBINT16 *value,
+ const void *base,
+ const Value *src_value,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
+ {
+ if (!value) return;
+ unsigned varidx = (base + get_device (src_value)).get_variation_index ();
+ hb_pair_t<unsigned, int> *varidx_delta;
+ if (!layout_variation_idx_delta_map->has (varidx, &varidx_delta)) return;
+
+ *value += hb_second (*varidx_delta);
+ }
+
+ bool copy_device (hb_serialize_context_t *c, const void *base,
+ const Value *src_value,
+ const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map,
+ unsigned int new_format, Flags flag) const
+ {
+ // Filter by new format.
+ if (!(new_format & flag)) return true;
+
+ Value *dst_value = c->copy (*src_value);
+
+ if (!dst_value) return false;
+ if (*dst_value == 0) return true;
+
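+    /* Non-null offset: serialize the referenced device table as its own
+     * object and link this offset to it. */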
+ *dst_value = 0;
+ c->push ();
+ if ((base + get_device (src_value)).copy (c, layout_variation_idx_delta_map))
+ {
+ c->add_link (*dst_value, c->pop_pack ());
+ return true;
+ }
+ else
+ {
+ c->pop_discard ();
+ return false;
+ }
+ }
+
+ static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
+ {
+ if (worked) *worked |= bool (*value);
+ return *reinterpret_cast<const HBINT16 *> (value);
+ }
+
+ public:
+
+ bool has_device () const
+ {
+ unsigned int format = *this;
+ return (format & devices) != 0;
+ }
+
+ bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
+ }
+
+ bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
+ {
+ TRACE_SANITIZE (this);
+ unsigned int len = get_len ();
+
+ if (!c->check_range (values, count, get_size ())) return_trace (false);
+
+ if (!has_device ()) return_trace (true);
+
+ for (unsigned int i = 0; i < count; i++) {
+ if (!sanitize_value_devices (c, base, values))
+ return_trace (false);
+ values += len;
+ }
+
+ return_trace (true);
+ }
+
+ /* Just sanitize referenced Device tables. Doesn't check the values themselves. */
+ bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
+ {
+ TRACE_SANITIZE (this);
+
+ if (!has_device ()) return_trace (true);
+
+ for (unsigned int i = 0; i < count; i++) {
+ if (!sanitize_value_devices (c, base, values))
+ return_trace (false);
+ values = &StructAtOffset<const Value> (values, stride);
+ }
+
+ return_trace (true);
+ }
+
+ private:
+
+ void should_drop (Value value, Flags flag, unsigned int* format) const
+ {
+ if (value) return;
+ *format = *format & ~flag;
+ }
+
+};
+
+}
+}
+}
+
+#endif // #ifndef OT_LAYOUT_GPOS_VALUEFORMAT_HH
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh
new file mode 100644
index 0000000000..b4466119be
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSet.hh
@@ -0,0 +1,126 @@
+#ifndef OT_LAYOUT_GSUB_ALTERNATESET_HH
+#define OT_LAYOUT_GSUB_ALTERNATESET_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct AlternateSet
+{
+ protected:
+ Array16Of<typename Types::HBGlyphID>
+ alternates; /* Array of alternate GlyphIDs--in
+ * arbitrary order */
+ public:
+ DEFINE_SIZE_ARRAY (2, alternates);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (alternates.sanitize (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return hb_any (alternates, glyphs); }
+
+ void closure (hb_closure_context_t *c) const
+ { c->output->add_array (alternates.arrayZ, alternates.len); }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ { c->output->add_array (alternates.arrayZ, alternates.len); }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ unsigned int count = alternates.len;
+
+ if (unlikely (!count)) return_trace (false);
+
+ hb_mask_t glyph_mask = c->buffer->cur().mask;
+ hb_mask_t lookup_mask = c->lookup_mask;
+
+ /* Note: This breaks badly if two features enabled this lookup together. */
+ unsigned int shift = hb_ctz (lookup_mask);
+ unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
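+    /* alt_index is 1-based; 0 means no alternate is requested for this glyph. */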
+
+    /* An alt_index of MAX_VALUE means "randomize"; this only happens when the rand feature is enabled. */
+ if (alt_index == HB_OT_MAP_MAX_VALUE && c->random)
+ {
+ /* Maybe we can do better than unsafe-to-break all; but since we are
+ * changing random state, it would be hard to track that. Good 'nough. */
+ c->buffer->unsafe_to_break (0, c->buffer->len);
+ alt_index = c->random_number () % count + 1;
+ }
+
+ if (unlikely (alt_index > count || alt_index == 0)) return_trace (false);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "replacing glyph at %u (alternate substitution)",
+ c->buffer->idx);
+ }
+
+ c->replace_glyph (alternates[alt_index - 1]);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replaced glyph at %u (alternate substitution)",
+ c->buffer->idx - 1u);
+ }
+
+ return_trace (true);
+ }
+
+ unsigned
+ get_alternates (unsigned start_offset,
+ unsigned *alternate_count /* IN/OUT. May be NULL. */,
+ hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const
+ {
+ if (alternates.len && alternate_count)
+ {
+ + alternates.as_array ().sub_array (start_offset, alternate_count)
+ | hb_sink (hb_array (alternate_glyphs, *alternate_count))
+ ;
+ }
+ return alternates.len;
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator alts)
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (alternates.serialize (c, alts));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto it =
+ + hb_iter (alternates)
+ | hb_filter (glyphset)
+ | hb_map (glyph_map)
+ ;
+
+ auto *out = c->serializer->start_embed (*this);
+ return_trace (out->serialize (c->serializer, it) &&
+ out->alternates);
+ }
+};
+
+}
+}
+}
+
+
+#endif /* OT_LAYOUT_GSUB_ALTERNATESET_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh
new file mode 100644
index 0000000000..04a052a783
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubst.hh
@@ -0,0 +1,62 @@
+#ifndef OT_LAYOUT_GSUB_ALTERNATESUBST_HH
+#define OT_LAYOUT_GSUB_ALTERNATESUBST_HH
+
+#include "AlternateSubstFormat1.hh"
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct AlternateSubst
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ AlternateSubstFormat1_2<SmallTypes> format1;
+#ifndef HB_NO_BEYOND_64K
+ AlternateSubstFormat1_2<MediumTypes> format2;
+#endif
+ } u;
+ public:
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+  /* TODO This function is unused and has not been updated to 24-bit GIDs. It should be
+   * rewritten using iterators, perhaps over arrays of hb_codepoint_t. */
+ bool serialize (hb_serialize_context_t *c,
+ hb_sorted_array_t<const HBGlyphID16> glyphs,
+ hb_array_t<const unsigned int> alternate_len_list,
+ hb_array_t<const HBGlyphID16> alternate_glyphs_list)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (u.format))) return_trace (false);
+ unsigned int format = 1;
+ u.format = format;
+ switch (u.format) {
+ case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, alternate_glyphs_list));
+ default:return_trace (false);
+ }
+ }
+
+ /* TODO subset() should choose format. */
+
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_ALTERNATESUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubstFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubstFormat1.hh
new file mode 100644
index 0000000000..adec65d586
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/AlternateSubstFormat1.hh
@@ -0,0 +1,128 @@
+#ifndef OT_LAYOUT_GSUB_ALTERNATESUBSTFORMAT1_HH
+#define OT_LAYOUT_GSUB_ALTERNATESUBSTFORMAT1_HH
+
+#include "AlternateSet.hh"
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct AlternateSubstFormat1_2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of Substitution table */
+ Array16Of<typename Types::template OffsetTo<AlternateSet<Types>>>
+ alternateSet; /* Array of AlternateSet tables
+ * ordered by Coverage Index */
+ public:
+ DEFINE_SIZE_ARRAY (2 + 2 * Types::size, alternateSet);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ bool may_have_non_1to1 () const
+ { return false; }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ + hb_zip (this+coverage, alternateSet)
+ | hb_filter (c->parent_active_glyphs (), hb_first)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const AlternateSet<Types> &_) { _.closure (c); })
+ ;
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ + hb_zip (this+coverage, alternateSet)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const AlternateSet<Types> &_) { _.collect_glyphs (c); })
+ ;
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
+
+ unsigned
+ get_glyph_alternates (hb_codepoint_t gid,
+ unsigned start_offset,
+ unsigned *alternate_count /* IN/OUT. May be NULL. */,
+ hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const
+ { return (this+alternateSet[(this+coverage).get_coverage (gid)])
+ .get_alternates (start_offset, alternate_count, alternate_glyphs); }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+
+ unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ return_trace ((this+alternateSet[index]).apply (c));
+ }
+
+ bool serialize (hb_serialize_context_t *c,
+ hb_sorted_array_t<const HBGlyphID16> glyphs,
+ hb_array_t<const unsigned int> alternate_len_list,
+ hb_array_t<const HBGlyphID16> alternate_glyphs_list)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ if (unlikely (!alternateSet.serialize (c, glyphs.length))) return_trace (false);
+ for (unsigned int i = 0; i < glyphs.length; i++)
+ {
+ unsigned int alternate_len = alternate_len_list[i];
+ if (unlikely (!alternateSet[i]
+ .serialize_serialize (c, alternate_glyphs_list.sub_array (0, alternate_len))))
+ return_trace (false);
+ alternate_glyphs_list += alternate_len;
+ }
+ return_trace (coverage.serialize_serialize (c, glyphs));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + hb_zip (this+coverage, alternateSet)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter (subset_offset_array (c, out->alternateSet, this), hb_second)
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+ out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
+ return_trace (bool (new_coverage));
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_ALTERNATESUBSTFORMAT1_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/ChainContextSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/ChainContextSubst.hh
new file mode 100644
index 0000000000..08fd779f73
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/ChainContextSubst.hh
@@ -0,0 +1,18 @@
+#ifndef OT_LAYOUT_GSUB_CHAINCONTEXTSUBST_HH
+#define OT_LAYOUT_GSUB_CHAINCONTEXTSUBST_HH
+
+// TODO(garretrieger): move to new layout.
+#include "../../../hb-ot-layout-gsubgpos.hh"
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct ChainContextSubst : ChainContext {};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_CHAINCONTEXTSUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/Common.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/Common.hh
new file mode 100644
index 0000000000..968bba0481
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/Common.hh
@@ -0,0 +1,21 @@
+#ifndef OT_LAYOUT_GSUB_COMMON_HH
+#define OT_LAYOUT_GSUB_COMMON_HH
+
+#include "../../../hb-serialize.hh"
+#include "../../../hb-ot-layout-gsubgpos.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+typedef hb_pair_t<hb_codepoint_t, hb_codepoint_t> hb_codepoint_pair_t;
+
+template<typename Iterator>
+static void SingleSubst_serialize (hb_serialize_context_t *c,
+ Iterator it);
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_COMMON_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/ContextSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/ContextSubst.hh
new file mode 100644
index 0000000000..9f8cb46b5e
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/ContextSubst.hh
@@ -0,0 +1,18 @@
+#ifndef OT_LAYOUT_GSUB_CONTEXTSUBST_HH
+#define OT_LAYOUT_GSUB_CONTEXTSUBST_HH
+
+// TODO(garretrieger): move to new layout.
+#include "../../../hb-ot-layout-gsubgpos.hh"
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct ContextSubst : Context {};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_CONTEXTSUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/ExtensionSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/ExtensionSubst.hh
new file mode 100644
index 0000000000..831a7dfa2d
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/ExtensionSubst.hh
@@ -0,0 +1,22 @@
+#ifndef OT_LAYOUT_GSUB_EXTENSIONSUBST_HH
+#define OT_LAYOUT_GSUB_EXTENSIONSUBST_HH
+
+// TODO(garretrieger): move to new layout.
+#include "../../../hb-ot-layout-gsubgpos.hh"
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct ExtensionSubst : Extension<ExtensionSubst>
+{
+ typedef struct SubstLookupSubTable SubTable;
+ bool is_reverse () const;
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_EXTENSIONSUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/GSUB.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/GSUB.hh
new file mode 100644
index 0000000000..900cf603e4
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/GSUB.hh
@@ -0,0 +1,61 @@
+#ifndef OT_LAYOUT_GSUB_GSUB_HH
+#define OT_LAYOUT_GSUB_GSUB_HH
+
+#include "../../../hb-ot-layout-gsubgpos.hh"
+#include "Common.hh"
+#include "SubstLookup.hh"
+
+namespace OT {
+
+using Layout::GSUB_impl::SubstLookup;
+
+namespace Layout {
+
+/*
+ * GSUB -- Glyph Substitution
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/gsub
+ */
+
+struct GSUB : GSUBGPOS
+{
+ using Lookup = SubstLookup;
+
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_GSUB;
+
+ const SubstLookup& get_lookup (unsigned int i) const
+ { return static_cast<const SubstLookup &> (GSUBGPOS::get_lookup (i)); }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ hb_subset_layout_context_t l (c, tableTag);
+ return GSUBGPOS::subset<SubstLookup> (&l);
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (GSUBGPOS::sanitize<SubstLookup> (c));
+ }
+
+ HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
+ hb_face_t *face) const;
+
+ void closure_lookups (hb_face_t *face,
+ const hb_set_t *glyphs,
+ hb_set_t *lookup_indexes /* IN/OUT */) const
+ { GSUBGPOS::closure_lookups<SubstLookup> (face, glyphs, lookup_indexes); }
+
+ typedef GSUBGPOS::accelerator_t<GSUB> accelerator_t;
+};
+
+
+}
+
+struct GSUB_accelerator_t : Layout::GSUB::accelerator_t {
+ GSUB_accelerator_t (hb_face_t *face) : Layout::GSUB::accelerator_t (face) {}
+};
+
+
+}
+
+#endif /* OT_LAYOUT_GSUB_GSUB_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/Ligature.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/Ligature.hh
new file mode 100644
index 0000000000..8674a52fb5
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/Ligature.hh
@@ -0,0 +1,190 @@
+#ifndef OT_LAYOUT_GSUB_LIGATURE_HH
+#define OT_LAYOUT_GSUB_LIGATURE_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct Ligature
+{
+ public:
+ typename Types::HBGlyphID
+ ligGlyph; /* GlyphID of ligature to substitute */
+ HeadlessArrayOf<typename Types::HBGlyphID>
+ component; /* Array of component GlyphIDs--start
+ * with the second component--ordered
+ * in writing direction */
+ public:
+ DEFINE_SIZE_ARRAY (Types::size + 2, component);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (ligGlyph.sanitize (c) && component.sanitize (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return hb_all (component, glyphs); }
+
+ bool intersects_lig_glyph (const hb_set_t *glyphs) const
+ { return glyphs->has(ligGlyph); }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ if (!intersects (c->glyphs)) return;
+ c->output->add (ligGlyph);
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ c->input->add_array (component.arrayZ, component.get_length ());
+ c->output->add (ligGlyph);
+ }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ {
+ if (c->len != component.lenP1)
+ return false;
+
+ for (unsigned int i = 1; i < c->len; i++)
+ if (likely (c->glyphs[i] != component[i]))
+ return false;
+
+ return true;
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ unsigned int count = component.lenP1;
+
+ if (unlikely (!count)) return_trace (false);
+
+ /* Special-case to make it in-place and not consider this
+ * as a "ligated" substitution. */
+ if (unlikely (count == 1))
+ {
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "replacing glyph at %u (ligature substitution)",
+ c->buffer->idx);
+ }
+
+ c->replace_glyph (ligGlyph);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replaced glyph at %u (ligature substitution)",
+ c->buffer->idx - 1u);
+ }
+
+ return_trace (true);
+ }
+
+ unsigned int total_component_count = 0;
+
+ unsigned int match_end = 0;
+ unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
+
+ if (likely (!match_input (c, count,
+ &component[1],
+ match_glyph,
+ nullptr,
+ &match_end,
+ match_positions,
+ &total_component_count)))
+ {
+ c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
+ return_trace (false);
+ }
+
+ unsigned pos = 0;
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ unsigned delta = c->buffer->sync_so_far ();
+
+ pos = c->buffer->idx;
+
+ char buf[HB_MAX_CONTEXT_LENGTH * 16] = {0};
+ char *p = buf;
+
+ match_end += delta;
+ for (unsigned i = 0; i < count; i++)
+ {
+ match_positions[i] += delta;
+ if (i)
+ *p++ = ',';
+ snprintf (p, sizeof(buf) - (p - buf), "%u", match_positions[i]);
+ p += strlen(p);
+ }
+
+ c->buffer->message (c->font,
+ "ligating glyphs at %s",
+ buf);
+ }
+
+ ligate_input (c,
+ count,
+ match_positions,
+ match_end,
+ ligGlyph,
+ total_component_count);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "ligated glyph at %u",
+ pos);
+ }
+
+ return_trace (true);
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c,
+ hb_codepoint_t ligature,
+ Iterator components /* Starting from second */)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ ligGlyph = ligature;
+ if (unlikely (!component.serialize (c, components))) return_trace (false);
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c, unsigned coverage_idx) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ if (!intersects (&glyphset) || !glyphset.has (ligGlyph)) return_trace (false);
+ // Ensure Coverage table is always packed after this.
+ c->serializer->add_virtual_link (coverage_idx);
+
+ auto it =
+ + hb_iter (component)
+ | hb_map (glyph_map)
+ ;
+
+ auto *out = c->serializer->start_embed (*this);
+ return_trace (out->serialize (c->serializer,
+ glyph_map[ligGlyph],
+                                  it));
+  }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_LIGATURE_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSet.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSet.hh
new file mode 100644
index 0000000000..0ba262e901
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSet.hh
@@ -0,0 +1,189 @@
+#ifndef OT_LAYOUT_GSUB_LIGATURESET_HH
+#define OT_LAYOUT_GSUB_LIGATURESET_HH
+
+#include "Common.hh"
+#include "Ligature.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct LigatureSet
+{
+ protected:
+ Array16OfOffset16To<Ligature<Types>>
+                ligature;               /* Array of Ligature tables
+                                         * ordered by preference */
+ public:
+ DEFINE_SIZE_ARRAY (2, ligature);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (ligature.sanitize (c, this));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ return
+ + hb_iter (ligature)
+ | hb_map (hb_add (this))
+ | hb_map ([glyphs] (const Ligature<Types> &_) { return _.intersects (glyphs); })
+ | hb_any
+ ;
+ }
+
+ bool intersects_lig_glyph (const hb_set_t *glyphs) const
+ {
+ return
+ + hb_iter (ligature)
+ | hb_map (hb_add (this))
+ | hb_map ([glyphs] (const Ligature<Types> &_) {
+ return _.intersects_lig_glyph (glyphs) && _.intersects (glyphs);
+ })
+ | hb_any
+ ;
+ }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ + hb_iter (ligature)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const Ligature<Types> &_) { _.closure (c); })
+ ;
+ }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ + hb_iter (ligature)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const Ligature<Types> &_) { _.collect_glyphs (c); })
+ ;
+ }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ {
+ return
+ + hb_iter (ligature)
+ | hb_map (hb_add (this))
+ | hb_map ([c] (const Ligature<Types> &_) { return _.would_apply (c); })
+ | hb_any
+ ;
+ }
+
+ static bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
+ {
+ return true;
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+
+ unsigned int num_ligs = ligature.len;
+
+#ifndef HB_NO_OT_LIGATURES_FAST_PATH
+ if (HB_OPTIMIZE_SIZE_VAL || num_ligs <= 2)
+#endif
+ {
+ slow:
+ for (unsigned int i = 0; i < num_ligs; i++)
+ {
+ const auto &lig = this+ligature.arrayZ[i];
+ if (lig.apply (c)) return_trace (true);
+ }
+ return_trace (false);
+ }
+
+ /* This version is optimized for speed by matching the first component
+ * of the ligature here, instead of calling into the ligation code. */
+
+ hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
+ skippy_iter.reset (c->buffer->idx, 1);
+ skippy_iter.set_match_func (match_always, nullptr);
+ skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
+ unsigned unsafe_to;
+ hb_codepoint_t first = (unsigned) -1;
+ bool matched = skippy_iter.next (&unsafe_to);
+ if (likely (matched))
+ {
+ first = c->buffer->info[skippy_iter.idx].codepoint;
+ unsafe_to = skippy_iter.idx + 1;
+
+ if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
+ {
+        /* Can't use the fast path if, e.g., the next char is a default-ignorable
+ * or other skippable. */
+ goto slow;
+ }
+ }
+
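+    /* Ligatures whose second component does not match `first` are skipped
+     * without being tried; remember that so the affected range is still
+     * flagged unsafe-to-concat. */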
+ bool unsafe_to_concat = false;
+
+ for (unsigned int i = 0; i < num_ligs; i++)
+ {
+ const auto &lig = this+ligature.arrayZ[i];
+ if (unlikely (lig.component.lenP1 <= 1) ||
+ lig.component[1] == first)
+ {
+ if (lig.apply (c))
+ {
+ if (unsafe_to_concat)
+ c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
+ return_trace (true);
+ }
+ }
+ else if (likely (lig.component.lenP1 > 1))
+ unsafe_to_concat = true;
+ }
+ if (likely (unsafe_to_concat))
+ c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
+
+ return_trace (false);
+ }
+
+ bool serialize (hb_serialize_context_t *c,
+ hb_array_t<const HBGlyphID16> ligatures,
+ hb_array_t<const unsigned int> component_count_list,
+ hb_array_t<const HBGlyphID16> &component_list /* Starting from second for each ligature */)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false);
+ for (unsigned int i = 0; i < ligatures.length; i++)
+ {
+ unsigned int component_count = (unsigned) hb_max ((int) component_count_list[i] - 1, 0);
+ if (unlikely (!ligature[i].serialize_serialize (c,
+ ligatures[i],
+ component_list.sub_array (0, component_count))))
+ return_trace (false);
+ component_list += component_count;
+ }
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c, unsigned coverage_idx) const
+ {
+ TRACE_SUBSET (this);
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+
+ + hb_iter (ligature)
+ | hb_filter (subset_offset_array (c, out->ligature, this, coverage_idx))
+ | hb_drain
+ ;
+
+ if (bool (out->ligature))
+ // Ensure Coverage table is always packed after this.
+ c->serializer->add_virtual_link (coverage_idx);
+
+ return_trace (bool (out->ligature));
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_LIGATURESET_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh
new file mode 100644
index 0000000000..18f6e35581
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubst.hh
@@ -0,0 +1,71 @@
+#ifndef OT_LAYOUT_GSUB_LIGATURESUBST_HH
+#define OT_LAYOUT_GSUB_LIGATURESUBST_HH
+
+#include "Common.hh"
+#include "LigatureSubstFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct LigatureSubst
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ LigatureSubstFormat1_2<SmallTypes> format1;
+#ifndef HB_NO_BEYOND_64K
+ LigatureSubstFormat1_2<MediumTypes> format2;
+#endif
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+  /* TODO This function only handles small (16-bit) GIDs and has not been updated to
+   * 24-bit GIDs. It should be rewritten using iterators, perhaps over arrays of
+   * hb_codepoint_t. */
+ bool serialize (hb_serialize_context_t *c,
+ hb_sorted_array_t<const HBGlyphID16> first_glyphs,
+ hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
+ hb_array_t<const HBGlyphID16> ligatures_list,
+ hb_array_t<const unsigned int> component_count_list,
+ hb_array_t<const HBGlyphID16> component_list /* Starting from second for each ligature */)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (u.format))) return_trace (false);
+ unsigned int format = 1;
+ u.format = format;
+ switch (u.format) {
+ case 1: return_trace (u.format1.serialize (c,
+ first_glyphs,
+ ligature_per_first_glyph_count_list,
+ ligatures_list,
+ component_count_list,
+ component_list));
+ default:return_trace (false);
+ }
+ }
+
+ /* TODO subset() should choose format. */
+
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_LIGATURESUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubstFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubstFormat1.hh
new file mode 100644
index 0000000000..5c7df97d13
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/LigatureSubstFormat1.hh
@@ -0,0 +1,166 @@
+#ifndef OT_LAYOUT_GSUB_LIGATURESUBSTFORMAT1_HH
+#define OT_LAYOUT_GSUB_LIGATURESUBSTFORMAT1_HH
+
+#include "Common.hh"
+#include "LigatureSet.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct LigatureSubstFormat1_2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of Substitution table */
+ Array16Of<typename Types::template OffsetTo<LigatureSet<Types>>>
+                ligatureSet;            /* Array of LigatureSet tables
+                                         * ordered by Coverage Index */
+ public:
+ DEFINE_SIZE_ARRAY (4 + Types::size, ligatureSet);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ return
+ + hb_zip (this+coverage, ligatureSet)
+ | hb_filter (*glyphs, hb_first)
+ | hb_map (hb_second)
+ | hb_map ([this, glyphs] (const typename Types::template OffsetTo<LigatureSet<Types>> &_)
+ { return (this+_).intersects (glyphs); })
+ | hb_any
+ ;
+ }
+
+ bool may_have_non_1to1 () const
+ { return true; }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ + hb_zip (this+coverage, ligatureSet)
+ | hb_filter (c->parent_active_glyphs (), hb_first)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const LigatureSet<Types> &_) { _.closure (c); })
+ ;
+
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+
+ + hb_zip (this+coverage, ligatureSet)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const LigatureSet<Types> &_) { _.collect_glyphs (c); })
+ ;
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ {
+ unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
+ if (likely (index == NOT_COVERED)) return false;
+
+ const auto &lig_set = this+ligatureSet[index];
+ return lig_set.would_apply (c);
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+
+ unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ const auto &lig_set = this+ligatureSet[index];
+ return_trace (lig_set.apply (c));
+ }
+
+ bool serialize (hb_serialize_context_t *c,
+ hb_sorted_array_t<const HBGlyphID16> first_glyphs,
+ hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
+ hb_array_t<const HBGlyphID16> ligatures_list,
+ hb_array_t<const unsigned int> component_count_list,
+ hb_array_t<const HBGlyphID16> component_list /* Starting from second for each ligature */)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ if (unlikely (!ligatureSet.serialize (c, first_glyphs.length))) return_trace (false);
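+ /* Each LigatureSet consumes the next ligature_count entries of the flat
+ * ligatures_list and component_count_list arrays; the sub-arrays are
+ * advanced below so the next first glyph starts where this one ended. */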
+ for (unsigned int i = 0; i < first_glyphs.length; i++)
+ {
+ unsigned int ligature_count = ligature_per_first_glyph_count_list[i];
+ if (unlikely (!ligatureSet[i]
+ .serialize_serialize (c,
+ ligatures_list.sub_array (0, ligature_count),
+ component_count_list.sub_array (0, ligature_count),
+ component_list))) return_trace (false);
+ ligatures_list += ligature_count;
+ component_count_list += ligature_count;
+ }
+ return_trace (coverage.serialize_serialize (c, first_glyphs));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ // Due to a bug in some older versions of windows 7 the Coverage table must be
+ // packed after the LigatureSet and Ligature tables, so serialize Coverage first
+ // which places it last in the packed order.
+ hb_set_t new_coverage;
+ + hb_zip (this+coverage, hb_iter (ligatureSet) | hb_map (hb_add (this)))
+ | hb_filter (glyphset, hb_first)
+ | hb_filter ([&] (const LigatureSet<Types>& _) {
+ return _.intersects_lig_glyph (&glyphset);
+ }, hb_second)
+ | hb_map (hb_first)
+ | hb_sink (new_coverage);
+
+ if (!c->serializer->push<Coverage> ()
+ ->serialize (c->serializer,
+ + new_coverage.iter () | hb_map_retains_sorting (glyph_map)))
+ {
+ c->serializer->pop_discard ();
+ return_trace (false);
+ }
+
+ unsigned coverage_idx = c->serializer->pop_pack ();
+ c->serializer->add_link (out->coverage, coverage_idx);
+
+ + hb_zip (this+coverage, ligatureSet)
+ | hb_filter (new_coverage, hb_first)
+ | hb_map (hb_second)
+ // To ensure that the repacker always orders the Coverage table after the
+ // LigatureSet and Ligature subtables, they are linked to the Coverage table
+ // via a virtual link; the Coverage table object idx is passed down to
+ // facilitate this.
+ | hb_apply (subset_offset_array (c, out->ligatureSet, this, coverage_idx))
+ ;
+
+ return_trace (bool (new_coverage));
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_LIGATURESUBSTFORMAT1_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh
new file mode 100644
index 0000000000..742c8587ee
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubst.hh
@@ -0,0 +1,62 @@
+#ifndef OT_LAYOUT_GSUB_MULTIPLESUBST_HH
+#define OT_LAYOUT_GSUB_MULTIPLESUBST_HH
+
+#include "Common.hh"
+#include "MultipleSubstFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct MultipleSubst
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ MultipleSubstFormat1_2<SmallTypes> format1;
+#ifndef HB_NO_BEYOND_64K
+ MultipleSubstFormat1_2<MediumTypes> format2;
+#endif
+ } u;
+
+ public:
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_sorted_iterator (Iterator))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator it)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (u.format))) return_trace (false);
+ unsigned int format = 1;
+ u.format = format;
+ switch (u.format) {
+ case 1: return_trace (u.format1.serialize (c, it));
+ default:return_trace (false);
+ }
+ }
+
+ /* TODO subset() should choose format. */
+
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_MULTIPLESUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubstFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubstFormat1.hh
new file mode 100644
index 0000000000..3b4bd11694
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/MultipleSubstFormat1.hh
@@ -0,0 +1,130 @@
+#ifndef OT_LAYOUT_GSUB_MULTIPLESUBSTFORMAT1_HH
+#define OT_LAYOUT_GSUB_MULTIPLESUBSTFORMAT1_HH
+
+#include "Common.hh"
+#include "Sequence.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct MultipleSubstFormat1_2
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of Substitution table */
+ Array16Of<typename Types::template OffsetTo<Sequence<Types>>>
+ sequence; /* Array of Sequence tables
+ * ordered by Coverage Index */
+ public:
+ DEFINE_SIZE_ARRAY (4 + Types::size, sequence);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && sequence.sanitize (c, this));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ bool may_have_non_1to1 () const
+ { return true; }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ + hb_zip (this+coverage, sequence)
+ | hb_filter (c->parent_active_glyphs (), hb_first)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const Sequence<Types> &_) { _.closure (c); })
+ ;
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ + hb_zip (this+coverage, sequence)
+ | hb_map (hb_second)
+ | hb_map (hb_add (this))
+ | hb_apply ([c] (const Sequence<Types> &_) { _.collect_glyphs (c); })
+ ;
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+
+ unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ return_trace ((this+sequence[index]).apply (c));
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_sorted_iterator (Iterator))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator it)
+ {
+ TRACE_SERIALIZE (this);
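+ /* Each element of 'it' is a (first glyph, substitute-glyph iterator) pair,
+ * sorted by first glyph: the second halves become Sequence tables and the
+ * first halves become the Coverage glyph list. */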
+ auto sequences =
+ + it
+ | hb_map (hb_second)
+ ;
+ auto glyphs =
+ + it
+ | hb_map_retains_sorting (hb_first)
+ ;
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+
+ if (unlikely (!sequence.serialize (c, sequences.length))) return_trace (false);
+
+ for (auto& pair : hb_zip (sequences, sequence))
+ {
+ if (unlikely (!pair.second
+ .serialize_serialize (c, pair.first)))
+ return_trace (false);
+ }
+
+ return_trace (coverage.serialize_serialize (c, glyphs));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto *out = c->serializer->start_embed (*this);
+ if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
+ out->format = format;
+
+ hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ + hb_zip (this+coverage, sequence)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter (subset_offset_array (c, out->sequence, this), hb_second)
+ | hb_map (hb_first)
+ | hb_map (glyph_map)
+ | hb_sink (new_coverage)
+ ;
+ out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
+ return_trace (bool (new_coverage));
+ }
+};
+
+}
+}
+}
+
+
+#endif /* OT_LAYOUT_GSUB_MULTIPLESUBSTFORMAT1_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubst.hh
new file mode 100644
index 0000000000..5ad463fea7
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubst.hh
@@ -0,0 +1,36 @@
+#ifndef OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBST_HH
+#define OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBST_HH
+
+#include "Common.hh"
+#include "ReverseChainSingleSubstFormat1.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct ReverseChainSingleSubst
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ ReverseChainSingleSubstFormat1 format1;
+ } u;
+
+ public:
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ default:return_trace (c->default_return_value ());
+ }
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubstFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubstFormat1.hh
new file mode 100644
index 0000000000..2c2e1aa44f
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/ReverseChainSingleSubstFormat1.hh
@@ -0,0 +1,244 @@
+#ifndef OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBSTFORMAT1_HH
+#define OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBSTFORMAT1_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct ReverseChainSingleSubstFormat1
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ Offset16To<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of table */
+ Array16OfOffset16To<Coverage>
+ backtrack; /* Array of coverage tables
+ * in backtracking sequence, in glyph
+ * sequence order */
+ Array16OfOffset16To<Coverage>
+ lookaheadX; /* Array of coverage tables
+ * in lookahead sequence, in glyph
+ * sequence order */
+ Array16Of<HBGlyphID16>
+ substituteX; /* Array of substitute
+ * GlyphIDs--ordered by Coverage Index */
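+ /* Note: lookaheadX and substituteX do not live at fixed offsets, since
+ * backtrack is variable-length; they are only reached via StructAfter<>
+ * in the methods below. */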
+ public:
+ DEFINE_SIZE_MIN (10);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
+ return_trace (false);
+ const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
+ if (!lookahead.sanitize (c, this))
+ return_trace (false);
+ const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
+ return_trace (substitute.sanitize (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ if (!(this+coverage).intersects (glyphs))
+ return false;
+
+ const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
+
+ unsigned int count;
+
+ count = backtrack.len;
+ for (unsigned int i = 0; i < count; i++)
+ if (!(this+backtrack[i]).intersects (glyphs))
+ return false;
+
+ count = lookahead.len;
+ for (unsigned int i = 0; i < count; i++)
+ if (!(this+lookahead[i]).intersects (glyphs))
+ return false;
+
+ return true;
+ }
+
+ bool may_have_non_1to1 () const
+ { return false; }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ if (!intersects (c->glyphs)) return;
+
+ const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
+ const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
+
+ + hb_zip (this+coverage, substitute)
+ | hb_filter (c->parent_active_glyphs (), hb_first)
+ | hb_map (hb_second)
+ | hb_sink (c->output)
+ ;
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+
+ unsigned int count;
+
+ count = backtrack.len;
+ for (unsigned int i = 0; i < count; i++)
+ if (unlikely (!(this+backtrack[i]).collect_coverage (c->before))) return;
+
+ const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
+ count = lookahead.len;
+ for (unsigned int i = 0; i < count; i++)
+ if (unlikely (!(this+lookahead[i]).collect_coverage (c->after))) return;
+
+ const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
+ count = substitute.len;
+ c->output->add_array (substitute.arrayZ, substitute.len);
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ if (unlikely (c->nesting_level_left != HB_MAX_NESTING_LEVEL))
+ return_trace (false); /* No chaining to this type */
+
+ unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
+ const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
+
+ if (unlikely (index >= substitute.len)) return_trace (false);
+
+ unsigned int start_index = 0, end_index = 0;
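+ /* Match the backtrack and lookahead coverage arrays around the current
+ * glyph; the substitution is applied in place only if both match. */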
+ if (match_backtrack (c,
+ backtrack.len, (HBUINT16 *) backtrack.arrayZ,
+ match_coverage, this,
+ &start_index) &&
+ match_lookahead (c,
+ lookahead.len, (HBUINT16 *) lookahead.arrayZ,
+ match_coverage, this,
+ c->buffer->idx + 1, &end_index))
+ {
+ c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replacing glyph at %u (reverse chaining substitution)",
+ c->buffer->idx);
+ }
+
+ c->replace_glyph_inplace (substitute[index]);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replaced glyph at %u (reverse chaining substitution)",
+ c->buffer->idx);
+ }
+
+ /* Note: We DON'T decrease buffer->idx. The main loop does it
+ * for us. This is useful for preventing surprises if someone
+ * calls us through a Context lookup. */
+ return_trace (true);
+ }
+ else
+ {
+ c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
+ return_trace (false);
+ }
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_iterator (Iterator))>
+ bool serialize_coverage_offset_array (hb_subset_context_t *c, Iterator it) const
+ {
+ TRACE_SERIALIZE (this);
+ auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
+
+ if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
+ return_trace (false);
+
+ for (auto& offset : it) {
+ auto *o = out->serialize_append (c->serializer);
+ if (unlikely (!o) || !o->serialize_subset (c, offset, this))
+ return_trace (false);
+ }
+
+ return_trace (true);
+ }
+
+ template<typename Iterator, typename BacktrackIterator, typename LookaheadIterator,
+ hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_pair_t)),
+ hb_requires (hb_is_iterator (BacktrackIterator)),
+ hb_requires (hb_is_iterator (LookaheadIterator))>
+ bool serialize (hb_subset_context_t *c,
+ Iterator coverage_subst_iter,
+ BacktrackIterator backtrack_iter,
+ LookaheadIterator lookahead_iter) const
+ {
+ TRACE_SERIALIZE (this);
+
+ auto *out = c->serializer->start_embed (this);
+ if (unlikely (!c->serializer->check_success (out))) return_trace (false);
+ if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
+ if (unlikely (!c->serializer->embed (this->coverage))) return_trace (false);
+
+ if (!serialize_coverage_offset_array (c, backtrack_iter)) return_trace (false);
+ if (!serialize_coverage_offset_array (c, lookahead_iter)) return_trace (false);
+
+ auto *substitute_out = c->serializer->start_embed<Array16Of<HBGlyphID16>> ();
+ auto substitutes =
+ + coverage_subst_iter
+ | hb_map (hb_second)
+ ;
+
+ auto glyphs =
+ + coverage_subst_iter
+ | hb_map_retains_sorting (hb_first)
+ ;
+ if (unlikely (! c->serializer->check_success (substitute_out->serialize (c->serializer, substitutes))))
+ return_trace (false);
+
+ if (unlikely (!out->coverage.serialize_serialize (c->serializer, glyphs)))
+ return_trace (false);
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
+ const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
+
+ auto it =
+ + hb_zip (this+coverage, substitute)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter (glyphset, hb_second)
+ | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const HBGlyphID16 &> p) -> hb_codepoint_pair_t
+ { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
+ ;
+
+ return_trace (bool (it) && serialize (c, it, backtrack.iter (), lookahead.iter ()));
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_REVERSECHAINSINGLESUBSTFORMAT1_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/Sequence.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/Sequence.hh
new file mode 100644
index 0000000000..ae3292f329
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/Sequence.hh
@@ -0,0 +1,165 @@
+#ifndef OT_LAYOUT_GSUB_SEQUENCE_HH
+#define OT_LAYOUT_GSUB_SEQUENCE_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct Sequence
+{
+ protected:
+ Array16Of<typename Types::HBGlyphID>
+ substitute; /* String of GlyphIDs to substitute */
+ public:
+ DEFINE_SIZE_ARRAY (2, substitute);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (substitute.sanitize (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return hb_all (substitute, glyphs); }
+
+ void closure (hb_closure_context_t *c) const
+ { c->output->add_array (substitute.arrayZ, substitute.len); }
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ { c->output->add_array (substitute.arrayZ, substitute.len); }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ unsigned int count = substitute.len;
+
+ /* Special-case to make it in-place and not consider this
+ * as a "multiplied" substitution. */
+ if (unlikely (count == 1))
+ {
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "replacing glyph at %u (multiple substitution)",
+ c->buffer->idx);
+ }
+
+ c->replace_glyph (substitute.arrayZ[0]);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replaced glyph at %u (multiple subtitution)",
+ c->buffer->idx - 1u);
+ }
+
+ return_trace (true);
+ }
+ /* Spec disallows this, but Uniscribe allows it.
+ * https://github.com/harfbuzz/harfbuzz/issues/253 */
+ else if (unlikely (count == 0))
+ {
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "deleting glyph at %u (multiple substitution)",
+ c->buffer->idx);
+ }
+
+ c->buffer->delete_glyph ();
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "deleted glyph at %u (multiple substitution)",
+ c->buffer->idx);
+ }
+
+ return_trace (true);
+ }
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "multiplying glyph at %u",
+ c->buffer->idx);
+ }
+
+ unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
+ HB_OT_LAYOUT_GLYPH_PROPS_BASE_GLYPH : 0;
+ unsigned lig_id = _hb_glyph_info_get_lig_id (&c->buffer->cur());
+
+ for (unsigned int i = 0; i < count; i++)
+ {
+ /* If it is attached to a ligature, don't disturb that.
+ * https://github.com/harfbuzz/harfbuzz/issues/3069 */
+ if (!lig_id)
+ _hb_glyph_info_set_lig_props_for_component (&c->buffer->cur(), i);
+ c->output_glyph_for_component (substitute.arrayZ[i], klass);
+ }
+ c->buffer->skip_glyph ();
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+
+ char buf[HB_MAX_CONTEXT_LENGTH * 16] = {0};
+ char *p = buf;
+
+ for (unsigned i = c->buffer->idx - count; i < c->buffer->idx; i++)
+ {
+ if (buf < p)
+ *p++ = ',';
+ snprintf (p, sizeof(buf) - (p - buf), "%u", i);
+ p += strlen(p);
+ }
+
+ c->buffer->message (c->font,
+ "multiplied glyphs at %s",
+ buf);
+ }
+
+ return_trace (true);
+ }
+
+ template <typename Iterator,
+ hb_requires (hb_is_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator subst)
+ {
+ TRACE_SERIALIZE (this);
+ return_trace (substitute.serialize (c, subst));
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ if (!intersects (&glyphset)) return_trace (false);
+
+ auto it =
+ + hb_iter (substitute)
+ | hb_map (glyph_map)
+ ;
+
+ auto *out = c->serializer->start_embed (*this);
+ return_trace (out->serialize (c->serializer, it));
+ }
+};
+
+
+}
+}
+}
+
+
+#endif /* OT_LAYOUT_GSUB_SEQUENCE_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh
new file mode 100644
index 0000000000..181c9e52e5
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubst.hh
@@ -0,0 +1,103 @@
+#ifndef OT_LAYOUT_GSUB_SINGLESUBST_HH
+#define OT_LAYOUT_GSUB_SINGLESUBST_HH
+
+#include "Common.hh"
+#include "SingleSubstFormat1.hh"
+#include "SingleSubstFormat2.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct SingleSubst
+{
+ protected:
+ union {
+ HBUINT16 format; /* Format identifier */
+ SingleSubstFormat1_3<SmallTypes> format1;
+ SingleSubstFormat2_4<SmallTypes> format2;
+#ifndef HB_NO_BEYOND_64K
+ SingleSubstFormat1_3<MediumTypes> format3;
+ SingleSubstFormat2_4<MediumTypes> format4;
+#endif
+ } u;
+
+ public:
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ {
+ if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
+ TRACE_DISPATCH (this, u.format);
+ switch (u.format) {
+ case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
+ case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
+#ifndef HB_NO_BEYOND_64K
+ case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
+ case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
+#endif
+ default:return_trace (c->default_return_value ());
+ }
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_sorted_source_of (Iterator,
+ const hb_codepoint_pair_t))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator glyphs)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (u.format))) return_trace (false);
+ unsigned format = 2;
+ unsigned delta = 0;
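+ /* Prefer the most compact format: format 1 (single shared delta) when every
+ * (old, new) pair has the same delta under the glyph-ID mask, otherwise
+ * format 2 (explicit substitute array); +2 switches to the 24-bit MediumTypes
+ * variants when any substitute glyph ID exceeds 0xFFFF. */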
+ if (glyphs)
+ {
+ format = 1;
+ hb_codepoint_t mask = 0xFFFFu;
+
+#ifndef HB_NO_BEYOND_64K
+ if (+ glyphs
+ | hb_map_retains_sorting (hb_second)
+ | hb_filter ([] (hb_codepoint_t gid) { return gid > 0xFFFFu; }))
+ {
+ format += 2;
+ mask = 0xFFFFFFu;
+ }
+#endif
+
+ auto get_delta = [=] (hb_codepoint_pair_t _)
+ { return (unsigned) (_.second - _.first) & mask; };
+ delta = get_delta (*glyphs);
+ if (!hb_all (++(+glyphs), delta, get_delta)) format += 1;
+ }
+
+ u.format = format;
+ switch (u.format) {
+ case 1: return_trace (u.format1.serialize (c,
+ + glyphs
+ | hb_map_retains_sorting (hb_first),
+ delta));
+ case 2: return_trace (u.format2.serialize (c, glyphs));
+#ifndef HB_NO_BEYOND_64K
+ case 3: return_trace (u.format3.serialize (c,
+ + glyphs
+ | hb_map_retains_sorting (hb_first),
+ delta));
+ case 4: return_trace (u.format4.serialize (c, glyphs));
+#endif
+ default:return_trace (false);
+ }
+ }
+};
+
+template<typename Iterator>
+static void
+SingleSubst_serialize (hb_serialize_context_t *c,
+ Iterator it)
+{ c->start_embed<SingleSubst> ()->serialize (c, it); }
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_SINGLESUBST_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh
new file mode 100644
index 0000000000..850be86c04
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat1.hh
@@ -0,0 +1,204 @@
+#ifndef OT_LAYOUT_GSUB_SINGLESUBSTFORMAT1_HH
+#define OT_LAYOUT_GSUB_SINGLESUBSTFORMAT1_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct SingleSubstFormat1_3
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 1 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of Substitution table */
+ typename Types::HBUINT
+ deltaGlyphID; /* Add to original GlyphID to get
+ * substitute GlyphID, modulo 0x10000 */
+
+ public:
+ DEFINE_SIZE_STATIC (2 + 2 * Types::size);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ coverage.sanitize (c, this) &&
+ /* The coverage table may use a range to represent a set
+ * of glyphs, which means a small number of bytes can
+ * generate a large glyph set. Manually modify the
+ * sanitizer max ops to take this into account.
+ *
+ * Note: This check *must* be right after coverage sanitize. */
+ c->check_ops ((this + coverage).get_population () >> 1));
+ }
+
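+ /* Mask for wrapping glyph IDs after adding deltaGlyphID: 0xFFFF for
+ * SmallTypes (substitution modulo 0x10000), 0xFFFFFF for MediumTypes. */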
+ hb_codepoint_t get_mask () const
+ { return (1 << (8 * Types::size)) - 1; }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ bool may_have_non_1to1 () const
+ { return false; }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ hb_codepoint_t d = deltaGlyphID;
+ hb_codepoint_t mask = get_mask ();
+
+ /* Help the fuzzer avoid this function as much as possible. */
+ unsigned pop = (this+coverage).get_population ();
+ if (pop >= mask)
+ return;
+
+ hb_set_t intersection;
+ (this+coverage).intersect_set (c->parent_active_glyphs (), intersection);
+
+ /* In degenerate fuzzer-found fonts, but not real fonts,
+ * this table can keep adding new glyphs in each round of closure.
+ * Refuse to close-over, if it maps glyph range to overlapping range. */
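+ /* For example, a coverage of glyphs 1..10 with delta 1 maps onto 2..11,
+ * which overlaps the input range and would add a new glyph every round. */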
+ hb_codepoint_t min_before = intersection.get_min ();
+ hb_codepoint_t max_before = intersection.get_max ();
+ hb_codepoint_t min_after = (min_before + d) & mask;
+ hb_codepoint_t max_after = (max_before + d) & mask;
+ if (intersection.get_population () == max_before - min_before + 1 &&
+ ((min_before <= min_after && min_after <= max_before) ||
+ (min_before <= max_after && max_after <= max_before)))
+ return;
+
+ + hb_iter (intersection)
+ | hb_map ([d, mask] (hb_codepoint_t g) { return (g + d) & mask; })
+ | hb_sink (c->output)
+ ;
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ hb_codepoint_t d = deltaGlyphID;
+ hb_codepoint_t mask = get_mask ();
+
+ + hb_iter (this+coverage)
+ | hb_map ([d, mask] (hb_codepoint_t g) { return (g + d) & mask; })
+ | hb_sink (c->output)
+ ;
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
+
+ unsigned
+ get_glyph_alternates (hb_codepoint_t glyph_id,
+ unsigned start_offset,
+ unsigned *alternate_count /* IN/OUT. May be NULL. */,
+ hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const
+ {
+ unsigned int index = (this+coverage).get_coverage (glyph_id);
+ if (likely (index == NOT_COVERED))
+ {
+ if (alternate_count)
+ *alternate_count = 0;
+ return 0;
+ }
+
+ if (alternate_count && *alternate_count)
+ {
+ hb_codepoint_t d = deltaGlyphID;
+ hb_codepoint_t mask = get_mask ();
+
+ glyph_id = (glyph_id + d) & mask;
+
+ *alternate_glyphs = glyph_id;
+ *alternate_count = 1;
+ }
+
+ return 1;
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
+ unsigned int index = (this+coverage).get_coverage (glyph_id);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ hb_codepoint_t d = deltaGlyphID;
+ hb_codepoint_t mask = get_mask ();
+
+ glyph_id = (glyph_id + d) & mask;
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "replacing glyph at %u (single substitution)",
+ c->buffer->idx);
+ }
+
+ c->replace_glyph (glyph_id);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replaced glyph at %u (single substitution)",
+ c->buffer->idx - 1u);
+ }
+
+ return_trace (true);
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator glyphs,
+ unsigned delta)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ if (unlikely (!coverage.serialize_serialize (c, glyphs))) return_trace (false);
+ c->check_assign (deltaGlyphID, delta, HB_SERIALIZE_ERROR_INT_OVERFLOW);
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ hb_codepoint_t d = deltaGlyphID;
+ hb_codepoint_t mask = get_mask ();
+
+ hb_set_t intersection;
+ (this+coverage).intersect_set (glyphset, intersection);
+
+ auto it =
+ + hb_iter (intersection)
+ | hb_map_retains_sorting ([d, mask] (hb_codepoint_t g) {
+ return hb_codepoint_pair_t (g,
+ (g + d) & mask); })
+ | hb_filter (glyphset, hb_second)
+ | hb_map_retains_sorting ([&] (hb_codepoint_pair_t p) -> hb_codepoint_pair_t
+ { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
+ ;
+
+ bool ret = bool (it);
+ SingleSubst_serialize (c->serializer, it);
+ return_trace (ret);
+ }
+};
+
+}
+}
+}
+
+
+#endif /* OT_LAYOUT_GSUB_SINGLESUBSTFORMAT1_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh
new file mode 100644
index 0000000000..9c651abe71
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/SingleSubstFormat2.hh
@@ -0,0 +1,176 @@
+#ifndef OT_LAYOUT_GSUB_SINGLESUBSTFORMAT2_HH
+#define OT_LAYOUT_GSUB_SINGLESUBSTFORMAT2_HH
+
+#include "Common.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+template <typename Types>
+struct SingleSubstFormat2_4
+{
+ protected:
+ HBUINT16 format; /* Format identifier--format = 2 */
+ typename Types::template OffsetTo<Coverage>
+ coverage; /* Offset to Coverage table--from
+ * beginning of Substitution table */
+ Array16Of<typename Types::HBGlyphID>
+ substitute; /* Array of substitute
+ * GlyphIDs--ordered by Coverage Index */
+
+ public:
+ DEFINE_SIZE_ARRAY (4 + Types::size, substitute);
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (coverage.sanitize (c, this) && substitute.sanitize (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ { return (this+coverage).intersects (glyphs); }
+
+ bool may_have_non_1to1 () const
+ { return false; }
+
+ void closure (hb_closure_context_t *c) const
+ {
+ auto &cov = this+coverage;
+ auto &glyph_set = c->parent_active_glyphs ();
+
+ if (substitute.len > glyph_set.get_population () * 4)
+ {
+ for (auto g : glyph_set)
+ {
+ unsigned i = cov.get_coverage (g);
+ if (i == NOT_COVERED || i >= substitute.len)
+ continue;
+ c->output->add (substitute.arrayZ[i]);
+ }
+
+ return;
+ }
+
+ + hb_zip (cov, substitute)
+ | hb_filter (glyph_set, hb_first)
+ | hb_map (hb_second)
+ | hb_sink (c->output)
+ ;
+ }
+
+ void closure_lookups (hb_closure_lookups_context_t *c) const {}
+
+ void collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
+ + hb_zip (this+coverage, substitute)
+ | hb_map (hb_second)
+ | hb_sink (c->output)
+ ;
+ }
+
+ const Coverage &get_coverage () const { return this+coverage; }
+
+ bool would_apply (hb_would_apply_context_t *c) const
+ { return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
+
+ unsigned
+ get_glyph_alternates (hb_codepoint_t glyph_id,
+ unsigned start_offset,
+ unsigned *alternate_count /* IN/OUT. May be NULL. */,
+ hb_codepoint_t *alternate_glyphs /* OUT. May be NULL. */) const
+ {
+ unsigned int index = (this+coverage).get_coverage (glyph_id);
+ if (likely (index == NOT_COVERED))
+ {
+ if (alternate_count)
+ *alternate_count = 0;
+ return 0;
+ }
+
+ if (alternate_count && *alternate_count)
+ {
+ glyph_id = substitute[index];
+
+ *alternate_glyphs = glyph_id;
+ *alternate_count = 1;
+ }
+
+ return 1;
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
+ if (likely (index == NOT_COVERED)) return_trace (false);
+
+ if (unlikely (index >= substitute.len)) return_trace (false);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->sync_so_far ();
+ c->buffer->message (c->font,
+ "replacing glyph at %u (single substitution)",
+ c->buffer->idx);
+ }
+
+ c->replace_glyph (substitute[index]);
+
+ if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
+ {
+ c->buffer->message (c->font,
+ "replaced glyph at %u (single substitution)",
+ c->buffer->idx - 1u);
+ }
+
+ return_trace (true);
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_sorted_source_of (Iterator,
+ hb_codepoint_pair_t))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator it)
+ {
+ TRACE_SERIALIZE (this);
+ auto substitutes =
+ + it
+ | hb_map (hb_second)
+ ;
+ auto glyphs =
+ + it
+ | hb_map_retains_sorting (hb_first)
+ ;
+ if (unlikely (!c->extend_min (this))) return_trace (false);
+ if (unlikely (!substitute.serialize (c, substitutes))) return_trace (false);
+ if (unlikely (!coverage.serialize_serialize (c, glyphs))) return_trace (false);
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+ const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
+ const hb_map_t &glyph_map = *c->plan->glyph_map;
+
+ auto it =
+ + hb_zip (this+coverage, substitute)
+ | hb_filter (glyphset, hb_first)
+ | hb_filter (glyphset, hb_second)
+ | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const typename Types::HBGlyphID &> p) -> hb_codepoint_pair_t
+ { return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
+ ;
+
+ bool ret = bool (it);
+ SingleSubst_serialize (c->serializer, it);
+ return_trace (ret);
+ }
+};
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_SINGLESUBSTFORMAT2_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookup.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookup.hh
new file mode 100644
index 0000000000..d49dcc0e0f
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookup.hh
@@ -0,0 +1,220 @@
+#ifndef OT_LAYOUT_GSUB_SUBSTLOOKUP_HH
+#define OT_LAYOUT_GSUB_SUBSTLOOKUP_HH
+
+#include "Common.hh"
+#include "SubstLookupSubTable.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct SubstLookup : Lookup
+{
+ using SubTable = SubstLookupSubTable;
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ { return Lookup::sanitize<SubTable> (c); }
+
+ const SubTable& get_subtable (unsigned int i) const
+ { return Lookup::get_subtable<SubTable> (i); }
+
+ static inline bool lookup_type_is_reverse (unsigned int lookup_type)
+ { return lookup_type == SubTable::ReverseChainSingle; }
+
+ bool is_reverse () const
+ {
+ unsigned int type = get_type ();
+ if (unlikely (type == SubTable::Extension))
+ return get_subtable (0).u.extension.is_reverse ();
+ return lookup_type_is_reverse (type);
+ }
+
+ bool may_have_non_1to1 () const
+ {
+ hb_have_non_1to1_context_t c;
+ return dispatch (&c);
+ }
+
+ bool apply (hb_ot_apply_context_t *c) const
+ {
+ TRACE_APPLY (this);
+ return_trace (dispatch (c));
+ }
+
+ bool intersects (const hb_set_t *glyphs) const
+ {
+ hb_intersects_context_t c (glyphs);
+ return dispatch (&c);
+ }
+
+ hb_closure_context_t::return_t closure (hb_closure_context_t *c, unsigned int this_index) const
+ {
+ if (!c->should_visit_lookup (this_index))
+ return hb_closure_context_t::default_return_value ();
+
+ c->set_recurse_func (dispatch_closure_recurse_func);
+
+ hb_closure_context_t::return_t ret = dispatch (c);
+
+ c->flush ();
+
+ return ret;
+ }
+
+ hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
+ {
+ if (c->is_lookup_visited (this_index))
+ return hb_closure_lookups_context_t::default_return_value ();
+
+ c->set_lookup_visited (this_index);
+ if (!intersects (c->glyphs))
+ {
+ c->set_lookup_inactive (this_index);
+ return hb_closure_lookups_context_t::default_return_value ();
+ }
+
+ hb_closure_lookups_context_t::return_t ret = dispatch (c);
+ return ret;
+ }
+
+ hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
+ {
+ c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
+ return dispatch (c);
+ }
+
+ template <typename set_t>
+ void collect_coverage (set_t *glyphs) const
+ {
+ hb_collect_coverage_context_t<set_t> c (glyphs);
+ dispatch (&c);
+ }
+
+ bool would_apply (hb_would_apply_context_t *c,
+ const hb_ot_layout_lookup_accelerator_t *accel) const
+ {
+ if (unlikely (!c->len)) return false;
+ if (!accel->may_have (c->glyphs[0])) return false;
+ return dispatch (c);
+ }
+
+ template<typename Glyphs, typename Substitutes,
+ hb_requires (hb_is_sorted_source_of (Glyphs,
+ const hb_codepoint_t) &&
+ hb_is_source_of (Substitutes,
+ const hb_codepoint_t))>
+ bool serialize_single (hb_serialize_context_t *c,
+ uint32_t lookup_props,
+ Glyphs glyphs,
+ Substitutes substitutes)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!Lookup::serialize (c, SubTable::Single, lookup_props, 1))) return_trace (false);
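+ /* The subtable is serialized as a separate object: push() opens it,
+ * pop_pack() packs it, and add_link() records its offset in the lookup's
+ * subtable array; on failure the pushed object is discarded. */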
+ if (c->push<SubTable> ()->u.single.serialize (c, hb_zip (glyphs, substitutes)))
+ {
+ c->add_link (get_subtables<SubTable> ()[0], c->pop_pack ());
+ return_trace (true);
+ }
+ c->pop_discard ();
+ return_trace (false);
+ }
+
+ template<typename Iterator,
+ hb_requires (hb_is_sorted_iterator (Iterator))>
+ bool serialize (hb_serialize_context_t *c,
+ uint32_t lookup_props,
+ Iterator it)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!Lookup::serialize (c, SubTable::Multiple, lookup_props, 1))) return_trace (false);
+ if (c->push<SubTable> ()->u.multiple.
+ serialize (c, it))
+ {
+ c->add_link (get_subtables<SubTable> ()[0], c->pop_pack ());
+ return_trace (true);
+ }
+ c->pop_discard ();
+ return_trace (false);
+ }
+
+ bool serialize_alternate (hb_serialize_context_t *c,
+ uint32_t lookup_props,
+ hb_sorted_array_t<const HBGlyphID16> glyphs,
+ hb_array_t<const unsigned int> alternate_len_list,
+ hb_array_t<const HBGlyphID16> alternate_glyphs_list)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!Lookup::serialize (c, SubTable::Alternate, lookup_props, 1))) return_trace (false);
+
+ if (c->push<SubTable> ()->u.alternate.
+ serialize (c,
+ glyphs,
+ alternate_len_list,
+ alternate_glyphs_list))
+ {
+ c->add_link (get_subtables<SubTable> ()[0], c->pop_pack ());
+ return_trace (true);
+ }
+ c->pop_discard ();
+ return_trace (false);
+ }
+
+ bool serialize_ligature (hb_serialize_context_t *c,
+ uint32_t lookup_props,
+ hb_sorted_array_t<const HBGlyphID16> first_glyphs,
+ hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
+ hb_array_t<const HBGlyphID16> ligatures_list,
+ hb_array_t<const unsigned int> component_count_list,
+ hb_array_t<const HBGlyphID16> component_list /* Starting from second for each ligature */)
+ {
+ TRACE_SERIALIZE (this);
+ if (unlikely (!Lookup::serialize (c, SubTable::Ligature, lookup_props, 1))) return_trace (false);
+ if (c->push<SubTable> ()->u.ligature.
+ serialize (c,
+ first_glyphs,
+ ligature_per_first_glyph_count_list,
+ ligatures_list,
+ component_count_list,
+ component_list))
+ {
+ c->add_link (get_subtables<SubTable> ()[0], c->pop_pack ());
+ return_trace (true);
+ }
+ c->pop_discard ();
+ return_trace (false);
+ }
+
+ template <typename context_t>
+ static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
+
+ static inline typename hb_closure_context_t::return_t closure_glyphs_recurse_func (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index);
+
+ static inline hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index)
+ {
+ if (!c->should_visit_lookup (lookup_index))
+ return hb_empty_t ();
+
+ hb_closure_context_t::return_t ret = closure_glyphs_recurse_func (c, lookup_index, covered_seq_indices, seq_index, end_index);
+
+ /* While in theory we should flush here, it will cause timeouts because a recursive
+ * lookup can keep growing the glyph set. Skip, and outer loop will retry up to
+ * HB_CLOSURE_MAX_STAGES time, which should be enough for every realistic font. */
+ //c->flush ();
+
+ return ret;
+ }
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
+ { return Lookup::dispatch<SubTable> (c, std::forward<Ts> (ds)...); }
+
+ bool subset (hb_subset_context_t *c) const
+ { return Lookup::subset<SubTable> (c); }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_SUBSTLOOKUP_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookupSubTable.hh b/gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookupSubTable.hh
new file mode 100644
index 0000000000..a525fba039
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/GSUB/SubstLookupSubTable.hh
@@ -0,0 +1,77 @@
+#ifndef OT_LAYOUT_GSUB_SUBSTLOOKUPSUBTABLE_HH
+#define OT_LAYOUT_GSUB_SUBSTLOOKUPSUBTABLE_HH
+
+#include "Common.hh"
+#include "SingleSubst.hh"
+#include "MultipleSubst.hh"
+#include "AlternateSubst.hh"
+#include "LigatureSubst.hh"
+#include "ContextSubst.hh"
+#include "ChainContextSubst.hh"
+#include "ExtensionSubst.hh"
+#include "ReverseChainSingleSubst.hh"
+
+namespace OT {
+namespace Layout {
+namespace GSUB_impl {
+
+struct SubstLookupSubTable
+{
+ friend struct ::OT::Lookup;
+ friend struct SubstLookup;
+
+ protected:
+ union {
+ SingleSubst single;
+ MultipleSubst multiple;
+ AlternateSubst alternate;
+ LigatureSubst ligature;
+ ContextSubst context;
+ ChainContextSubst chainContext;
+ ExtensionSubst extension;
+ ReverseChainSingleSubst reverseChainContextSingle;
+ } u;
+ public:
+ DEFINE_SIZE_MIN (0);
+
+ enum Type {
+ Single = 1,
+ Multiple = 2,
+ Alternate = 3,
+ Ligature = 4,
+ Context = 5,
+ ChainContext = 6,
+ Extension = 7,
+ ReverseChainSingle = 8
+ };
+
+ template <typename context_t, typename ...Ts>
+ typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
+ {
+ TRACE_DISPATCH (this, lookup_type);
+ switch (lookup_type) {
+ case Single: return_trace (u.single.dispatch (c, std::forward<Ts> (ds)...));
+ case Multiple: return_trace (u.multiple.dispatch (c, std::forward<Ts> (ds)...));
+ case Alternate: return_trace (u.alternate.dispatch (c, std::forward<Ts> (ds)...));
+ case Ligature: return_trace (u.ligature.dispatch (c, std::forward<Ts> (ds)...));
+ case Context: return_trace (u.context.dispatch (c, std::forward<Ts> (ds)...));
+ case ChainContext: return_trace (u.chainContext.dispatch (c, std::forward<Ts> (ds)...));
+ case Extension: return_trace (u.extension.dispatch (c, std::forward<Ts> (ds)...));
+ case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c, std::forward<Ts> (ds)...));
+ default: return_trace (c->default_return_value ());
+ }
+ }
+
+ bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
+ {
+ hb_intersects_context_t c (glyphs);
+ return dispatch (&c, lookup_type);
+ }
+};
+
+
+}
+}
+}
+
+#endif /* OT_LAYOUT_GSUB_SUBSTLOOKUPSUBTABLE_HH */
diff --git a/gfx/harfbuzz/src/OT/Layout/types.hh b/gfx/harfbuzz/src/OT/Layout/types.hh
new file mode 100644
index 0000000000..6a43403e94
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/Layout/types.hh
@@ -0,0 +1,66 @@
+/*
+ * Copyright © 2007,2008,2009 Red Hat, Inc.
+ * Copyright © 2010,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod, Garret Rieger
+ */
+
+#ifndef OT_LAYOUT_TYPES_HH
+#define OT_LAYOUT_TYPES_HH
+
+namespace OT {
+namespace Layout {
+
+struct SmallTypes {
+ static constexpr unsigned size = 2;
+ using large_int = uint32_t;
+ using HBUINT = HBUINT16;
+ using HBGlyphID = HBGlyphID16;
+ using Offset = Offset16;
+ template <typename Type, bool has_null=true>
+ using OffsetTo = OT::Offset16To<Type, has_null>;
+ template <typename Type>
+ using ArrayOf = OT::Array16Of<Type>;
+ template <typename Type>
+ using SortedArrayOf = OT::SortedArray16Of<Type>;
+};
+
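+/* MediumTypes widens glyph IDs, offsets, and array lengths to 24 bits; it
+ * backs the beyond-64k lookup variants built when HB_NO_BEYOND_64K is not
+ * defined. */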
+struct MediumTypes {
+ static constexpr unsigned size = 3;
+ using large_int = uint64_t;
+ using HBUINT = HBUINT24;
+ using HBGlyphID = HBGlyphID24;
+ using Offset = Offset24;
+ template <typename Type, bool has_null=true>
+ using OffsetTo = OT::Offset24To<Type, has_null>;
+ template <typename Type>
+ using ArrayOf = OT::Array24Of<Type>;
+ template <typename Type>
+ using SortedArrayOf = OT::SortedArray24Of<Type>;
+};
+
+}
+}
+
+#endif /* OT_LAYOUT_TYPES_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/CompositeGlyph.hh b/gfx/harfbuzz/src/OT/glyf/CompositeGlyph.hh
new file mode 100644
index 0000000000..d81fadf7c8
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/CompositeGlyph.hh
@@ -0,0 +1,423 @@
+#ifndef OT_GLYF_COMPOSITEGLYPH_HH
+#define OT_GLYF_COMPOSITEGLYPH_HH
+
+
+#include "../../hb-open-type.hh"
+#include "composite-iter.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct CompositeGlyphRecord
+{
+ protected:
+ enum composite_glyph_flag_t
+ {
+ ARG_1_AND_2_ARE_WORDS = 0x0001,
+ ARGS_ARE_XY_VALUES = 0x0002,
+ ROUND_XY_TO_GRID = 0x0004,
+ WE_HAVE_A_SCALE = 0x0008,
+ MORE_COMPONENTS = 0x0020,
+ WE_HAVE_AN_X_AND_Y_SCALE = 0x0040,
+ WE_HAVE_A_TWO_BY_TWO = 0x0080,
+ WE_HAVE_INSTRUCTIONS = 0x0100,
+ USE_MY_METRICS = 0x0200,
+ OVERLAP_COMPOUND = 0x0400,
+ SCALED_COMPONENT_OFFSET = 0x0800,
+ UNSCALED_COMPONENT_OFFSET = 0x1000,
+#ifndef HB_NO_BEYOND_64K
+ GID_IS_24BIT = 0x2000
+#endif
+ };
+
+ public:
+ unsigned int get_size () const
+ {
+ unsigned int size = min_size;
+ /* glyphIndex is 24bit instead of 16bit */
+#ifndef HB_NO_BEYOND_64K
+ if (flags & GID_IS_24BIT) size += HBGlyphID24::static_size - HBGlyphID16::static_size;
+#endif
+ /* arg1 and 2 are int16 */
+ if (flags & ARG_1_AND_2_ARE_WORDS) size += 4;
+ /* arg1 and 2 are int8 */
+ else size += 2;
+
+ /* One x 16 bit (scale) */
+ if (flags & WE_HAVE_A_SCALE) size += 2;
+ /* Two x 16 bit (xscale, yscale) */
+ else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) size += 4;
+ /* Four x 16 bit (xscale, scale01, scale10, yscale) */
+ else if (flags & WE_HAVE_A_TWO_BY_TWO) size += 8;
+
+ return size;
+ }
+
+ void drop_instructions_flag () { flags = (uint16_t) flags & ~WE_HAVE_INSTRUCTIONS; }
+ void set_overlaps_flag ()
+ {
+ flags = (uint16_t) flags | OVERLAP_COMPOUND;
+ }
+
+ bool has_instructions () const { return flags & WE_HAVE_INSTRUCTIONS; }
+
+ bool has_more () const { return flags & MORE_COMPONENTS; }
+ bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
+ bool is_anchored () const { return !(flags & ARGS_ARE_XY_VALUES); }
+ void get_anchor_points (unsigned int &point1, unsigned int &point2) const
+ {
+ const auto *p = &StructAfter<const HBUINT8> (flags);
+#ifndef HB_NO_BEYOND_64K
+ if (flags & GID_IS_24BIT)
+ p += HBGlyphID24::static_size;
+ else
+#endif
+ p += HBGlyphID16::static_size;
+ if (flags & ARG_1_AND_2_ARE_WORDS)
+ {
+ point1 = ((const HBUINT16 *) p)[0];
+ point2 = ((const HBUINT16 *) p)[1];
+ }
+ else
+ {
+ point1 = p[0];
+ point2 = p[1];
+ }
+ }
+
+ static void transform (const float (&matrix)[4],
+ hb_array_t<contour_point_t> points)
+ {
+ auto arrayZ = points.arrayZ;
+ unsigned count = points.length;
+
+ if (matrix[0] != 1.f || matrix[1] != 0.f ||
+ matrix[2] != 0.f || matrix[3] != 1.f)
+ for (unsigned i = 0; i < count; i++)
+ arrayZ[i].transform (matrix);
+ }
+
+ static void translate (const contour_point_t &trans,
+ hb_array_t<contour_point_t> points)
+ {
+ auto arrayZ = points.arrayZ;
+ unsigned count = points.length;
+
+ if (trans.x != 0.f || trans.y != 0.f)
+ for (unsigned i = 0; i < count; i++)
+ arrayZ[i].translate (trans);
+ }
+
+ void transform_points (hb_array_t<contour_point_t> points,
+ const float (&matrix)[4],
+ const contour_point_t &trans) const
+ {
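+ /* SCALED_COMPONENT_OFFSET applies the component offset before the 2x2
+ * transform (so the offset itself is scaled); otherwise the transform is
+ * applied first and the offset is added untransformed. */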
+ if (scaled_offsets ())
+ {
+ translate (trans, points);
+ transform (matrix, points);
+ }
+ else
+ {
+ transform (matrix, points);
+ translate (trans, points);
+ }
+ }
+
+ bool get_points (contour_point_vector_t &points) const
+ {
+ float matrix[4];
+ contour_point_t trans;
+ get_transformation (matrix, trans);
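+ /* Each composite component contributes one pseudo point that carries its
+ * translation, so the caller can process component offsets like ordinary
+ * points (e.g. when applying variation deltas). */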
+ points.alloc (points.length + 4); // For phantom points
+ if (unlikely (!points.resize (points.length + 1))) return false;
+ points.arrayZ[points.length - 1] = trans;
+ return true;
+ }
+
+ unsigned compile_with_point (const contour_point_t &point,
+ char *out) const
+ {
+ const HBINT8 *p = &StructAfter<const HBINT8> (flags);
+#ifndef HB_NO_BEYOND_64K
+ if (flags & GID_IS_24BIT)
+ p += HBGlyphID24::static_size;
+ else
+#endif
+ p += HBGlyphID16::static_size;
+
+ unsigned len = get_size ();
+ unsigned len_before_val = (const char *)p - (const char *)this;
+ if (flags & ARG_1_AND_2_ARE_WORDS)
+ {
+ // no overflow, copy value
+ hb_memcpy (out, this, len);
+
+ HBINT16 *o = reinterpret_cast<HBINT16 *> (out + len_before_val);
+ o[0] = roundf (point.x);
+ o[1] = roundf (point.y);
+ }
+ else
+ {
+ int new_x = roundf (point.x);
+ int new_y = roundf (point.y);
+ if (new_x <= 127 && new_x >= -128 &&
+ new_y <= 127 && new_y >= -128)
+ {
+ hb_memcpy (out, this, len);
+ HBINT8 *o = reinterpret_cast<HBINT8 *> (out + len_before_val);
+ o[0] = new_x;
+ o[1] = new_y;
+ }
+ else
+ {
+ // new point value has an int8 overflow
+ hb_memcpy (out, this, len_before_val);
+
+ //update flags
+ CompositeGlyphRecord *o = reinterpret_cast<CompositeGlyphRecord *> (out);
+ o->flags = flags | ARG_1_AND_2_ARE_WORDS;
+ out += len_before_val;
+
+ HBINT16 new_value;
+ new_value = new_x;
+ hb_memcpy (out, &new_value, HBINT16::static_size);
+ out += HBINT16::static_size;
+
+ new_value = new_y;
+ hb_memcpy (out, &new_value, HBINT16::static_size);
+ out += HBINT16::static_size;
+
+ hb_memcpy (out, p+2, len - len_before_val - 2);
+ len += 2;
+ }
+ }
+ return len;
+ }
+
+ protected:
+ bool scaled_offsets () const
+ { return (flags & (SCALED_COMPONENT_OFFSET | UNSCALED_COMPONENT_OFFSET)) == SCALED_COMPONENT_OFFSET; }
+
+ public:
+ bool get_transformation (float (&matrix)[4], contour_point_t &trans) const
+ {
+ matrix[0] = matrix[3] = 1.f;
+ matrix[1] = matrix[2] = 0.f;
+
+ const auto *p = &StructAfter<const HBINT8> (flags);
+#ifndef HB_NO_BEYOND_64K
+ if (flags & GID_IS_24BIT)
+ p += HBGlyphID24::static_size;
+ else
+#endif
+ p += HBGlyphID16::static_size;
+ int tx, ty;
+ if (flags & ARG_1_AND_2_ARE_WORDS)
+ {
+ tx = *(const HBINT16 *) p;
+ p += HBINT16::static_size;
+ ty = *(const HBINT16 *) p;
+ p += HBINT16::static_size;
+ }
+ else
+ {
+ tx = *p++;
+ ty = *p++;
+ }
+ if (is_anchored ()) tx = ty = 0;
+
+ trans.init ((float) tx, (float) ty);
+
+ {
+ const F2DOT14 *points = (const F2DOT14 *) p;
+ if (flags & WE_HAVE_A_SCALE)
+ {
+ matrix[0] = matrix[3] = points[0].to_float ();
+ return true;
+ }
+ else if (flags & WE_HAVE_AN_X_AND_Y_SCALE)
+ {
+ matrix[0] = points[0].to_float ();
+ matrix[3] = points[1].to_float ();
+ return true;
+ }
+ else if (flags & WE_HAVE_A_TWO_BY_TWO)
+ {
+ matrix[0] = points[0].to_float ();
+ matrix[1] = points[1].to_float ();
+ matrix[2] = points[2].to_float ();
+ matrix[3] = points[3].to_float ();
+ return true;
+ }
+ }
+ return tx || ty;
+ }
+
+ hb_codepoint_t get_gid () const
+ {
+#ifndef HB_NO_BEYOND_64K
+ if (flags & GID_IS_24BIT)
+ return StructAfter<const HBGlyphID24> (flags);
+ else
+#endif
+ return StructAfter<const HBGlyphID16> (flags);
+ }
+ void set_gid (hb_codepoint_t gid)
+ {
+#ifndef HB_NO_BEYOND_64K
+ if (flags & GID_IS_24BIT)
+ StructAfter<HBGlyphID24> (flags) = gid;
+ else
+#endif
+ /* TODO assert? */
+ StructAfter<HBGlyphID16> (flags) = gid;
+ }
+
+#ifndef HB_NO_BEYOND_64K
+ void lower_gid_24_to_16 ()
+ {
+ hb_codepoint_t gid = get_gid ();
+ if (!(flags & GID_IS_24BIT) || gid > 0xFFFFu)
+ return;
+
+ /* Lower the flag and move the rest of the struct down. */
+
+ unsigned size = get_size ();
+ char *end = (char *) this + size;
+ char *p = &StructAfter<char> (flags);
+ p += HBGlyphID24::static_size;
+
+ flags = flags & ~GID_IS_24BIT;
+ set_gid (gid);
+
+ memmove (p - HBGlyphID24::static_size + HBGlyphID16::static_size, p, end - p);
+ }
+#endif
+
+ protected:
+ HBUINT16 flags;
+ HBUINT24 pad;
+ public:
+ DEFINE_SIZE_MIN (4);
+};
+
+using composite_iter_t = composite_iter_tmpl<CompositeGlyphRecord>;
+
+struct CompositeGlyph
+{
+ const GlyphHeader &header;
+ hb_bytes_t bytes;
+ CompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
+ header (header_), bytes (bytes_) {}
+
+ composite_iter_t iter () const
+ { return composite_iter_t (bytes, &StructAfter<CompositeGlyphRecord, GlyphHeader> (header)); }
+
+ unsigned int instructions_length (hb_bytes_t bytes) const
+ {
+ unsigned int start = bytes.length;
+ unsigned int end = bytes.length;
+ const CompositeGlyphRecord *last = nullptr;
+ for (auto &item : iter ())
+ last = &item;
+ if (unlikely (!last)) return 0;
+
+ if (last->has_instructions ())
+ start = (char *) last - &bytes + last->get_size ();
+ if (unlikely (start > end)) return 0;
+ return end - start;
+ }
+
+ /* Trimming for composites is not implemented.
+ * If hints are removed, trimming falls out of that. */
+ const hb_bytes_t trim_padding () const { return bytes; }
+
+ void drop_hints ()
+ {
+ for (const auto &_ : iter ())
+ const_cast<CompositeGlyphRecord &> (_).drop_instructions_flag ();
+ }
+
+ /* Chop instructions off the end */
+ void drop_hints_bytes (hb_bytes_t &dest_start) const
+ { dest_start = bytes.sub_array (0, bytes.length - instructions_length (bytes)); }
+
+ void set_overlaps_flag ()
+ {
+ CompositeGlyphRecord& glyph_chain = const_cast<CompositeGlyphRecord &> (
+ StructAfter<CompositeGlyphRecord, GlyphHeader> (header));
+ if (!bytes.check_range(&glyph_chain, CompositeGlyphRecord::min_size))
+ return;
+ glyph_chain.set_overlaps_flag ();
+ }
+
+ bool compile_bytes_with_deltas (const hb_bytes_t &source_bytes,
+ const contour_point_vector_t &points_with_deltas,
+ hb_bytes_t &dest_bytes /* OUT */)
+ {
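+ /* Only composite glyphs (numberOfContours == -1) carry component records
+ * whose point arguments may need rewriting; for anything else return an
+ * empty byte range. */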
+ if (source_bytes.length <= GlyphHeader::static_size ||
+ header.numberOfContours != -1)
+ {
+ dest_bytes = hb_bytes_t ();
+ return true;
+ }
+
+ unsigned source_len = source_bytes.length - GlyphHeader::static_size;
+
+ /* Allocate more memory than the source glyph bytes, in case an int8
+ * argument overflows and has to be widened to int16. */
+ char *o = (char *) hb_calloc (source_len * 2, sizeof (char));
+ if (unlikely (!o)) return false;
+
+ const CompositeGlyphRecord *c = reinterpret_cast<const CompositeGlyphRecord *> (source_bytes.arrayZ + GlyphHeader::static_size);
+ auto it = composite_iter_t (hb_bytes_t ((const char *)c, source_len), c);
+
+ char *p = o;
+ unsigned i = 0, source_comp_len = 0;
+ for (const auto &component : it)
+ {
+ /* last 4 points in points_with_deltas are phantom points and should not be included */
+ if (i >= points_with_deltas.length - 4) {
+ free (o);
+ return false;
+ }
+
+ unsigned comp_len = component.get_size ();
+ if (component.is_anchored ())
+ {
+ hb_memcpy (p, &component, comp_len);
+ p += comp_len;
+ }
+ else
+ {
+ unsigned new_len = component.compile_with_point (points_with_deltas[i], p);
+ p += new_len;
+ }
+ i++;
+ source_comp_len += comp_len;
+ }
+
+    // copy instructions, if any
+ if (source_len > source_comp_len)
+ {
+ unsigned instr_len = source_len - source_comp_len;
+ hb_memcpy (p, (const char *)c + source_comp_len, instr_len);
+ p += instr_len;
+ }
+
+ unsigned len = p - o;
+ dest_bytes = hb_bytes_t (o, len);
+ return true;
+ }
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_COMPOSITEGLYPH_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/Glyph.hh b/gfx/harfbuzz/src/OT/glyf/Glyph.hh
new file mode 100644
index 0000000000..2bd5fe8206
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/Glyph.hh
@@ -0,0 +1,577 @@
+#ifndef OT_GLYF_GLYPH_HH
+#define OT_GLYF_GLYPH_HH
+
+
+#include "../../hb-open-type.hh"
+
+#include "GlyphHeader.hh"
+#include "SimpleGlyph.hh"
+#include "CompositeGlyph.hh"
+#include "VarCompositeGlyph.hh"
+#include "coord-setter.hh"
+
+
+namespace OT {
+
+struct glyf_accelerator_t;
+
+namespace glyf_impl {
+
+
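+/* The four phantom points appended after a glyph's contour points carry its
+ * metrics: left/right encode the horizontal origin and advance, top/bottom
+ * the vertical origin and advance. */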
+enum phantom_point_index_t
+{
+ PHANTOM_LEFT = 0,
+ PHANTOM_RIGHT = 1,
+ PHANTOM_TOP = 2,
+ PHANTOM_BOTTOM = 3,
+ PHANTOM_COUNT = 4
+};
+
+struct Glyph
+{
+ enum glyph_type_t {
+ EMPTY,
+ SIMPLE,
+ COMPOSITE,
+#ifndef HB_NO_VAR_COMPOSITES
+ VAR_COMPOSITE,
+#endif
+ };
+
+ public:
+ composite_iter_t get_composite_iterator () const
+ {
+ if (type != COMPOSITE) return composite_iter_t ();
+ return CompositeGlyph (*header, bytes).iter ();
+ }
+ var_composite_iter_t get_var_composite_iterator () const
+ {
+#ifndef HB_NO_VAR_COMPOSITES
+ if (type != VAR_COMPOSITE) return var_composite_iter_t ();
+ return VarCompositeGlyph (*header, bytes).iter ();
+#else
+ return var_composite_iter_t ();
+#endif
+ }
+
+ const hb_bytes_t trim_padding () const
+ {
+ switch (type) {
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE: return VarCompositeGlyph (*header, bytes).trim_padding ();
+#endif
+ case COMPOSITE: return CompositeGlyph (*header, bytes).trim_padding ();
+ case SIMPLE: return SimpleGlyph (*header, bytes).trim_padding ();
+ case EMPTY: return bytes;
+ default: return bytes;
+ }
+ }
+
+ void drop_hints ()
+ {
+ switch (type) {
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE: return; // No hinting
+#endif
+ case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints (); return;
+ case SIMPLE: SimpleGlyph (*header, bytes).drop_hints (); return;
+ case EMPTY: return;
+ }
+ }
+
+ void set_overlaps_flag ()
+ {
+ switch (type) {
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE: return; // No overlaps flag
+#endif
+ case COMPOSITE: CompositeGlyph (*header, bytes).set_overlaps_flag (); return;
+ case SIMPLE: SimpleGlyph (*header, bytes).set_overlaps_flag (); return;
+ case EMPTY: return;
+ }
+ }
+
+ void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const
+ {
+ switch (type) {
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE: return; // No hinting
+#endif
+ case COMPOSITE: CompositeGlyph (*header, bytes).drop_hints_bytes (dest_start); return;
+ case SIMPLE: SimpleGlyph (*header, bytes).drop_hints_bytes (dest_start, dest_end); return;
+ case EMPTY: return;
+ }
+ }
+
+ void update_mtx (const hb_subset_plan_t *plan,
+ int xMin, int xMax,
+ int yMin, int yMax,
+ const contour_point_vector_t &all_points) const
+ {
+ hb_codepoint_t new_gid = 0;
+ if (!plan->new_gid_for_old_gid (gid, &new_gid))
+ return;
+
+ if (type != EMPTY)
+ {
+ plan->bounds_width_map.set (new_gid, xMax - xMin);
+ plan->bounds_height_map.set (new_gid, yMax - yMin);
+ }
+
+ unsigned len = all_points.length;
+ float leftSideX = all_points[len - 4].x;
+ float rightSideX = all_points[len - 3].x;
+ float topSideY = all_points[len - 2].y;
+ float bottomSideY = all_points[len - 1].y;
+
+ signed hori_aw = roundf (rightSideX - leftSideX);
+ if (hori_aw < 0) hori_aw = 0;
+ int lsb = roundf (xMin - leftSideX);
+ plan->hmtx_map.set (new_gid, hb_pair ((unsigned) hori_aw, lsb));
+    // the flag value should be computed using non-empty glyphs only
+ if (type != EMPTY && lsb != xMin)
+ plan->head_maxp_info.allXMinIsLsb = false;
+
+ signed vert_aw = roundf (topSideY - bottomSideY);
+ if (vert_aw < 0) vert_aw = 0;
+ int tsb = roundf (topSideY - yMax);
+ plan->vmtx_map.set (new_gid, hb_pair ((unsigned) vert_aw, tsb));
+ }
+
+ bool compile_header_bytes (const hb_subset_plan_t *plan,
+ const contour_point_vector_t &all_points,
+ hb_bytes_t &dest_bytes /* OUT */) const
+ {
+ GlyphHeader *glyph_header = nullptr;
+ if (!plan->pinned_at_default && type != EMPTY && all_points.length >= 4)
+ {
+ glyph_header = (GlyphHeader *) hb_calloc (1, GlyphHeader::static_size);
+ if (unlikely (!glyph_header)) return false;
+ }
+
+ float xMin = 0, xMax = 0;
+ float yMin = 0, yMax = 0;
+ if (all_points.length > 4)
+ {
+ xMin = xMax = all_points[0].x;
+ yMin = yMax = all_points[0].y;
+
+ unsigned count = all_points.length - 4;
+ for (unsigned i = 1; i < count; i++)
+ {
+ float x = all_points[i].x;
+ float y = all_points[i].y;
+ xMin = hb_min (xMin, x);
+ xMax = hb_max (xMax, x);
+ yMin = hb_min (yMin, y);
+ yMax = hb_max (yMax, y);
+ }
+ }
+
+
+    // These are destined for storage in a 16-bit field, so clamp the values
+    // to fit into a 16-bit signed integer.
+ int rounded_xMin = hb_clamp (roundf (xMin), -32768.0f, 32767.0f);
+ int rounded_xMax = hb_clamp (roundf (xMax), -32768.0f, 32767.0f);
+ int rounded_yMin = hb_clamp (roundf (yMin), -32768.0f, 32767.0f);
+ int rounded_yMax = hb_clamp (roundf (yMax), -32768.0f, 32767.0f);
+
+ update_mtx (plan, rounded_xMin, rounded_xMax, rounded_yMin, rounded_yMax, all_points);
+
+ if (type != EMPTY)
+ {
+ plan->head_maxp_info.xMin = hb_min (plan->head_maxp_info.xMin, rounded_xMin);
+ plan->head_maxp_info.yMin = hb_min (plan->head_maxp_info.yMin, rounded_yMin);
+ plan->head_maxp_info.xMax = hb_max (plan->head_maxp_info.xMax, rounded_xMax);
+ plan->head_maxp_info.yMax = hb_max (plan->head_maxp_info.yMax, rounded_yMax);
+ }
+
+    /* When pinned at default there is no need to compile the glyph header,
+     * and for empty glyphs all_points only includes phantom points;
+     * just update the metrics and return. */
+ if (!glyph_header)
+ return true;
+
+ glyph_header->numberOfContours = header->numberOfContours;
+
+ glyph_header->xMin = rounded_xMin;
+ glyph_header->yMin = rounded_yMin;
+ glyph_header->xMax = rounded_xMax;
+ glyph_header->yMax = rounded_yMax;
+
+ dest_bytes = hb_bytes_t ((const char *)glyph_header, GlyphHeader::static_size);
+ return true;
+ }
+
+ bool compile_bytes_with_deltas (const hb_subset_plan_t *plan,
+ hb_font_t *font,
+ const glyf_accelerator_t &glyf,
+ hb_bytes_t &dest_start, /* IN/OUT */
+ hb_bytes_t &dest_end /* OUT */)
+ {
+ contour_point_vector_t all_points, points_with_deltas;
+ unsigned composite_contours = 0;
+ head_maxp_info_t *head_maxp_info_p = &plan->head_maxp_info;
+ unsigned *composite_contours_p = &composite_contours;
+
+    // don't compute head/maxp values when the glyph has no contours (type is EMPTY);
+    // also ignore the .notdef glyph when --notdef-outline is not enabled
+ if (type == EMPTY ||
+ (gid == 0 && !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)))
+ {
+ head_maxp_info_p = nullptr;
+ composite_contours_p = nullptr;
+ }
+
+ if (!get_points (font, glyf, all_points, &points_with_deltas, head_maxp_info_p, composite_contours_p, false, false))
+ return false;
+
+    // for .notdef, set the type to EMPTY so we only update metrics and
+    // don't compile bytes for it
+ if (gid == 0 &&
+ !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE))
+ {
+ type = EMPTY;
+ dest_start = hb_bytes_t ();
+ dest_end = hb_bytes_t ();
+ }
+
+    // don't compile bytes when pinned at default; just recalculate bounds
+ if (!plan->pinned_at_default)
+ {
+ switch (type)
+ {
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE:
+ // TODO
+ dest_end = hb_bytes_t ();
+ break;
+#endif
+
+ case COMPOSITE:
+ if (!CompositeGlyph (*header, bytes).compile_bytes_with_deltas (dest_start,
+ points_with_deltas,
+ dest_end))
+ return false;
+ break;
+ case SIMPLE:
+ if (!SimpleGlyph (*header, bytes).compile_bytes_with_deltas (all_points,
+ plan->flags & HB_SUBSET_FLAGS_NO_HINTING,
+ dest_end))
+ return false;
+ break;
+ case EMPTY:
+ /* set empty bytes for empty glyph
+ * do not use source glyph's pointers */
+ dest_start = hb_bytes_t ();
+ dest_end = hb_bytes_t ();
+ break;
+ }
+ }
+
+ if (!compile_header_bytes (plan, all_points, dest_start))
+ {
+ dest_end.fini ();
+ return false;
+ }
+ return true;
+ }
+
+
+ /* Note: Recursively calls itself.
+ * all_points includes phantom points
+ */
+ template <typename accelerator_t>
+ bool get_points (hb_font_t *font, const accelerator_t &glyf_accelerator,
+ contour_point_vector_t &all_points /* OUT */,
+ contour_point_vector_t *points_with_deltas = nullptr, /* OUT */
+ head_maxp_info_t * head_maxp_info = nullptr, /* OUT */
+ unsigned *composite_contours = nullptr, /* OUT */
+ bool shift_points_hori = true,
+ bool use_my_metrics = true,
+ bool phantom_only = false,
+ hb_array_t<int> coords = hb_array_t<int> (),
+ unsigned int depth = 0,
+ unsigned *edge_count = nullptr) const
+ {
+ if (unlikely (depth > HB_MAX_NESTING_LEVEL)) return false;
+ unsigned stack_edge_count = 0;
+ if (!edge_count) edge_count = &stack_edge_count;
+ if (unlikely (*edge_count > HB_GLYF_MAX_EDGE_COUNT)) return false;
+ (*edge_count)++;
+
+ if (head_maxp_info)
+ {
+ head_maxp_info->maxComponentDepth = hb_max (head_maxp_info->maxComponentDepth, depth);
+ }
+
+ if (!coords)
+ coords = hb_array (font->coords, font->num_coords);
+
+ contour_point_vector_t stack_points;
+ contour_point_vector_t &points = type == SIMPLE ? all_points : stack_points;
+ unsigned old_length = points.length;
+
+ switch (type) {
+ case SIMPLE:
+ if (depth == 0 && head_maxp_info)
+ head_maxp_info->maxContours = hb_max (head_maxp_info->maxContours, (unsigned) header->numberOfContours);
+ if (depth > 0 && composite_contours)
+ *composite_contours += (unsigned) header->numberOfContours;
+ if (unlikely (!SimpleGlyph (*header, bytes).get_contour_points (all_points, phantom_only)))
+ return false;
+ break;
+ case COMPOSITE:
+ {
+ for (auto &item : get_composite_iterator ())
+ if (unlikely (!item.get_points (points))) return false;
+ break;
+ }
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE:
+ {
+ for (auto &item : get_var_composite_iterator ())
+ if (unlikely (!item.get_points (points))) return false;
+ break;
+ }
+#endif
+ case EMPTY:
+ break;
+ }
+
+ /* Init phantom points */
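+    /* The phantoms are derived from the metrics tables:
+     *   PHANTOM_LEFT.x   = xMin - lsb        (horizontal origin)
+     *   PHANTOM_RIGHT.x  = left + h advance
+     *   PHANTOM_TOP.y    = yMax + tsb        (vertical origin)
+     *   PHANTOM_BOTTOM.y = top - v advance                      */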
+ if (unlikely (!points.resize (points.length + PHANTOM_COUNT))) return false;
+ hb_array_t<contour_point_t> phantoms = points.as_array ().sub_array (points.length - PHANTOM_COUNT, PHANTOM_COUNT);
+ {
+ int lsb = 0;
+ int h_delta = glyf_accelerator.hmtx->get_leading_bearing_without_var_unscaled (gid, &lsb) ?
+ (int) header->xMin - lsb : 0;
+ HB_UNUSED int tsb = 0;
+ int v_orig = (int) header->yMax +
+#ifndef HB_NO_VERTICAL
+ ((void) glyf_accelerator.vmtx->get_leading_bearing_without_var_unscaled (gid, &tsb), tsb)
+#else
+ 0
+#endif
+ ;
+ unsigned h_adv = glyf_accelerator.hmtx->get_advance_without_var_unscaled (gid);
+ unsigned v_adv =
+#ifndef HB_NO_VERTICAL
+ glyf_accelerator.vmtx->get_advance_without_var_unscaled (gid)
+#else
+ - font->face->get_upem ()
+#endif
+ ;
+ phantoms[PHANTOM_LEFT].x = h_delta;
+ phantoms[PHANTOM_RIGHT].x = (int) h_adv + h_delta;
+ phantoms[PHANTOM_TOP].y = v_orig;
+ phantoms[PHANTOM_BOTTOM].y = v_orig - (int) v_adv;
+ }
+
+#ifndef HB_NO_VAR
+ glyf_accelerator.gvar->apply_deltas_to_points (gid,
+ coords,
+ points.as_array ().sub_array (old_length));
+#endif
+
+    // mainly used by CompositeGlyph when calculating new X/Y offset values,
+    // so there is no need to extend it with child glyphs' points
+ if (points_with_deltas != nullptr && depth == 0 && type == COMPOSITE)
+ {
+ if (unlikely (!points_with_deltas->resize (points.length))) return false;
+ points_with_deltas->copy_vector (points);
+ }
+
+ switch (type) {
+ case SIMPLE:
+ if (depth == 0 && head_maxp_info)
+ head_maxp_info->maxPoints = hb_max (head_maxp_info->maxPoints, all_points.length - old_length - 4);
+ break;
+ case COMPOSITE:
+ {
+ unsigned int comp_index = 0;
+ for (auto &item : get_composite_iterator ())
+ {
+ unsigned old_count = all_points.length;
+
+ if (unlikely ((!phantom_only || (use_my_metrics && item.is_use_my_metrics ())) &&
+ !glyf_accelerator.glyph_for_gid (item.get_gid ())
+ .get_points (font,
+ glyf_accelerator,
+ all_points,
+ points_with_deltas,
+ head_maxp_info,
+ composite_contours,
+ shift_points_hori,
+ use_my_metrics,
+ phantom_only,
+ coords,
+ depth + 1,
+ edge_count)))
+ return false;
+
+ auto comp_points = all_points.as_array ().sub_array (old_count);
+
+ /* Copy phantom points from component if USE_MY_METRICS flag set */
+ if (use_my_metrics && item.is_use_my_metrics ())
+ for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
+ phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
+
+ float matrix[4];
+ contour_point_t default_trans;
+ item.get_transformation (matrix, default_trans);
+
+ /* Apply component transformation & translation (with deltas applied) */
+ item.transform_points (comp_points, matrix, points[comp_index]);
+
+ if (item.is_anchored ())
+ {
+ unsigned int p1, p2;
+ item.get_anchor_points (p1, p2);
+ if (likely (p1 < all_points.length && p2 < comp_points.length))
+ {
+ contour_point_t delta;
+ delta.init (all_points[p1].x - comp_points[p2].x,
+ all_points[p1].y - comp_points[p2].y);
+
+ item.translate (delta, comp_points);
+ }
+ }
+
+ all_points.resize (all_points.length - PHANTOM_COUNT);
+
+ if (all_points.length > HB_GLYF_MAX_POINTS)
+ return false;
+
+ comp_index++;
+ }
+
+ if (head_maxp_info && depth == 0)
+ {
+ if (composite_contours)
+ head_maxp_info->maxCompositeContours = hb_max (head_maxp_info->maxCompositeContours, *composite_contours);
+ head_maxp_info->maxCompositePoints = hb_max (head_maxp_info->maxCompositePoints, all_points.length);
+ head_maxp_info->maxComponentElements = hb_max (head_maxp_info->maxComponentElements, comp_index);
+ }
+ all_points.extend (phantoms);
+ } break;
+#ifndef HB_NO_VAR_COMPOSITES
+ case VAR_COMPOSITE:
+ {
+ hb_array_t<contour_point_t> points_left = points.as_array ();
+ for (auto &item : get_var_composite_iterator ())
+ {
+ unsigned item_num_points = item.get_num_points ();
+ hb_array_t<contour_point_t> record_points = points_left.sub_array (0, item_num_points);
+ assert (record_points.length == item_num_points);
+
+ auto component_coords = coords;
+ if (item.is_reset_unspecified_axes ())
+ component_coords = hb_array<int> ();
+
+ coord_setter_t coord_setter (component_coords);
+ item.set_variations (coord_setter, record_points);
+
+ unsigned old_count = all_points.length;
+
+ if (unlikely ((!phantom_only || (use_my_metrics && item.is_use_my_metrics ())) &&
+ !glyf_accelerator.glyph_for_gid (item.get_gid ())
+ .get_points (font,
+ glyf_accelerator,
+ all_points,
+ points_with_deltas,
+ head_maxp_info,
+ nullptr,
+ shift_points_hori,
+ use_my_metrics,
+ phantom_only,
+ coord_setter.get_coords (),
+ depth + 1,
+ edge_count)))
+ return false;
+
+ auto comp_points = all_points.as_array ().sub_array (old_count);
+
+ /* Apply component transformation */
+ if (comp_points) // Empty in case of phantom_only
+ item.transform_points (record_points, comp_points);
+
+ /* Copy phantom points from component if USE_MY_METRICS flag set */
+ if (use_my_metrics && item.is_use_my_metrics ())
+ for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
+ phantoms[i] = comp_points[comp_points.length - PHANTOM_COUNT + i];
+
+ all_points.resize (all_points.length - PHANTOM_COUNT);
+
+ if (all_points.length > HB_GLYF_MAX_POINTS)
+ return false;
+
+ points_left += item_num_points;
+ }
+ all_points.extend (phantoms);
+ } break;
+#endif
+ case EMPTY:
+ all_points.extend (phantoms);
+ break;
+ }
+
+ if (depth == 0 && shift_points_hori) /* Apply at top level */
+ {
+ /* Undocumented rasterizer behavior:
+ * Shift points horizontally by the updated left side bearing
+ */
+ int v = -phantoms[PHANTOM_LEFT].x;
+ if (v)
+ for (auto &point : all_points)
+ point.x += v;
+ }
+
+ return !all_points.in_error ();
+ }
+
+ bool get_extents_without_var_scaled (hb_font_t *font, const glyf_accelerator_t &glyf_accelerator,
+ hb_glyph_extents_t *extents) const
+ {
+ if (type == EMPTY) return true; /* Empty glyph; zero extents. */
+ return header->get_extents_without_var_scaled (font, glyf_accelerator, gid, extents);
+ }
+
+ hb_bytes_t get_bytes () const { return bytes; }
+ glyph_type_t get_type () const { return type; }
+ const GlyphHeader *get_header () const { return header; }
+
+ Glyph () : bytes (),
+ header (bytes.as<GlyphHeader> ()),
+ gid (-1),
+ type(EMPTY)
+ {}
+
+ Glyph (hb_bytes_t bytes_,
+ hb_codepoint_t gid_ = (unsigned) -1) : bytes (bytes_),
+ header (bytes.as<GlyphHeader> ()),
+ gid (gid_)
+ {
+ int num_contours = header->numberOfContours;
+ if (unlikely (num_contours == 0)) type = EMPTY;
+ else if (num_contours > 0) type = SIMPLE;
+ else if (num_contours == -1) type = COMPOSITE;
+#ifndef HB_NO_VAR_COMPOSITES
+ else if (num_contours == -2) type = VAR_COMPOSITE;
+#endif
+ else type = EMPTY; // Spec deviation; Spec says COMPOSITE, but not seen in the wild.
+ }
+
+ protected:
+ hb_bytes_t bytes;
+ const GlyphHeader *header;
+ hb_codepoint_t gid;
+ glyph_type_t type;
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_GLYPH_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/GlyphHeader.hh b/gfx/harfbuzz/src/OT/glyf/GlyphHeader.hh
new file mode 100644
index 0000000000..a43b6691ab
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/GlyphHeader.hh
@@ -0,0 +1,52 @@
+#ifndef OT_GLYF_GLYPHHEADER_HH
+#define OT_GLYF_GLYPHHEADER_HH
+
+
+#include "../../hb-open-type.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct GlyphHeader
+{
+ bool has_data () const { return numberOfContours; }
+
+ template <typename accelerator_t>
+ bool get_extents_without_var_scaled (hb_font_t *font, const accelerator_t &glyf_accelerator,
+ hb_codepoint_t gid, hb_glyph_extents_t *extents) const
+ {
+ /* Undocumented rasterizer behavior: shift glyph to the left by (lsb - xMin), i.e., xMin = lsb */
+ /* extents->x_bearing = hb_min (glyph_header.xMin, glyph_header.xMax); */
+ int lsb = hb_min (xMin, xMax);
+ (void) glyf_accelerator.hmtx->get_leading_bearing_without_var_unscaled (gid, &lsb);
+ extents->x_bearing = lsb;
+ extents->y_bearing = hb_max (yMin, yMax);
+ extents->width = hb_max (xMin, xMax) - hb_min (xMin, xMax);
+ extents->height = hb_min (yMin, yMax) - hb_max (yMin, yMax);
+
+ font->scale_glyph_extents (extents);
+
+ return true;
+ }
+
+ HBINT16 numberOfContours;
+ /* If the number of contours is
+ * greater than or equal to zero,
+ * this is a simple glyph; if negative,
+ * this is a composite glyph. */
+ FWORD xMin; /* Minimum x for coordinate data. */
+ FWORD yMin; /* Minimum y for coordinate data. */
+ FWORD xMax; /* Maximum x for coordinate data. */
+ FWORD yMax; /* Maximum y for coordinate data. */
+ public:
+ DEFINE_SIZE_STATIC (10);
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_GLYPHHEADER_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/SimpleGlyph.hh b/gfx/harfbuzz/src/OT/glyf/SimpleGlyph.hh
new file mode 100644
index 0000000000..555bcee346
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/SimpleGlyph.hh
@@ -0,0 +1,348 @@
+#ifndef OT_GLYF_SIMPLEGLYPH_HH
+#define OT_GLYF_SIMPLEGLYPH_HH
+
+
+#include "../../hb-open-type.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct SimpleGlyph
+{
+ enum simple_glyph_flag_t
+ {
+ FLAG_ON_CURVE = 0x01,
+ FLAG_X_SHORT = 0x02,
+ FLAG_Y_SHORT = 0x04,
+ FLAG_REPEAT = 0x08,
+ FLAG_X_SAME = 0x10,
+ FLAG_Y_SAME = 0x20,
+ FLAG_OVERLAP_SIMPLE = 0x40,
+ FLAG_CUBIC = 0x80
+ };
+
+ const GlyphHeader &header;
+ hb_bytes_t bytes;
+ SimpleGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
+ header (header_), bytes (bytes_) {}
+
+ unsigned int instruction_len_offset () const
+ { return GlyphHeader::static_size + 2 * header.numberOfContours; }
+
+ unsigned int length (unsigned int instruction_len) const
+ { return instruction_len_offset () + 2 + instruction_len; }
+
+ bool has_instructions_length () const
+ {
+ return instruction_len_offset () + 2 <= bytes.length;
+ }
+
+ unsigned int instructions_length () const
+ {
+ unsigned int instruction_length_offset = instruction_len_offset ();
+ if (unlikely (instruction_length_offset + 2 > bytes.length)) return 0;
+
+ const HBUINT16 &instructionLength = StructAtOffset<HBUINT16> (&bytes, instruction_length_offset);
+ /* Out of bounds of the current glyph */
+ if (unlikely (length (instructionLength) > bytes.length)) return 0;
+ return instructionLength;
+ }
+
+ const hb_bytes_t trim_padding () const
+ {
+ /* based on FontTools _g_l_y_f.py::trim */
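+    /* Walk the flag array, adding up how many x/y coordinate bytes each point
+     * needs (0, 1 or 2 per axis depending on the SHORT/SAME flags), so we can
+     * compute where the coordinate data really ends and drop any padding. */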
+ const uint8_t *glyph = (uint8_t*) bytes.arrayZ;
+ const uint8_t *glyph_end = glyph + bytes.length;
+ /* simple glyph w/contours, possibly trimmable */
+ glyph += instruction_len_offset ();
+
+ if (unlikely (glyph + 2 >= glyph_end)) return hb_bytes_t ();
+ unsigned int num_coordinates = StructAtOffset<HBUINT16> (glyph - 2, 0) + 1;
+ unsigned int num_instructions = StructAtOffset<HBUINT16> (glyph, 0);
+
+ glyph += 2 + num_instructions;
+
+ unsigned int coord_bytes = 0;
+ unsigned int coords_with_flags = 0;
+ while (glyph < glyph_end)
+ {
+ uint8_t flag = *glyph;
+ glyph++;
+
+ unsigned int repeat = 1;
+ if (flag & FLAG_REPEAT)
+ {
+ if (unlikely (glyph >= glyph_end)) return hb_bytes_t ();
+ repeat = *glyph + 1;
+ glyph++;
+ }
+
+ unsigned int xBytes, yBytes;
+ xBytes = yBytes = 0;
+ if (flag & FLAG_X_SHORT) xBytes = 1;
+ else if ((flag & FLAG_X_SAME) == 0) xBytes = 2;
+
+ if (flag & FLAG_Y_SHORT) yBytes = 1;
+ else if ((flag & FLAG_Y_SAME) == 0) yBytes = 2;
+
+ coord_bytes += (xBytes + yBytes) * repeat;
+ coords_with_flags += repeat;
+ if (coords_with_flags >= num_coordinates) break;
+ }
+
+ if (unlikely (coords_with_flags != num_coordinates)) return hb_bytes_t ();
+ return bytes.sub_array (0, bytes.length + coord_bytes - (glyph_end - glyph));
+ }
+
+ /* zero instruction length */
+ void drop_hints ()
+ {
+ if (!has_instructions_length ()) return;
+ GlyphHeader &glyph_header = const_cast<GlyphHeader &> (header);
+ (HBUINT16 &) StructAtOffset<HBUINT16> (&glyph_header, instruction_len_offset ()) = 0;
+ }
+
+ void drop_hints_bytes (hb_bytes_t &dest_start, hb_bytes_t &dest_end) const
+ {
+ unsigned int instructions_len = instructions_length ();
+ unsigned int glyph_length = length (instructions_len);
+ dest_start = bytes.sub_array (0, glyph_length - instructions_len);
+ dest_end = bytes.sub_array (glyph_length, bytes.length - glyph_length);
+ }
+
+ void set_overlaps_flag ()
+ {
+ if (unlikely (!header.numberOfContours)) return;
+
+ unsigned flags_offset = length (instructions_length ());
+ if (unlikely (flags_offset + 1 > bytes.length)) return;
+
+ HBUINT8 &first_flag = (HBUINT8 &) StructAtOffset<HBUINT16> (&bytes, flags_offset);
+ first_flag = (uint8_t) first_flag | FLAG_OVERLAP_SIMPLE;
+ }
+
+ static bool read_flags (const HBUINT8 *&p /* IN/OUT */,
+ hb_array_t<contour_point_t> points_ /* IN/OUT */,
+ const HBUINT8 *end)
+ {
+ unsigned count = points_.length;
+ for (unsigned int i = 0; i < count;)
+ {
+ if (unlikely (p + 1 > end)) return false;
+ uint8_t flag = *p++;
+ points_.arrayZ[i++].flag = flag;
+ if (flag & FLAG_REPEAT)
+ {
+ if (unlikely (p + 1 > end)) return false;
+ unsigned int repeat_count = *p++;
+ unsigned stop = hb_min (i + repeat_count, count);
+ for (; i < stop; i++)
+ points_.arrayZ[i].flag = flag;
+ }
+ }
+ return true;
+ }
+
+ static bool read_points (const HBUINT8 *&p /* IN/OUT */,
+ hb_array_t<contour_point_t> points_ /* IN/OUT */,
+ const HBUINT8 *end,
+ float contour_point_t::*m,
+ const simple_glyph_flag_t short_flag,
+ const simple_glyph_flag_t same_flag)
+ {
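+    /* Coordinates are stored as deltas.  If the SHORT flag is set, the delta
+     * is one unsigned byte and the SAME flag gives its sign (set means
+     * positive); otherwise the SAME flag means "no delta" and a clear SAME
+     * flag means a signed 16-bit delta follows. */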
+ int v = 0;
+
+ unsigned count = points_.length;
+ for (unsigned i = 0; i < count; i++)
+ {
+ unsigned flag = points_.arrayZ[i].flag;
+ if (flag & short_flag)
+ {
+ if (unlikely (p + 1 > end)) return false;
+ if (flag & same_flag)
+ v += *p++;
+ else
+ v -= *p++;
+ }
+ else
+ {
+ if (!(flag & same_flag))
+ {
+ if (unlikely (p + HBINT16::static_size > end)) return false;
+ v += *(const HBINT16 *) p;
+ p += HBINT16::static_size;
+ }
+ }
+ points_.arrayZ[i].*m = v;
+ }
+ return true;
+ }
+
+ bool get_contour_points (contour_point_vector_t &points /* OUT */,
+ bool phantom_only = false) const
+ {
+ const HBUINT16 *endPtsOfContours = &StructAfter<HBUINT16> (header);
+ int num_contours = header.numberOfContours;
+ assert (num_contours > 0);
+ /* One extra item at the end, for the instruction-count below. */
+ if (unlikely (!bytes.check_range (&endPtsOfContours[num_contours]))) return false;
+ unsigned int num_points = endPtsOfContours[num_contours - 1] + 1;
+
+ unsigned old_length = points.length;
+ points.alloc (points.length + num_points + 4, true); // Allocate for phantom points, to avoid a possible copy
+ if (!points.resize (points.length + num_points, false)) return false;
+ auto points_ = points.as_array ().sub_array (old_length);
+ hb_memset (points_.arrayZ, 0, sizeof (contour_point_t) * num_points);
+ if (phantom_only) return true;
+
+ for (int i = 0; i < num_contours; i++)
+ points_[endPtsOfContours[i]].is_end_point = true;
+
+ /* Skip instructions */
+ const HBUINT8 *p = &StructAtOffset<HBUINT8> (&endPtsOfContours[num_contours + 1],
+ endPtsOfContours[num_contours]);
+
+ if (unlikely ((const char *) p < bytes.arrayZ)) return false; /* Unlikely overflow */
+ const HBUINT8 *end = (const HBUINT8 *) (bytes.arrayZ + bytes.length);
+ if (unlikely (p >= end)) return false;
+
+ /* Read x & y coordinates */
+ return read_flags (p, points_, end)
+ && read_points (p, points_, end, &contour_point_t::x,
+ FLAG_X_SHORT, FLAG_X_SAME)
+ && read_points (p, points_, end, &contour_point_t::y,
+ FLAG_Y_SHORT, FLAG_Y_SAME);
+ }
+
+ static void encode_coord (int value,
+ unsigned &flag,
+ const simple_glyph_flag_t short_flag,
+ const simple_glyph_flag_t same_flag,
+ hb_vector_t<uint8_t> &coords /* OUT */)
+ {
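+    /* Inverse of read_points above: a zero delta sets only the SAME flag, a
+     * delta in [-255, 255] is stored as one byte with the SAME flag as its
+     * sign, and anything larger is stored as a big-endian int16.  E.g. a
+     * delta of -3 sets the SHORT flag, leaves SAME clear and emits the byte 3. */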
+ if (value == 0)
+ {
+ flag |= same_flag;
+ }
+ else if (value >= -255 && value <= 255)
+ {
+ flag |= short_flag;
+ if (value > 0) flag |= same_flag;
+ else value = -value;
+
+ coords.arrayZ[coords.length++] = (uint8_t) value;
+ }
+ else
+ {
+ int16_t val = value;
+ coords.arrayZ[coords.length++] = val >> 8;
+ coords.arrayZ[coords.length++] = val & 0xff;
+ }
+ }
+
+ static void encode_flag (unsigned flag,
+ unsigned &repeat,
+ unsigned lastflag,
+ hb_vector_t<uint8_t> &flags /* OUT */)
+ {
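+    /* Runs of identical flags are packed using FLAG_REPEAT: once the same
+     * flag has been seen three times, the two pending flag bytes are
+     * rewritten in place as (flag | FLAG_REPEAT) followed by a repeat count,
+     * which is then bumped for further repetitions (up to 255). */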
+ if (flag == lastflag && repeat != 255)
+ {
+ repeat++;
+ if (repeat == 1)
+ {
+ /* We know there's room. */
+ flags.arrayZ[flags.length++] = flag;
+ }
+ else
+ {
+ unsigned len = flags.length;
+ flags.arrayZ[len-2] = flag | FLAG_REPEAT;
+ flags.arrayZ[len-1] = repeat;
+ }
+ }
+ else
+ {
+ repeat = 0;
+ flags.arrayZ[flags.length++] = flag;
+ }
+ }
+
+ bool compile_bytes_with_deltas (const contour_point_vector_t &all_points,
+ bool no_hinting,
+ hb_bytes_t &dest_bytes /* OUT */)
+ {
+ if (header.numberOfContours == 0 || all_points.length <= 4)
+ {
+ dest_bytes = hb_bytes_t ();
+ return true;
+ }
+ unsigned num_points = all_points.length - 4;
+
+ hb_vector_t<uint8_t> flags, x_coords, y_coords;
+ if (unlikely (!flags.alloc (num_points, true))) return false;
+ if (unlikely (!x_coords.alloc (2*num_points, true))) return false;
+ if (unlikely (!y_coords.alloc (2*num_points, true))) return false;
+
+ unsigned lastflag = 255, repeat = 0;
+ int prev_x = 0, prev_y = 0;
+
+ for (unsigned i = 0; i < num_points; i++)
+ {
+ unsigned flag = all_points.arrayZ[i].flag;
+ flag &= FLAG_ON_CURVE | FLAG_OVERLAP_SIMPLE | FLAG_CUBIC;
+
+ int cur_x = roundf (all_points.arrayZ[i].x);
+ int cur_y = roundf (all_points.arrayZ[i].y);
+ encode_coord (cur_x - prev_x, flag, FLAG_X_SHORT, FLAG_X_SAME, x_coords);
+ encode_coord (cur_y - prev_y, flag, FLAG_Y_SHORT, FLAG_Y_SAME, y_coords);
+ encode_flag (flag, repeat, lastflag, flags);
+
+ prev_x = cur_x;
+ prev_y = cur_y;
+ lastflag = flag;
+ }
+
+ unsigned len_before_instrs = 2 * header.numberOfContours + 2;
+ unsigned len_instrs = instructions_length ();
+ unsigned total_len = len_before_instrs + flags.length + x_coords.length + y_coords.length;
+
+ if (!no_hinting)
+ total_len += len_instrs;
+
+ char *p = (char *) hb_malloc (total_len);
+ if (unlikely (!p)) return false;
+
+ const char *src = bytes.arrayZ + GlyphHeader::static_size;
+ char *cur = p;
+ hb_memcpy (p, src, len_before_instrs);
+
+ cur += len_before_instrs;
+ src += len_before_instrs;
+
+ if (!no_hinting)
+ {
+ hb_memcpy (cur, src, len_instrs);
+ cur += len_instrs;
+ }
+
+ hb_memcpy (cur, flags.arrayZ, flags.length);
+ cur += flags.length;
+
+ hb_memcpy (cur, x_coords.arrayZ, x_coords.length);
+ cur += x_coords.length;
+
+ hb_memcpy (cur, y_coords.arrayZ, y_coords.length);
+
+ dest_bytes = hb_bytes_t (p, total_len);
+ return true;
+ }
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_SIMPLEGLYPH_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/SubsetGlyph.hh b/gfx/harfbuzz/src/OT/glyf/SubsetGlyph.hh
new file mode 100644
index 0000000000..26dc374eab
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/SubsetGlyph.hh
@@ -0,0 +1,152 @@
+#ifndef OT_GLYF_SUBSETGLYPH_HH
+#define OT_GLYF_SUBSETGLYPH_HH
+
+
+#include "../../hb-open-type.hh"
+
+
+namespace OT {
+
+struct glyf_accelerator_t;
+
+namespace glyf_impl {
+
+
+struct SubsetGlyph
+{
+ hb_codepoint_t old_gid;
+ Glyph source_glyph;
+ hb_bytes_t dest_start; /* region of source_glyph to copy first */
+ hb_bytes_t dest_end; /* region of source_glyph to copy second */
+ bool allocated;
+
+ bool serialize (hb_serialize_context_t *c,
+ bool use_short_loca,
+ const hb_subset_plan_t *plan)
+ {
+ TRACE_SERIALIZE (this);
+
+ hb_bytes_t dest_glyph = dest_start.copy (c);
+ hb_bytes_t end_copy = dest_end.copy (c);
+ if (!end_copy.arrayZ || !dest_glyph.arrayZ) {
+ return false;
+ }
+
+ dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + end_copy.length);
+ unsigned int pad_length = use_short_loca ? padding () : 0;
+ DEBUG_MSG (SUBSET, nullptr, "serialize %u byte glyph, width %u pad %u", dest_glyph.length, dest_glyph.length + pad_length, pad_length);
+
+ HBUINT8 pad;
+ pad = 0;
+ while (pad_length > 0)
+ {
+ c->embed (pad);
+ pad_length--;
+ }
+
+ if (unlikely (!dest_glyph.length)) return_trace (true);
+
+ /* update components gids. */
+ for (auto &_ : Glyph (dest_glyph).get_composite_iterator ())
+ {
+ hb_codepoint_t new_gid;
+ if (plan->new_gid_for_old_gid (_.get_gid(), &new_gid))
+ const_cast<CompositeGlyphRecord &> (_).set_gid (new_gid);
+ }
+#ifndef HB_NO_VAR_COMPOSITES
+ for (auto &_ : Glyph (dest_glyph).get_var_composite_iterator ())
+ {
+ hb_codepoint_t new_gid;
+ if (plan->new_gid_for_old_gid (_.get_gid(), &new_gid))
+ const_cast<VarCompositeGlyphRecord &> (_).set_gid (new_gid);
+ }
+#endif
+
+#ifndef HB_NO_BEYOND_64K
+ auto it = Glyph (dest_glyph).get_composite_iterator ();
+ if (it)
+ {
+ /* lower GID24 to GID16 in components if possible.
+ *
+ * TODO: VarComposite. Not as critical, since VarComposite supports
+ * gid24 from the first version. */
+ char *p = it ? (char *) &*it : nullptr;
+ char *q = p;
+ const char *end = dest_glyph.arrayZ + dest_glyph.length;
+ while (it)
+ {
+ auto &rec = const_cast<CompositeGlyphRecord &> (*it);
+ ++it;
+
+ q += rec.get_size ();
+
+ rec.lower_gid_24_to_16 ();
+
+ unsigned size = rec.get_size ();
+
+ memmove (p, &rec, size);
+
+ p += size;
+ }
+ memmove (p, q, end - q);
+ p += end - q;
+
+ /* We want to shorten the glyph, but we can't do that without
+ * updating the length in the loca table, which is already
+ * written out :-(. So we just fill the rest of the glyph with
+ * harmless instructions, since that's what they will be
+ * interpreted as.
+ *
+ * Should move the lowering to _populate_subset_glyphs() to
+ * fix this issue. */
+
+ hb_memset (p, 0x7A /* TrueType instruction ROFF; harmless */, end - p);
+ p += end - p;
+ dest_glyph = hb_bytes_t (dest_glyph.arrayZ, p - (char *) dest_glyph.arrayZ);
+
+ // TODO: Padding; & trim serialized bytes.
+ // TODO: Update length in loca. Ugh.
+ }
+#endif
+
+ if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ Glyph (dest_glyph).drop_hints ();
+
+ if (plan->flags & HB_SUBSET_FLAGS_SET_OVERLAPS_FLAG)
+ Glyph (dest_glyph).set_overlaps_flag ();
+
+ return_trace (true);
+ }
+
+ bool compile_bytes_with_deltas (const hb_subset_plan_t *plan,
+ hb_font_t *font,
+ const glyf_accelerator_t &glyf)
+ {
+ allocated = source_glyph.compile_bytes_with_deltas (plan, font, glyf, dest_start, dest_end);
+ return allocated;
+ }
+
+ void free_compiled_bytes ()
+ {
+ if (likely (allocated)) {
+ allocated = false;
+ dest_start.fini ();
+ dest_end.fini ();
+ }
+ }
+
+ void drop_hints_bytes ()
+ { source_glyph.drop_hints_bytes (dest_start, dest_end); }
+
+ unsigned int length () const { return dest_start.length + dest_end.length; }
+  /* pad length to a multiple of 2 to ensure 2-byte loca will be ok */
+ unsigned int padding () const { return length () % 2; }
+ unsigned int padded_size () const { return length () + padding (); }
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_SUBSETGLYPH_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh b/gfx/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh
new file mode 100644
index 0000000000..6dc6fd9ded
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/VarCompositeGlyph.hh
@@ -0,0 +1,401 @@
+#ifndef OT_GLYF_VARCOMPOSITEGLYPH_HH
+#define OT_GLYF_VARCOMPOSITEGLYPH_HH
+
+
+#include "../../hb-open-type.hh"
+#include "coord-setter.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct VarCompositeGlyphRecord
+{
+ protected:
+ enum var_composite_glyph_flag_t
+ {
+ USE_MY_METRICS = 0x0001,
+ AXIS_INDICES_ARE_SHORT = 0x0002,
+ UNIFORM_SCALE = 0x0004,
+ HAVE_TRANSLATE_X = 0x0008,
+ HAVE_TRANSLATE_Y = 0x0010,
+ HAVE_ROTATION = 0x0020,
+ HAVE_SCALE_X = 0x0040,
+ HAVE_SCALE_Y = 0x0080,
+ HAVE_SKEW_X = 0x0100,
+ HAVE_SKEW_Y = 0x0200,
+ HAVE_TCENTER_X = 0x0400,
+ HAVE_TCENTER_Y = 0x0800,
+ GID_IS_24BIT = 0x1000,
+ AXES_HAVE_VARIATION = 0x2000,
+ RESET_UNSPECIFIED_AXES = 0x4000,
+ };
+
+ public:
+
+ unsigned int get_size () const
+ {
+ unsigned fl = flags;
+ unsigned int size = min_size;
+
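+    /* Each axis record is an axis index (one or two bytes, depending on
+     * AXIS_INDICES_ARE_SHORT) followed by a two-byte F2DOT14 value, i.e.
+     * three or four bytes per axis. */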
+ unsigned axis_width = (fl & AXIS_INDICES_ARE_SHORT) ? 4 : 3;
+ size += numAxes * axis_width;
+
+ if (fl & GID_IS_24BIT) size += 1;
+
+ // 2 bytes each for the following flags
+ fl = fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y |
+ HAVE_ROTATION |
+ HAVE_SCALE_X | HAVE_SCALE_Y |
+ HAVE_SKEW_X | HAVE_SKEW_Y |
+ HAVE_TCENTER_X | HAVE_TCENTER_Y);
+ size += hb_popcount (fl) * 2;
+
+ return size;
+ }
+
+ bool has_more () const { return true; }
+
+ bool is_use_my_metrics () const { return flags & USE_MY_METRICS; }
+ bool is_reset_unspecified_axes () const { return flags & RESET_UNSPECIFIED_AXES; }
+
+ hb_codepoint_t get_gid () const
+ {
+ if (flags & GID_IS_24BIT)
+ return * (const HBGlyphID24 *) &pad;
+ else
+ return * (const HBGlyphID16 *) &pad;
+ }
+
+ void set_gid (hb_codepoint_t gid)
+ {
+ if (flags & GID_IS_24BIT)
+ * (HBGlyphID24 *) &pad = gid;
+ else
+ * (HBGlyphID16 *) &pad = gid;
+ }
+
+ unsigned get_numAxes () const
+ {
+ return numAxes;
+ }
+
+ unsigned get_num_points () const
+ {
+ unsigned fl = flags;
+ unsigned num = 0;
+ if (fl & AXES_HAVE_VARIATION) num += numAxes;
+
+ /* Hopefully faster code, relying on the value of the flags. */
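+    /* Each Y flag bit sits exactly one position above its X counterpart, so
+     * folding the Y bits down onto the X bits and masking leaves one set bit
+     * per transform component that is present (X and/or Y), plus one for
+     * rotation; popcount then gives the number of pseudo-points needed. */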
+ fl = (((fl & (HAVE_TRANSLATE_Y | HAVE_SCALE_Y | HAVE_SKEW_Y | HAVE_TCENTER_Y)) >> 1) | fl) &
+ (HAVE_TRANSLATE_X | HAVE_ROTATION | HAVE_SCALE_X | HAVE_SKEW_X | HAVE_TCENTER_X);
+ num += hb_popcount (fl);
+ return num;
+
+ /* Slower but more readable code. */
+ if (fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y)) num++;
+ if (fl & HAVE_ROTATION) num++;
+ if (fl & (HAVE_SCALE_X | HAVE_SCALE_Y)) num++;
+ if (fl & (HAVE_SKEW_X | HAVE_SKEW_Y)) num++;
+ if (fl & (HAVE_TCENTER_X | HAVE_TCENTER_Y)) num++;
+ return num;
+ }
+
+ void transform_points (hb_array_t<const contour_point_t> record_points,
+ hb_array_t<contour_point_t> points) const
+ {
+ float matrix[4];
+ contour_point_t trans;
+
+ get_transformation_from_points (record_points.arrayZ, matrix, trans);
+
+ auto arrayZ = points.arrayZ;
+ unsigned count = points.length;
+
+ if (matrix[0] != 1.f || matrix[1] != 0.f ||
+ matrix[2] != 0.f || matrix[3] != 1.f)
+ for (unsigned i = 0; i < count; i++)
+ arrayZ[i].transform (matrix);
+
+ if (trans.x != 0.f || trans.y != 0.f)
+ for (unsigned i = 0; i < count; i++)
+ arrayZ[i].translate (trans);
+ }
+
+ static inline void transform (float (&matrix)[4], contour_point_t &trans,
+ float (other)[6])
+ {
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L268
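+    /* Composes the affine 'other' (2x2 matrix plus translation) into the
+     * current (matrix, trans) pair so that, when points are transformed,
+     * 'other' is applied first and the previously accumulated transform
+     * second. */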
+ float xx1 = other[0];
+ float xy1 = other[1];
+ float yx1 = other[2];
+ float yy1 = other[3];
+ float dx1 = other[4];
+ float dy1 = other[5];
+ float xx2 = matrix[0];
+ float xy2 = matrix[1];
+ float yx2 = matrix[2];
+ float yy2 = matrix[3];
+ float dx2 = trans.x;
+ float dy2 = trans.y;
+
+ matrix[0] = xx1*xx2 + xy1*yx2;
+ matrix[1] = xx1*xy2 + xy1*yy2;
+ matrix[2] = yx1*xx2 + yy1*yx2;
+ matrix[3] = yx1*xy2 + yy1*yy2;
+ trans.x = xx2*dx1 + yx2*dy1 + dx2;
+ trans.y = xy2*dx1 + yy2*dy1 + dy2;
+ }
+
+ static void translate (float (&matrix)[4], contour_point_t &trans,
+ float translateX, float translateY)
+ {
+ if (!translateX && !translateY)
+ return;
+
+ trans.x += matrix[0] * translateX + matrix[2] * translateY;
+ trans.y += matrix[1] * translateX + matrix[3] * translateY;
+ }
+
+ static void scale (float (&matrix)[4], contour_point_t &trans,
+ float scaleX, float scaleY)
+ {
+ if (scaleX == 1.f && scaleY == 1.f)
+ return;
+
+ matrix[0] *= scaleX;
+ matrix[1] *= scaleX;
+ matrix[2] *= scaleY;
+ matrix[3] *= scaleY;
+ }
+
+ static void rotate (float (&matrix)[4], contour_point_t &trans,
+ float rotation)
+ {
+ if (!rotation)
+ return;
+
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L240
+ rotation = rotation * HB_PI;
+ float c;
+ float s;
+#ifdef HAVE_SINCOSF
+ sincosf (rotation, &s, &c);
+#else
+ c = cosf (rotation);
+ s = sinf (rotation);
+#endif
+ float other[6] = {c, s, -s, c, 0.f, 0.f};
+ transform (matrix, trans, other);
+ }
+
+ static void skew (float (&matrix)[4], contour_point_t &trans,
+ float skewX, float skewY)
+ {
+ if (!skewX && !skewY)
+ return;
+
+ // https://github.com/fonttools/fonttools/blob/f66ee05f71c8b57b5f519ee975e95edcd1466e14/Lib/fontTools/misc/transform.py#L255
+ skewX = skewX * HB_PI;
+ skewY = skewY * HB_PI;
+ float other[6] = {1.f,
+ skewY ? tanf (skewY) : 0.f,
+ skewX ? tanf (skewX) : 0.f,
+ 1.f,
+ 0.f, 0.f};
+ transform (matrix, trans, other);
+ }
+
+ bool get_points (contour_point_vector_t &points) const
+ {
+ unsigned num_points = get_num_points ();
+
+ points.alloc (points.length + num_points + 4); // For phantom points
+ if (unlikely (!points.resize (points.length + num_points, false))) return false;
+ contour_point_t *rec_points = points.arrayZ + (points.length - num_points);
+ memset (rec_points, 0, num_points * sizeof (rec_points[0]));
+
+ unsigned fl = flags;
+
+ unsigned num_axes = numAxes;
+ unsigned axis_width = (fl & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
+ unsigned axes_size = num_axes * axis_width;
+
+ const F2DOT14 *q = (const F2DOT14 *) (axes_size +
+ (fl & GID_IS_24BIT ? 3 : 2) +
+ (const HBUINT8 *) &pad);
+
+ unsigned count = num_axes;
+ if (fl & AXES_HAVE_VARIATION)
+ {
+ for (unsigned i = 0; i < count; i++)
+ rec_points++->x = q++->to_int ();
+ }
+ else
+ q += count;
+
+ const HBUINT16 *p = (const HBUINT16 *) q;
+
+ if (fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
+ {
+ int translateX = (fl & HAVE_TRANSLATE_X) ? * (const FWORD *) p++ : 0;
+ int translateY = (fl & HAVE_TRANSLATE_Y) ? * (const FWORD *) p++ : 0;
+ rec_points->x = translateX;
+ rec_points->y = translateY;
+ rec_points++;
+ }
+ if (fl & HAVE_ROTATION)
+ {
+ int rotation = (fl & HAVE_ROTATION) ? ((const F4DOT12 *) p++)->to_int () : 0;
+ rec_points->x = rotation;
+ rec_points++;
+ }
+ if (fl & (HAVE_SCALE_X | HAVE_SCALE_Y))
+ {
+ int scaleX = (fl & HAVE_SCALE_X) ? ((const F6DOT10 *) p++)->to_int () : 1 << 10;
+ int scaleY = (fl & HAVE_SCALE_Y) ? ((const F6DOT10 *) p++)->to_int () : 1 << 10;
+ if ((fl & UNIFORM_SCALE) && !(fl & HAVE_SCALE_Y))
+ scaleY = scaleX;
+ rec_points->x = scaleX;
+ rec_points->y = scaleY;
+ rec_points++;
+ }
+ if (fl & (HAVE_SKEW_X | HAVE_SKEW_Y))
+ {
+ int skewX = (fl & HAVE_SKEW_X) ? ((const F4DOT12 *) p++)->to_int () : 0;
+ int skewY = (fl & HAVE_SKEW_Y) ? ((const F4DOT12 *) p++)->to_int () : 0;
+ rec_points->x = skewX;
+ rec_points->y = skewY;
+ rec_points++;
+ }
+ if (fl & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
+ {
+ int tCenterX = (fl & HAVE_TCENTER_X) ? * (const FWORD *) p++ : 0;
+ int tCenterY = (fl & HAVE_TCENTER_Y) ? * (const FWORD *) p++ : 0;
+ rec_points->x = tCenterX;
+ rec_points->y = tCenterY;
+ rec_points++;
+ }
+
+ return true;
+ }
+
+ void get_transformation_from_points (const contour_point_t *rec_points,
+ float (&matrix)[4], contour_point_t &trans) const
+ {
+ unsigned fl = flags;
+
+ if (fl & AXES_HAVE_VARIATION)
+ rec_points += numAxes;
+
+ matrix[0] = matrix[3] = 1.f;
+ matrix[1] = matrix[2] = 0.f;
+ trans.init (0.f, 0.f);
+
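+    /* The pseudo-points written by get_points () hold the raw fixed-point
+     * component values; convert them back to floats here: rotation and skews
+     * are F4DOT12 (divide by 1 << 12), scales are F6DOT10 (divide by
+     * 1 << 10), translations and the transform center are plain font units. */
+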
+ float translateX = 0.f;
+ float translateY = 0.f;
+ float rotation = 0.f;
+ float scaleX = 1.f;
+ float scaleY = 1.f;
+ float skewX = 0.f;
+ float skewY = 0.f;
+ float tCenterX = 0.f;
+ float tCenterY = 0.f;
+
+ if (fl & (HAVE_TRANSLATE_X | HAVE_TRANSLATE_Y))
+ {
+ translateX = rec_points->x;
+ translateY = rec_points->y;
+ rec_points++;
+ }
+ if (fl & HAVE_ROTATION)
+ {
+ rotation = rec_points->x / (1 << 12);
+ rec_points++;
+ }
+ if (fl & (HAVE_SCALE_X | HAVE_SCALE_Y))
+ {
+ scaleX = rec_points->x / (1 << 10);
+ scaleY = rec_points->y / (1 << 10);
+ rec_points++;
+ }
+ if (fl & (HAVE_SKEW_X | HAVE_SKEW_Y))
+ {
+ skewX = rec_points->x / (1 << 12);
+ skewY = rec_points->y / (1 << 12);
+ rec_points++;
+ }
+ if (fl & (HAVE_TCENTER_X | HAVE_TCENTER_Y))
+ {
+ tCenterX = rec_points->x;
+ tCenterY = rec_points->y;
+ rec_points++;
+ }
+
+ translate (matrix, trans, translateX + tCenterX, translateY + tCenterY);
+ rotate (matrix, trans, rotation);
+ scale (matrix, trans, scaleX, scaleY);
+ skew (matrix, trans, -skewX, skewY);
+ translate (matrix, trans, -tCenterX, -tCenterY);
+ }
+
+ void set_variations (coord_setter_t &setter,
+ hb_array_t<contour_point_t> rec_points) const
+ {
+ bool have_variations = flags & AXES_HAVE_VARIATION;
+ unsigned axis_width = (flags & AXIS_INDICES_ARE_SHORT) ? 2 : 1;
+ unsigned num_axes = numAxes;
+
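+    /* Skip past flags, numAxes and the glyph ID (2 or 3 bytes) to reach the
+     * axis index array; the F2DOT14 axis values follow immediately after it. */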
+ const HBUINT8 *p = (const HBUINT8 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24BIT ? 3 : 2));
+ const HBUINT16 *q = (const HBUINT16 *) (((HBUINT8 *) &numAxes) + numAxes.static_size + (flags & GID_IS_24BIT ? 3 : 2));
+
+ const F2DOT14 *a = (const F2DOT14 *) ((HBUINT8 *) (axis_width == 1 ? (p + num_axes) : (HBUINT8 *) (q + num_axes)));
+
+ unsigned count = num_axes;
+ for (unsigned i = 0; i < count; i++)
+ {
+ unsigned axis_index = axis_width == 1 ? (unsigned) *p++ : (unsigned) *q++;
+
+ signed v = have_variations ? rec_points.arrayZ[i].x : a++->to_int ();
+
+ v = hb_clamp (v, -(1<<14), (1<<14));
+ setter[axis_index] = v;
+ }
+ }
+
+ protected:
+ HBUINT16 flags;
+ HBUINT8 numAxes;
+ HBUINT16 pad;
+ public:
+ DEFINE_SIZE_MIN (5);
+};
+
+using var_composite_iter_t = composite_iter_tmpl<VarCompositeGlyphRecord>;
+
+struct VarCompositeGlyph
+{
+ const GlyphHeader &header;
+ hb_bytes_t bytes;
+ VarCompositeGlyph (const GlyphHeader &header_, hb_bytes_t bytes_) :
+ header (header_), bytes (bytes_) {}
+
+ var_composite_iter_t iter () const
+ { return var_composite_iter_t (bytes, &StructAfter<VarCompositeGlyphRecord, GlyphHeader> (header)); }
+
+ const hb_bytes_t trim_padding () const
+ {
+ unsigned length = GlyphHeader::static_size;
+ for (auto &comp : iter ())
+ length += comp.get_size ();
+ return bytes.sub_array (0, length);
+ }
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_VARCOMPOSITEGLYPH_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/composite-iter.hh b/gfx/harfbuzz/src/OT/glyf/composite-iter.hh
new file mode 100644
index 0000000000..d05701f3d1
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/composite-iter.hh
@@ -0,0 +1,68 @@
+#ifndef OT_GLYF_COMPOSITE_ITER_HH
+#define OT_GLYF_COMPOSITE_ITER_HH
+
+
+#include "../../hb.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+template <typename CompositeGlyphRecord>
+struct composite_iter_tmpl : hb_iter_with_fallback_t<composite_iter_tmpl<CompositeGlyphRecord>,
+ const CompositeGlyphRecord &>
+{
+ typedef const CompositeGlyphRecord *__item_t__;
+ composite_iter_tmpl (hb_bytes_t glyph_, __item_t__ current_) :
+ glyph (glyph_), current (nullptr), current_size (0)
+ {
+ set_current (current_);
+ }
+
+ composite_iter_tmpl () : glyph (hb_bytes_t ()), current (nullptr), current_size (0) {}
+
+ const CompositeGlyphRecord & __item__ () const { return *current; }
+ bool __more__ () const { return current; }
+ void __next__ ()
+ {
+ if (!current->has_more ()) { current = nullptr; return; }
+
+ set_current (&StructAtOffset<CompositeGlyphRecord> (current, current_size));
+ }
+ composite_iter_tmpl __end__ () const { return composite_iter_tmpl (); }
+ bool operator != (const composite_iter_tmpl& o) const
+ { return current != o.current; }
+
+
+ void set_current (__item_t__ current_)
+ {
+ if (!glyph.check_range (current_, CompositeGlyphRecord::min_size))
+ {
+ current = nullptr;
+ current_size = 0;
+ return;
+ }
+ unsigned size = current_->get_size ();
+ if (!glyph.check_range (current_, size))
+ {
+ current = nullptr;
+ current_size = 0;
+ return;
+ }
+
+ current = current_;
+ current_size = size;
+ }
+
+ private:
+ hb_bytes_t glyph;
+ __item_t__ current;
+ unsigned current_size;
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+#endif /* OT_GLYF_COMPOSITE_ITER_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/coord-setter.hh b/gfx/harfbuzz/src/OT/glyf/coord-setter.hh
new file mode 100644
index 0000000000..df64ed5af7
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/coord-setter.hh
@@ -0,0 +1,34 @@
+#ifndef OT_GLYF_COORD_SETTER_HH
+#define OT_GLYF_COORD_SETTER_HH
+
+
+#include "../../hb.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct coord_setter_t
+{
+ coord_setter_t (hb_array_t<int> coords) :
+ coords (coords) {}
+
+ int& operator [] (unsigned idx)
+ {
+ if (coords.length < idx + 1)
+ coords.resize (idx + 1);
+ return coords[idx];
+ }
+
+ hb_array_t<int> get_coords ()
+ { return coords.as_array (); }
+
+ hb_vector_t<int> coords;
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+#endif /* OT_GLYF_COORD_SETTER_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/glyf-helpers.hh b/gfx/harfbuzz/src/OT/glyf/glyf-helpers.hh
new file mode 100644
index 0000000000..30106b2b98
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/glyf-helpers.hh
@@ -0,0 +1,104 @@
+#ifndef OT_GLYF_GLYF_HELPERS_HH
+#define OT_GLYF_GLYF_HELPERS_HH
+
+
+#include "../../hb-open-type.hh"
+#include "../../hb-subset-plan.hh"
+
+#include "loca.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+template<typename IteratorIn, typename IteratorOut,
+ hb_requires (hb_is_source_of (IteratorIn, unsigned int)),
+ hb_requires (hb_is_sink_of (IteratorOut, unsigned))>
+static void
+_write_loca (IteratorIn&& it, bool short_offsets, IteratorOut&& dest)
+{
+ unsigned right_shift = short_offsets ? 1 : 0;
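+  /* Short loca stores each byte offset divided by two, which is why padded
+   * glyph sizes are kept even. */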
+ unsigned int offset = 0;
+ dest << 0;
+ + it
+ | hb_map ([=, &offset] (unsigned int padded_size)
+ {
+ offset += padded_size;
+ DEBUG_MSG (SUBSET, nullptr, "loca entry offset %u", offset);
+ return offset >> right_shift;
+ })
+ | hb_sink (dest)
+ ;
+}
+
+static bool
+_add_head_and_set_loca_version (hb_subset_plan_t *plan, bool use_short_loca)
+{
+ hb_blob_t *head_blob = hb_sanitize_context_t ().reference_table<head> (plan->source);
+ hb_blob_t *head_prime_blob = hb_blob_copy_writable_or_fail (head_blob);
+ hb_blob_destroy (head_blob);
+
+ if (unlikely (!head_prime_blob))
+ return false;
+
+ head *head_prime = (head *) hb_blob_get_data_writable (head_prime_blob, nullptr);
+ head_prime->indexToLocFormat = use_short_loca ? 0 : 1;
+ if (plan->normalized_coords)
+ {
+ head_prime->xMin = plan->head_maxp_info.xMin;
+ head_prime->xMax = plan->head_maxp_info.xMax;
+ head_prime->yMin = plan->head_maxp_info.yMin;
+ head_prime->yMax = plan->head_maxp_info.yMax;
+
+ unsigned orig_flag = head_prime->flags;
+ if (plan->head_maxp_info.allXMinIsLsb)
+ orig_flag |= 1 << 1;
+ else
+ orig_flag &= ~(1 << 1);
+ head_prime->flags = orig_flag;
+ }
+ bool success = plan->add_table (HB_OT_TAG_head, head_prime_blob);
+
+ hb_blob_destroy (head_prime_blob);
+ return success;
+}
+
+template<typename Iterator,
+ hb_requires (hb_is_source_of (Iterator, unsigned int))>
+static bool
+_add_loca_and_head (hb_subset_plan_t * plan, Iterator padded_offsets, bool use_short_loca)
+{
+ unsigned num_offsets = padded_offsets.len () + 1;
+ unsigned entry_size = use_short_loca ? 2 : 4;
+ char *loca_prime_data = (char *) hb_calloc (entry_size, num_offsets);
+
+ if (unlikely (!loca_prime_data)) return false;
+
+ DEBUG_MSG (SUBSET, nullptr, "loca entry_size %u num_offsets %u size %u",
+ entry_size, num_offsets, entry_size * num_offsets);
+
+ if (use_short_loca)
+ _write_loca (padded_offsets, true, hb_array ((HBUINT16 *) loca_prime_data, num_offsets));
+ else
+ _write_loca (padded_offsets, false, hb_array ((HBUINT32 *) loca_prime_data, num_offsets));
+
+ hb_blob_t *loca_blob = hb_blob_create (loca_prime_data,
+ entry_size * num_offsets,
+ HB_MEMORY_MODE_WRITABLE,
+ loca_prime_data,
+ hb_free);
+
+ bool result = plan->add_table (HB_OT_TAG_loca, loca_blob)
+ && _add_head_and_set_loca_version (plan, use_short_loca);
+
+ hb_blob_destroy (loca_blob);
+ return result;
+}
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_GLYF_HELPERS_HH */
diff --git a/gfx/harfbuzz/src/OT/glyf/glyf.hh b/gfx/harfbuzz/src/OT/glyf/glyf.hh
new file mode 100644
index 0000000000..dd08dda6ee
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/glyf.hh
@@ -0,0 +1,504 @@
+#ifndef OT_GLYF_GLYF_HH
+#define OT_GLYF_GLYF_HH
+
+
+#include "../../hb-open-type.hh"
+#include "../../hb-ot-head-table.hh"
+#include "../../hb-ot-hmtx-table.hh"
+#include "../../hb-ot-var-gvar-table.hh"
+#include "../../hb-draw.hh"
+#include "../../hb-paint.hh"
+
+#include "glyf-helpers.hh"
+#include "Glyph.hh"
+#include "SubsetGlyph.hh"
+#include "loca.hh"
+#include "path-builder.hh"
+
+
+namespace OT {
+
+
+/*
+ * glyf -- TrueType Glyph Data
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/glyf
+ */
+#define HB_OT_TAG_glyf HB_TAG('g','l','y','f')
+
+struct glyf
+{
+ friend struct glyf_accelerator_t;
+
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_glyf;
+
+ static bool has_valid_glyf_format(const hb_face_t* face)
+ {
+ const OT::head &head = *face->table.head;
+ return head.indexToLocFormat <= 1 && head.glyphDataFormat <= 1;
+ }
+
+ bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const
+ {
+ TRACE_SANITIZE (this);
+    /* Runtime checks only, as eagerly sanitizing each glyph is costly */
+ return_trace (true);
+ }
+
+  /* requires a source of SubsetGlyph; the constraint is not spelled out here
+   * because the compiler complains the identifier isn't declared */
+ template <typename Iterator>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator it,
+ bool use_short_loca,
+ const hb_subset_plan_t *plan)
+ {
+ TRACE_SERIALIZE (this);
+
+ unsigned init_len = c->length ();
+ for (auto &_ : it)
+ if (unlikely (!_.serialize (c, use_short_loca, plan)))
+ return false;
+
+    /* As a special case, when all glyphs in the font are empty, add a zero byte
+ * to the table, so that OTS doesn’t reject it, and to make the table work
+ * on Windows as well.
+ * See https://github.com/khaledhosny/ots/issues/52 */
+ if (init_len == c->length ())
+ {
+ HBUINT8 empty_byte;
+ empty_byte = 0;
+ c->copy (empty_byte);
+ }
+ return_trace (true);
+ }
+
+  /* Byte region(s) per glyph to output, unpadded, with hints removed if so
+   * requested.  If we fail to process a glyph we produce an empty (0-length)
+   * glyph. */
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+
+ if (!has_valid_glyf_format (c->plan->source)) {
+      // glyf format is unknown; don't attempt to subset it.
+      DEBUG_MSG (SUBSET, nullptr,
+                 "unknown glyf format, dropping from subset.");
+ return_trace (false);
+ }
+
+ glyf *glyf_prime = c->serializer->start_embed <glyf> ();
+ if (unlikely (!c->serializer->check_success (glyf_prime))) return_trace (false);
+
+ hb_font_t *font = nullptr;
+ if (c->plan->normalized_coords)
+ {
+ font = _create_font_for_instancing (c->plan);
+ if (unlikely (!font)) return false;
+ }
+
+ hb_vector_t<unsigned> padded_offsets;
+ unsigned num_glyphs = c->plan->num_output_glyphs ();
+ if (unlikely (!padded_offsets.resize (num_glyphs)))
+ {
+ hb_font_destroy (font);
+ return false;
+ }
+
+ hb_vector_t<glyf_impl::SubsetGlyph> glyphs;
+ if (!_populate_subset_glyphs (c->plan, font, glyphs))
+ {
+ hb_font_destroy (font);
+ return false;
+ }
+
+ if (font)
+ hb_font_destroy (font);
+
+ unsigned max_offset = 0;
+ for (unsigned i = 0; i < num_glyphs; i++)
+ {
+ padded_offsets[i] = glyphs[i].padded_size ();
+ max_offset += padded_offsets[i];
+ }
+
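+    /* Short (16-bit) loca entries store offset / 2, so they can address at
+     * most 0xFFFF * 2 = 0x1FFFE bytes of glyph data; fall back to long loca
+     * beyond that, or when the plan forces it. */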
+ bool use_short_loca = false;
+ if (likely (!c->plan->force_long_loca))
+ use_short_loca = max_offset < 0x1FFFF;
+
+ if (!use_short_loca) {
+ for (unsigned i = 0; i < num_glyphs; i++)
+ padded_offsets[i] = glyphs[i].length ();
+ }
+
+ bool result = glyf_prime->serialize (c->serializer, glyphs.writer (), use_short_loca, c->plan);
+ if (c->plan->normalized_coords && !c->plan->pinned_at_default)
+ _free_compiled_subset_glyphs (glyphs);
+
+ if (!result) return false;
+
+ if (unlikely (c->serializer->in_error ())) return_trace (false);
+
+ return_trace (c->serializer->check_success (glyf_impl::_add_loca_and_head (c->plan,
+ padded_offsets.iter (),
+ use_short_loca)));
+ }
+
+ bool
+ _populate_subset_glyphs (const hb_subset_plan_t *plan,
+ hb_font_t *font,
+ hb_vector_t<glyf_impl::SubsetGlyph> &glyphs /* OUT */) const;
+
+ hb_font_t *
+ _create_font_for_instancing (const hb_subset_plan_t *plan) const;
+
+ void _free_compiled_subset_glyphs (hb_vector_t<glyf_impl::SubsetGlyph> &glyphs) const
+ {
+ for (unsigned i = 0; i < glyphs.length; i++)
+ glyphs[i].free_compiled_bytes ();
+ }
+
+ protected:
+ UnsizedArrayOf<HBUINT8>
+ dataZ; /* Glyphs data. */
+ public:
+ DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
+ * check the size externally, allow Null() object of it by
+ * defining it _MIN instead. */
+};
+
+struct glyf_accelerator_t
+{
+ glyf_accelerator_t (hb_face_t *face)
+ {
+ short_offset = false;
+ num_glyphs = 0;
+ loca_table = nullptr;
+ glyf_table = nullptr;
+#ifndef HB_NO_VAR
+ gvar = nullptr;
+#endif
+ hmtx = nullptr;
+#ifndef HB_NO_VERTICAL
+ vmtx = nullptr;
+#endif
+ const OT::head &head = *face->table.head;
+ if (!glyf::has_valid_glyf_format (face))
+ /* Unknown format. Leave num_glyphs=0, that takes care of disabling us. */
+ return;
+ short_offset = 0 == head.indexToLocFormat;
+
+ loca_table = face->table.loca.get_blob (); // Needs no destruct!
+ glyf_table = hb_sanitize_context_t ().reference_table<glyf> (face);
+#ifndef HB_NO_VAR
+ gvar = face->table.gvar;
+#endif
+ hmtx = face->table.hmtx;
+#ifndef HB_NO_VERTICAL
+ vmtx = face->table.vmtx;
+#endif
+
+ num_glyphs = hb_max (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1;
+ num_glyphs = hb_min (num_glyphs, face->get_num_glyphs ());
+ }
+ ~glyf_accelerator_t ()
+ {
+ glyf_table.destroy ();
+ }
+
+ bool has_data () const { return num_glyphs; }
+
+ protected:
+ template<typename T>
+ bool get_points (hb_font_t *font, hb_codepoint_t gid, T consumer) const
+ {
+ if (gid >= num_glyphs) return false;
+
+    /* Making this alloc-free is not that easy
+ https://github.com/harfbuzz/harfbuzz/issues/2095
+ mostly because of gvar handling in VF fonts,
+ perhaps a separate path for non-VF fonts can be considered */
+ contour_point_vector_t all_points;
+
+ bool phantom_only = !consumer.is_consuming_contour_points ();
+ if (unlikely (!glyph_for_gid (gid).get_points (font, *this, all_points, nullptr, nullptr, nullptr, true, true, phantom_only)))
+ return false;
+
+ if (consumer.is_consuming_contour_points ())
+ {
+ unsigned count = all_points.length;
+ assert (count >= glyf_impl::PHANTOM_COUNT);
+ count -= glyf_impl::PHANTOM_COUNT;
+ for (unsigned point_index = 0; point_index < count; point_index++)
+ consumer.consume_point (all_points[point_index]);
+ consumer.points_end ();
+ }
+
+ /* Where to write phantoms, nullptr if not requested */
+ contour_point_t *phantoms = consumer.get_phantoms_sink ();
+ if (phantoms)
+ for (unsigned i = 0; i < glyf_impl::PHANTOM_COUNT; ++i)
+ phantoms[i] = all_points[all_points.length - glyf_impl::PHANTOM_COUNT + i];
+
+ return true;
+ }
+
+ public:
+
+#ifndef HB_NO_VAR
+ struct points_aggregator_t
+ {
+ hb_font_t *font;
+ hb_glyph_extents_t *extents;
+ contour_point_t *phantoms;
+ bool scaled;
+
+ struct contour_bounds_t
+ {
+ contour_bounds_t () { min_x = min_y = FLT_MAX; max_x = max_y = -FLT_MAX; }
+
+ void add (const contour_point_t &p)
+ {
+ min_x = hb_min (min_x, p.x);
+ min_y = hb_min (min_y, p.y);
+ max_x = hb_max (max_x, p.x);
+ max_y = hb_max (max_y, p.y);
+ }
+
+ bool empty () const { return (min_x >= max_x) || (min_y >= max_y); }
+
+ void get_extents (hb_font_t *font, hb_glyph_extents_t *extents, bool scaled)
+ {
+ if (unlikely (empty ()))
+ {
+ extents->width = 0;
+ extents->x_bearing = 0;
+ extents->height = 0;
+ extents->y_bearing = 0;
+ return;
+ }
+ {
+ extents->x_bearing = roundf (min_x);
+ extents->width = roundf (max_x - extents->x_bearing);
+ extents->y_bearing = roundf (max_y);
+ extents->height = roundf (min_y - extents->y_bearing);
+
+ if (scaled)
+ font->scale_glyph_extents (extents);
+ }
+ }
+
+ protected:
+ float min_x, min_y, max_x, max_y;
+ } bounds;
+
+ points_aggregator_t (hb_font_t *font_, hb_glyph_extents_t *extents_, contour_point_t *phantoms_, bool scaled_)
+ {
+ font = font_;
+ extents = extents_;
+ phantoms = phantoms_;
+ scaled = scaled_;
+ if (extents) bounds = contour_bounds_t ();
+ }
+
+ void consume_point (const contour_point_t &point) { bounds.add (point); }
+ void points_end () { bounds.get_extents (font, extents, scaled); }
+
+ bool is_consuming_contour_points () { return extents; }
+ contour_point_t *get_phantoms_sink () { return phantoms; }
+ };
+
+ unsigned
+ get_advance_with_var_unscaled (hb_font_t *font, hb_codepoint_t gid, bool is_vertical) const
+ {
+ if (unlikely (gid >= num_glyphs)) return 0;
+
+ bool success = false;
+
+ contour_point_t phantoms[glyf_impl::PHANTOM_COUNT];
+ if (font->num_coords)
+ success = get_points (font, gid, points_aggregator_t (font, nullptr, phantoms, false));
+
+ if (unlikely (!success))
+ return
+#ifndef HB_NO_VERTICAL
+ is_vertical ? vmtx->get_advance_without_var_unscaled (gid) :
+#endif
+ hmtx->get_advance_without_var_unscaled (gid);
+
+ float result = is_vertical
+ ? phantoms[glyf_impl::PHANTOM_TOP].y - phantoms[glyf_impl::PHANTOM_BOTTOM].y
+ : phantoms[glyf_impl::PHANTOM_RIGHT].x - phantoms[glyf_impl::PHANTOM_LEFT].x;
+ return hb_clamp (roundf (result), 0.f, (float) UINT_MAX / 2);
+ }
+
+ bool get_leading_bearing_with_var_unscaled (hb_font_t *font, hb_codepoint_t gid, bool is_vertical, int *lsb) const
+ {
+ if (unlikely (gid >= num_glyphs)) return false;
+
+ hb_glyph_extents_t extents;
+
+ contour_point_t phantoms[glyf_impl::PHANTOM_COUNT];
+ if (unlikely (!get_points (font, gid, points_aggregator_t (font, &extents, phantoms, false))))
+ return false;
+
+ *lsb = is_vertical
+ ? roundf (phantoms[glyf_impl::PHANTOM_TOP].y) - extents.y_bearing
+ : roundf (phantoms[glyf_impl::PHANTOM_LEFT].x);
+ return true;
+ }
+#endif
+
+ bool get_leading_bearing_without_var_unscaled (hb_codepoint_t gid, bool is_vertical, int *lsb) const
+ {
+ if (unlikely (gid >= num_glyphs)) return false;
+ if (is_vertical) return false; // TODO Humm, what to do here?
+
+ *lsb = glyph_for_gid (gid).get_header ()->xMin;
+ return true;
+ }
+
+ public:
+ bool get_extents (hb_font_t *font, hb_codepoint_t gid, hb_glyph_extents_t *extents) const
+ {
+ if (unlikely (gid >= num_glyphs)) return false;
+
+#ifndef HB_NO_VAR
+ if (font->num_coords)
+ return get_points (font, gid, points_aggregator_t (font, extents, nullptr, true));
+#endif
+ return glyph_for_gid (gid).get_extents_without_var_scaled (font, *this, extents);
+ }
+
+ bool paint_glyph (hb_font_t *font, hb_codepoint_t gid, hb_paint_funcs_t *funcs, void *data, hb_color_t foreground) const
+ {
+ funcs->push_clip_glyph (data, gid, font);
+ funcs->color (data, true, foreground);
+ funcs->pop_clip (data);
+
+ return true;
+ }
+
+ const glyf_impl::Glyph
+ glyph_for_gid (hb_codepoint_t gid, bool needs_padding_removal = false) const
+ {
+ if (unlikely (gid >= num_glyphs)) return glyf_impl::Glyph ();
+
+ unsigned int start_offset, end_offset;
+
+ if (short_offset)
+ {
+ const HBUINT16 *offsets = (const HBUINT16 *) loca_table->dataZ.arrayZ;
+ start_offset = 2 * offsets[gid];
+ end_offset = 2 * offsets[gid + 1];
+ }
+ else
+ {
+ const HBUINT32 *offsets = (const HBUINT32 *) loca_table->dataZ.arrayZ;
+ start_offset = offsets[gid];
+ end_offset = offsets[gid + 1];
+ }
+
+ if (unlikely (start_offset > end_offset || end_offset > glyf_table.get_length ()))
+ return glyf_impl::Glyph ();
+
+ glyf_impl::Glyph glyph (hb_bytes_t ((const char *) this->glyf_table + start_offset,
+ end_offset - start_offset), gid);
+ return needs_padding_removal ? glyf_impl::Glyph (glyph.trim_padding (), gid) : glyph;
+ }
+
+ bool
+ get_path (hb_font_t *font, hb_codepoint_t gid, hb_draw_session_t &draw_session) const
+ { return get_points (font, gid, glyf_impl::path_builder_t (font, draw_session)); }
+
+#ifndef HB_NO_VAR
+ const gvar_accelerator_t *gvar;
+#endif
+ const hmtx_accelerator_t *hmtx;
+#ifndef HB_NO_VERTICAL
+ const vmtx_accelerator_t *vmtx;
+#endif
+
+ private:
+ bool short_offset;
+ unsigned int num_glyphs;
+ hb_blob_ptr_t<loca> loca_table;
+ hb_blob_ptr_t<glyf> glyf_table;
+};
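The accelerator above derives variable-font advances from the four phantom points appended after the real contour points (PHANTOM_RIGHT.x - PHANTOM_LEFT.x horizontally, PHANTOM_TOP.y - PHANTOM_BOTTOM.y vertically) and computes extents from a running bounding box over the consumed points. A minimal usage sketch, not part of this patch: the font path and the wght setting are illustrative assumptions, and for TrueType-outline fonts the public calls below end up in glyf_accelerator_t::get_extents() and get_advance_with_var_unscaled().

  #include <hb.h>
  #include <cstdio>

  static void
  print_metrics (const char *path /* hypothetical font file */, hb_codepoint_t gid)
  {
    hb_blob_t *blob = hb_blob_create_from_file (path);
    hb_face_t *face = hb_face_create (blob, 0);
    hb_font_t *font = hb_font_create (face);

    /* Setting variation coords exercises the phantom-point path above. */
    hb_variation_t wght = { HB_TAG ('w','g','h','t'), 700.f };
    hb_font_set_variations (font, &wght, 1);

    hb_glyph_extents_t extents;
    if (hb_font_get_glyph_extents (font, gid, &extents))
      printf ("gid %u: bearing (%d,%d), size (%d,%d), advance %d\n",
              gid, extents.x_bearing, extents.y_bearing,
              extents.width, extents.height,
              hb_font_get_glyph_h_advance (font, gid));

    hb_font_destroy (font);
    hb_face_destroy (face);
    hb_blob_destroy (blob);
  }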
+
+
+inline bool
+glyf::_populate_subset_glyphs (const hb_subset_plan_t *plan,
+ hb_font_t *font,
+ hb_vector_t<glyf_impl::SubsetGlyph>& glyphs /* OUT */) const
+{
+ OT::glyf_accelerator_t glyf (plan->source);
+ unsigned num_glyphs = plan->num_output_glyphs ();
+ if (!glyphs.resize (num_glyphs)) return false;
+
+ for (auto p : plan->glyph_map->iter ())
+ {
+ unsigned new_gid = p.second;
+ glyf_impl::SubsetGlyph& subset_glyph = glyphs.arrayZ[new_gid];
+ subset_glyph.old_gid = p.first;
+
+ if (unlikely (new_gid == 0 &&
+ !(plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE)) &&
+ !plan->normalized_coords)
+ subset_glyph.source_glyph = glyf_impl::Glyph ();
+ else
+ {
+ /* If the plan has an accelerator, the preprocessing step already trimmed glyphs.
+ * Don't trim them again! */
+ subset_glyph.source_glyph = glyf.glyph_for_gid (subset_glyph.old_gid, !plan->accelerator);
+ }
+
+ if (plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
+ subset_glyph.drop_hints_bytes ();
+ else
+ subset_glyph.dest_start = subset_glyph.source_glyph.get_bytes ();
+
+ if (font)
+ {
+ if (unlikely (!subset_glyph.compile_bytes_with_deltas (plan, font, glyf)))
+ {
+ // When pinned at default, only bounds are updated, so there is nothing to free.
+ if (!plan->pinned_at_default)
+ _free_compiled_subset_glyphs (glyphs);
+ return false;
+ }
+ }
+ }
+ return true;
+}
+
+inline hb_font_t *
+glyf::_create_font_for_instancing (const hb_subset_plan_t *plan) const
+{
+ hb_font_t *font = hb_font_create (plan->source);
+ if (unlikely (font == hb_font_get_empty ())) return nullptr;
+
+ hb_vector_t<hb_variation_t> vars;
+ if (unlikely (!vars.alloc (plan->user_axes_location.get_population (), true)))
+ {
+ hb_font_destroy (font);
+ return nullptr;
+ }
+
+ for (auto _ : plan->user_axes_location)
+ {
+ hb_variation_t var;
+ var.tag = _.first;
+ var.value = _.second;
+ vars.push (var);
+ }
+
+#ifndef HB_NO_VAR
+ hb_font_set_variations (font, vars.arrayZ, plan->user_axes_location.get_population ());
+#endif
+ return font;
+}
+
+
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_GLYF_HH */
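glyf::subset() above picks the loca format from the total padded glyph size (short offsets fit while the sum stays below 0x1FFFF, since short loca stores byte-offset/2 in 16 bits), and _create_font_for_instancing() applies the plan's user axis locations so compile_bytes_with_deltas() can fold variation deltas into the subset outlines. A client-side sketch of driving that path through the public hb-subset API, assuming a HarfBuzz build with instancing support; the glyph ids and axis value are illustrative, not taken from this patch.

  #include <hb-subset.h>

  static hb_face_t *
  subset_and_pin (hb_face_t *face)
  {
    hb_subset_input_t *input = hb_subset_input_create_or_fail ();
    if (!input) return nullptr;

    /* Keep .notdef plus one illustrative glyph id. */
    hb_set_t *gids = hb_subset_input_glyph_set (input);
    hb_set_add (gids, 0u);
    hb_set_add (gids, 42u);

    /* Matches the HB_SUBSET_FLAGS_NO_HINTING branch in _populate_subset_glyphs(). */
    hb_subset_input_set_flags (input, HB_SUBSET_FLAGS_NO_HINTING);

    /* Pinning an axis routes the plan through the instancing path above
     * (_create_font_for_instancing() / compile_bytes_with_deltas()). */
    hb_subset_input_pin_axis_location (input, face, HB_TAG ('w','g','h','t'), 700.f);

    hb_face_t *subset = hb_subset_or_fail (face, input);
    hb_subset_input_destroy (input);
    return subset;
  }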
diff --git a/gfx/harfbuzz/src/OT/glyf/loca.hh b/gfx/harfbuzz/src/OT/glyf/loca.hh
new file mode 100644
index 0000000000..4481cba8ed
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/loca.hh
@@ -0,0 +1,43 @@
+#ifndef OT_GLYF_LOCA_HH
+#define OT_GLYF_LOCA_HH
+
+
+#include "../../hb-open-type.hh"
+
+
+namespace OT {
+
+
+/*
+ * loca -- Index to Location
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/loca
+ */
+#define HB_OT_TAG_loca HB_TAG('l','o','c','a')
+
+struct loca
+{
+ friend struct glyf;
+ friend struct glyf_accelerator_t;
+
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_loca;
+
+ bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (true);
+ }
+
+ protected:
+ UnsizedArrayOf<HBUINT8>
+ dataZ; /* Location data. */
+ public:
+ DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
+ * check the size externally, allow Null() object of it by
+ * defining it _MIN instead. */
+};
+
+
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_LOCA_HH */
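loca carries no structure of its own here; the interpretation lives in glyf_accelerator_t::glyph_for_gid() above: with head.indexToLocFormat == 0 each 16-bit entry stores byte-offset/2, otherwise each 32-bit entry stores the byte offset directly, and glyph gid occupies [offsets[gid], offsets[gid+1]) inside glyf. A standalone restatement of that arithmetic, for illustration only, with the big-endian reads spelled out:

  #include <cstdint>

  /* Returns the [start, end) byte range of gid's data inside glyf,
   * mirroring glyph_for_gid() above. */
  static inline void
  loca_glyph_range (const uint8_t *loca_data, bool short_offsets,
                    unsigned gid, unsigned *start, unsigned *end)
  {
    auto be16 = [] (const uint8_t *p) {
      return (unsigned) p[0] << 8 | p[1];
    };
    auto be32 = [] (const uint8_t *p) {
      return (unsigned) p[0] << 24 | (unsigned) p[1] << 16 | (unsigned) p[2] << 8 | p[3];
    };

    if (short_offsets)
    {
      *start = 2 * be16 (loca_data + 2 * gid);
      *end   = 2 * be16 (loca_data + 2 * (gid + 1));
    }
    else
    {
      *start = be32 (loca_data + 4 * gid);
      *end   = be32 (loca_data + 4 * (gid + 1));
    }
  }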
diff --git a/gfx/harfbuzz/src/OT/glyf/path-builder.hh b/gfx/harfbuzz/src/OT/glyf/path-builder.hh
new file mode 100644
index 0000000000..f7f732d336
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/glyf/path-builder.hh
@@ -0,0 +1,189 @@
+#ifndef OT_GLYF_PATH_BUILDER_HH
+#define OT_GLYF_PATH_BUILDER_HH
+
+
+#include "../../hb.hh"
+
+
+namespace OT {
+namespace glyf_impl {
+
+
+struct path_builder_t
+{
+ hb_font_t *font;
+ hb_draw_session_t *draw_session;
+
+ struct optional_point_t
+ {
+ optional_point_t () {}
+ optional_point_t (float x_, float y_) : has_data (true), x (x_), y (y_) {}
+ operator bool () const { return has_data; }
+
+ bool has_data = false;
+ float x = 0.;
+ float y = 0.;
+
+ optional_point_t lerp (optional_point_t p, float t)
+ { return optional_point_t (x + t * (p.x - x), y + t * (p.y - y)); }
+ } first_oncurve, first_offcurve, first_offcurve2, last_offcurve, last_offcurve2;
+
+ path_builder_t (hb_font_t *font_, hb_draw_session_t &draw_session_) :
+ font (font_), draw_session (&draw_session_) {}
+
+ /* based on https://github.com/RazrFalcon/ttf-parser/blob/4f32821/src/glyf.rs#L287
+ See also:
+ * https://developer.apple.com/fonts/TrueType-Reference-Manual/RM01/Chap1.html
+ * https://stackoverflow.com/a/20772557
+ *
+ * Cubic support added. */
+ void consume_point (const contour_point_t &point)
+ {
+ bool is_on_curve = point.flag & glyf_impl::SimpleGlyph::FLAG_ON_CURVE;
+#ifdef HB_NO_CUBIC_GLYF
+ bool is_cubic = false;
+#else
+ bool is_cubic = !is_on_curve && (point.flag & glyf_impl::SimpleGlyph::FLAG_CUBIC);
+#endif
+ optional_point_t p (font->em_fscalef_x (point.x), font->em_fscalef_y (point.y));
+ if (!first_oncurve)
+ {
+ if (is_on_curve)
+ {
+ first_oncurve = p;
+ draw_session->move_to (p.x, p.y);
+ }
+ else
+ {
+ if (is_cubic && !first_offcurve2)
+ {
+ first_offcurve2 = first_offcurve;
+ first_offcurve = p;
+ }
+ else if (first_offcurve)
+ {
+ optional_point_t mid = first_offcurve.lerp (p, .5f);
+ first_oncurve = mid;
+ last_offcurve = p;
+ draw_session->move_to (mid.x, mid.y);
+ }
+ else
+ first_offcurve = p;
+ }
+ }
+ else
+ {
+ if (last_offcurve)
+ {
+ if (is_on_curve)
+ {
+ if (last_offcurve2)
+ {
+ draw_session->cubic_to (last_offcurve2.x, last_offcurve2.y,
+ last_offcurve.x, last_offcurve.y,
+ p.x, p.y);
+ last_offcurve2 = optional_point_t ();
+ }
+ else
+ draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
+ p.x, p.y);
+ last_offcurve = optional_point_t ();
+ }
+ else
+ {
+ if (is_cubic && !last_offcurve2)
+ {
+ last_offcurve2 = last_offcurve;
+ last_offcurve = p;
+ }
+ else
+ {
+ optional_point_t mid = last_offcurve.lerp (p, .5f);
+
+ if (is_cubic)
+ {
+ draw_session->cubic_to (last_offcurve2.x, last_offcurve2.y,
+ last_offcurve.x, last_offcurve.y,
+ mid.x, mid.y);
+ last_offcurve2 = optional_point_t ();
+ }
+ else
+ draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
+ mid.x, mid.y);
+ last_offcurve = p;
+ }
+ }
+ }
+ else
+ {
+ if (is_on_curve)
+ draw_session->line_to (p.x, p.y);
+ else
+ last_offcurve = p;
+ }
+ }
+
+ if (point.is_end_point)
+ {
+ if (first_offcurve && last_offcurve)
+ {
+ optional_point_t mid = last_offcurve.lerp (first_offcurve2 ?
+ first_offcurve2 :
+ first_offcurve, .5f);
+ if (last_offcurve2)
+ draw_session->cubic_to (last_offcurve2.x, last_offcurve2.y,
+ last_offcurve.x, last_offcurve.y,
+ mid.x, mid.y);
+ else
+ draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
+ mid.x, mid.y);
+ last_offcurve = optional_point_t ();
+ }
+ /* now check the rest */
+
+ if (first_offcurve && first_oncurve)
+ {
+ if (first_offcurve2)
+ draw_session->cubic_to (first_offcurve2.x, first_offcurve2.y,
+ first_offcurve.x, first_offcurve.y,
+ first_oncurve.x, first_oncurve.y);
+ else
+ draw_session->quadratic_to (first_offcurve.x, first_offcurve.y,
+ first_oncurve.x, first_oncurve.y);
+ }
+ else if (last_offcurve && first_oncurve)
+ {
+ if (last_offcurve2)
+ draw_session->cubic_to (last_offcurve2.x, last_offcurve2.y,
+ last_offcurve.x, last_offcurve.y,
+ first_oncurve.x, first_oncurve.y);
+ else
+ draw_session->quadratic_to (last_offcurve.x, last_offcurve.y,
+ first_oncurve.x, first_oncurve.y);
+ }
+ else if (first_oncurve)
+ draw_session->line_to (first_oncurve.x, first_oncurve.y);
+ else if (first_offcurve)
+ {
+ float x = first_offcurve.x, y = first_offcurve.y;
+ draw_session->move_to (x, y);
+ draw_session->quadratic_to (x, y, x, y);
+ }
+
+ /* Getting ready for the next contour */
+ first_oncurve = first_offcurve = last_offcurve = last_offcurve2 = optional_point_t ();
+ draw_session->close_path ();
+ }
+ }
+ void points_end () {}
+
+ bool is_consuming_contour_points () { return true; }
+ contour_point_t *get_phantoms_sink () { return nullptr; }
+};
+
+
+} /* namespace glyf_impl */
+} /* namespace OT */
+
+
+#endif /* OT_GLYF_PATH_BUILDER_HH */
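path_builder_t turns the glyf point stream into move_to/line_to/quadratic_to/cubic_to/close_path calls on an hb_draw_session_t, synthesizing the implied on-curve midpoint between consecutive off-curve points and handling the FLAG_CUBIC extension. Clients never construct it directly; a sketch of reaching it through the public draw API, with illustrative callback bodies that just print the outline:

  #include <hb.h>
  #include <cstdio>

  static void move_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
                       float x, float y, void *)
  { printf ("M %g %g\n", x, y); }

  static void line_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
                       float x, float y, void *)
  { printf ("L %g %g\n", x, y); }

  static void quad_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
                       float cx, float cy, float x, float y, void *)
  { printf ("Q %g %g %g %g\n", cx, cy, x, y); }

  static void cubic_to (hb_draw_funcs_t *, void *, hb_draw_state_t *,
                        float c1x, float c1y, float c2x, float c2y,
                        float x, float y, void *)
  { printf ("C %g %g %g %g %g %g\n", c1x, c1y, c2x, c2y, x, y); }

  static void close_path (hb_draw_funcs_t *, void *, hb_draw_state_t *, void *)
  { printf ("Z\n"); }

  static void
  dump_outline (hb_font_t *font, hb_codepoint_t gid)
  {
    hb_draw_funcs_t *funcs = hb_draw_funcs_create ();
    hb_draw_funcs_set_move_to_func (funcs, move_to, nullptr, nullptr);
    hb_draw_funcs_set_line_to_func (funcs, line_to, nullptr, nullptr);
    hb_draw_funcs_set_quadratic_to_func (funcs, quad_to, nullptr, nullptr);
    hb_draw_funcs_set_cubic_to_func (funcs, cubic_to, nullptr, nullptr);
    hb_draw_funcs_set_close_path_func (funcs, close_path, nullptr, nullptr);

    /* For glyf outlines this ends up in path_builder_t via get_path() above. */
    hb_font_get_glyph_shape (font, gid, funcs, nullptr);

    hb_draw_funcs_destroy (funcs);
  }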
diff --git a/gfx/harfbuzz/src/OT/name/name.hh b/gfx/harfbuzz/src/OT/name/name.hh
new file mode 100644
index 0000000000..c1839f3b68
--- /dev/null
+++ b/gfx/harfbuzz/src/OT/name/name.hh
@@ -0,0 +1,589 @@
+/*
+ * Copyright © 2011,2012 Google, Inc.
+ *
+ * This is part of HarfBuzz, a text shaping library.
+ *
+ * Permission is hereby granted, without written agreement and without
+ * license or royalty fees, to use, copy, modify, and distribute this
+ * software and its documentation for any purpose, provided that the
+ * above copyright notice and the following two paragraphs appear in
+ * all copies of this software.
+ *
+ * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
+ * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
+ * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
+ * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+ * DAMAGE.
+ *
+ * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
+ * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
+ * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
+ * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+ *
+ * Google Author(s): Behdad Esfahbod
+ */
+
+#ifndef OT_NAME_NAME_HH
+#define OT_NAME_NAME_HH
+
+#include "../../hb-open-type.hh"
+#include "../../hb-ot-name-language.hh"
+#include "../../hb-aat-layout.hh"
+#include "../../hb-utf.hh"
+
+
+namespace OT {
+
+template <typename in_utf_t, typename out_utf_t>
+inline unsigned int
+hb_ot_name_convert_utf (hb_bytes_t bytes,
+ unsigned int *text_size /* IN/OUT */,
+ typename out_utf_t::codepoint_t *text /* OUT */)
+{
+ unsigned int src_len = bytes.length / sizeof (typename in_utf_t::codepoint_t);
+ const typename in_utf_t::codepoint_t *src = (const typename in_utf_t::codepoint_t *) bytes.arrayZ;
+ const typename in_utf_t::codepoint_t *src_end = src + src_len;
+
+ typename out_utf_t::codepoint_t *dst = text;
+
+ hb_codepoint_t unicode;
+ const hb_codepoint_t replacement = HB_BUFFER_REPLACEMENT_CODEPOINT_DEFAULT;
+
+ if (text_size && *text_size)
+ {
+ (*text_size)--; /* Save room for NUL-termination. */
+ const typename out_utf_t::codepoint_t *dst_end = text + *text_size;
+
+ while (src < src_end && dst < dst_end)
+ {
+ const typename in_utf_t::codepoint_t *src_next = in_utf_t::next (src, src_end, &unicode, replacement);
+ typename out_utf_t::codepoint_t *dst_next = out_utf_t::encode (dst, dst_end, unicode);
+ if (dst_next == dst)
+ break; /* Out-of-room. */
+
+ dst = dst_next;
+ src = src_next;
+ }
+
+ *text_size = dst - text;
+ *dst = 0; /* NUL-terminate. */
+ }
+
+ /* Accumulate length of rest. */
+ unsigned int dst_len = dst - text;
+ while (src < src_end)
+ {
+ src = in_utf_t::next (src, src_end, &unicode, replacement);
+ dst_len += out_utf_t::encode_len (unicode);
+ }
+ return dst_len;
+}
+
+#define entry_score var.u16[0]
+#define entry_index var.u16[1]
+
+
+/*
+ * name -- Naming
+ * https://docs.microsoft.com/en-us/typography/opentype/spec/name
+ */
+#define HB_OT_TAG_name HB_TAG('n','a','m','e')
+
+#define UNSUPPORTED 42
+
+struct NameRecord
+{
+ hb_language_t language (hb_face_t *face) const
+ {
+#ifndef HB_NO_OT_NAME_LANGUAGE
+ unsigned int p = platformID;
+ unsigned int l = languageID;
+
+ if (p == 3)
+ return _hb_ot_name_language_for_ms_code (l);
+
+ if (p == 1)
+ return _hb_ot_name_language_for_mac_code (l);
+
+#ifndef HB_NO_OT_NAME_LANGUAGE_AAT
+ if (p == 0)
+ return face->table.ltag->get_language (l);
+#endif
+
+#endif
+ return HB_LANGUAGE_INVALID;
+ }
+
+ uint16_t score () const
+ {
+ /* Same order as in cmap::find_best_subtable(). */
+ unsigned int p = platformID;
+ unsigned int e = encodingID;
+
+ /* 32-bit. */
+ if (p == 3 && e == 10) return 0;
+ if (p == 0 && e == 6) return 1;
+ if (p == 0 && e == 4) return 2;
+
+ /* 16-bit. */
+ if (p == 3 && e == 1) return 3;
+ if (p == 0 && e == 3) return 4;
+ if (p == 0 && e == 2) return 5;
+ if (p == 0 && e == 1) return 6;
+ if (p == 0 && e == 0) return 7;
+
+ /* Symbol. */
+ if (p == 3 && e == 0) return 8;
+
+ /* We treat all Mac Latin names as ASCII only. */
+ if (p == 1 && e == 0) return 10; /* 10 is magic number :| */
+
+ return UNSUPPORTED;
+ }
+
+ NameRecord* copy (hb_serialize_context_t *c, const void *base
+#ifdef HB_EXPERIMENTAL_API
+ , const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides
+#endif
+ ) const
+ {
+ TRACE_SERIALIZE (this);
+ HB_UNUSED auto snap = c->snapshot ();
+ auto *out = c->embed (this);
+ if (unlikely (!out)) return_trace (nullptr);
+#ifdef HB_EXPERIMENTAL_API
+ hb_ot_name_record_ids_t record_ids (platformID, encodingID, languageID, nameID);
+ hb_bytes_t* name_bytes;
+
+ if (name_table_overrides->has (record_ids, &name_bytes)) {
+ hb_bytes_t encoded_bytes = *name_bytes;
+ char *name_str_utf16_be = nullptr;
+
+ if (platformID != 1)
+ {
+ unsigned text_size = hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (*name_bytes, nullptr, nullptr);
+
+ text_size++; // leave room for the NUL terminator that hb_ot_name_convert_utf() writes
+ unsigned byte_len = text_size * hb_utf16_be_t::codepoint_t::static_size;
+ name_str_utf16_be = (char *) hb_calloc (byte_len, 1);
+ if (!name_str_utf16_be)
+ {
+ c->revert (snap);
+ return_trace (nullptr);
+ }
+ hb_ot_name_convert_utf<hb_utf8_t, hb_utf16_be_t> (*name_bytes, &text_size,
+ (hb_utf16_be_t::codepoint_t *) name_str_utf16_be);
+
+ unsigned encoded_byte_len = text_size * hb_utf16_be_t::codepoint_t::static_size;
+ if (!encoded_byte_len || !c->check_assign (out->length, encoded_byte_len, HB_SERIALIZE_ERROR_INT_OVERFLOW)) {
+ c->revert (snap);
+ hb_free (name_str_utf16_be);
+ return_trace (nullptr);
+ }
+
+ encoded_bytes = hb_bytes_t (name_str_utf16_be, encoded_byte_len);
+ }
+ else
+ {
+ // Mac platform: copy the UTF-8 string (all ASCII characters) as is.
+ if (!c->check_assign (out->length, encoded_bytes.length, HB_SERIALIZE_ERROR_INT_OVERFLOW)) {
+ c->revert (snap);
+ return_trace (nullptr);
+ }
+ }
+
+ out->offset = 0;
+ c->push ();
+ encoded_bytes.copy (c);
+ c->add_link (out->offset, c->pop_pack (), hb_serialize_context_t::Tail, 0);
+ hb_free (name_str_utf16_be);
+ }
+ else
+#endif
+ {
+ out->offset.serialize_copy (c, offset, base, 0, hb_serialize_context_t::Tail, length);
+ }
+ return_trace (out);
+ }
+
+ bool isUnicode () const
+ {
+ unsigned int p = platformID;
+ unsigned int e = encodingID;
+
+ return (p == 0 ||
+ (p == 3 && (e == 0 || e == 1 || e == 10)));
+ }
+
+ static int cmp (const void *pa, const void *pb)
+ {
+ const NameRecord *a = (const NameRecord *)pa;
+ const NameRecord *b = (const NameRecord *)pb;
+
+ if (a->platformID != b->platformID)
+ return a->platformID - b->platformID;
+
+ if (a->encodingID != b->encodingID)
+ return a->encodingID - b->encodingID;
+
+ if (a->languageID != b->languageID)
+ return a->languageID - b->languageID;
+
+ if (a->nameID != b->nameID)
+ return a->nameID - b->nameID;
+
+ if (a->length != b->length)
+ return a->length - b->length;
+
+ return 0;
+ }
+
+ bool sanitize (hb_sanitize_context_t *c, const void *base) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) && offset.sanitize (c, base, length));
+ }
+
+ HBUINT16 platformID; /* Platform ID. */
+ HBUINT16 encodingID; /* Platform-specific encoding ID. */
+ HBUINT16 languageID; /* Language ID. */
+ HBUINT16 nameID; /* Name ID. */
+ HBUINT16 length; /* String length (in bytes). */
+ NNOffset16To<UnsizedArrayOf<HBUINT8>>
+ offset; /* String offset from start of storage area (in bytes). */
+ public:
+ DEFINE_SIZE_STATIC (12);
+};
+
+static int
+_hb_ot_name_entry_cmp_key (const void *pa, const void *pb, bool exact)
+{
+ const hb_ot_name_entry_t *a = (const hb_ot_name_entry_t *) pa;
+ const hb_ot_name_entry_t *b = (const hb_ot_name_entry_t *) pb;
+
+ /* Compare by name_id, then language. */
+
+ if (a->name_id != b->name_id)
+ return a->name_id - b->name_id;
+
+ if (a->language == b->language) return 0;
+ if (!a->language) return -1;
+ if (!b->language) return +1;
+
+ const char *astr = hb_language_to_string (a->language);
+ const char *bstr = hb_language_to_string (b->language);
+
+ signed c = strcmp (astr, bstr);
+
+ // 'a' is the user request, and 'b' is the string in the font.
+ // If e.g. the user asks for "en-us" and the font has "en", approve.
+ if (!exact && c &&
+ hb_language_matches (b->language, a->language))
+ return 0;
+
+ return c;
+}
+
+static int
+_hb_ot_name_entry_cmp (const void *pa, const void *pb)
+{
+ /* Compare by name_id, then language, then score, then index. */
+
+ int v = _hb_ot_name_entry_cmp_key (pa, pb, true);
+ if (v)
+ return v;
+
+ const hb_ot_name_entry_t *a = (const hb_ot_name_entry_t *) pa;
+ const hb_ot_name_entry_t *b = (const hb_ot_name_entry_t *) pb;
+
+ if (a->entry_score != b->entry_score)
+ return a->entry_score - b->entry_score;
+
+ if (a->entry_index != b->entry_index)
+ return a->entry_index - b->entry_index;
+
+ return 0;
+}
+
+struct name
+{
+ static constexpr hb_tag_t tableTag = HB_OT_TAG_name;
+
+ unsigned int get_size () const
+ { return min_size + count * nameRecordZ.item_size; }
+
+ template <typename Iterator,
+ hb_requires (hb_is_source_of (Iterator, const NameRecord &))>
+ bool serialize (hb_serialize_context_t *c,
+ Iterator it,
+ const void *src_string_pool
+#ifdef HB_EXPERIMENTAL_API
+ , const hb_vector_t<hb_ot_name_record_ids_t>& insert_name_records
+ , const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides
+#endif
+ )
+ {
+ TRACE_SERIALIZE (this);
+
+ if (unlikely (!c->extend_min ((*this)))) return_trace (false);
+
+ unsigned total_count = it.len ()
+#ifdef HB_EXPERIMENTAL_API
+ + insert_name_records.length
+#endif
+ ;
+ this->format = 0;
+ if (!c->check_assign (this->count, total_count, HB_SERIALIZE_ERROR_INT_OVERFLOW))
+ return false;
+
+ NameRecord *name_records = (NameRecord *) hb_calloc (total_count, NameRecord::static_size);
+ if (unlikely (!name_records)) return_trace (false);
+
+ hb_array_t<NameRecord> records (name_records, total_count);
+
+ for (const NameRecord& record : it)
+ {
+ hb_memcpy (name_records, &record, NameRecord::static_size);
+ name_records++;
+ }
+
+#ifdef HB_EXPERIMENTAL_API
+ for (unsigned i = 0; i < insert_name_records.length; i++)
+ {
+ const hb_ot_name_record_ids_t& ids = insert_name_records[i];
+ NameRecord record;
+ record.platformID = ids.platform_id;
+ record.encodingID = ids.encoding_id;
+ record.languageID = ids.language_id;
+ record.nameID = ids.name_id;
+ record.length = 0; // handled in NameRecord copy()
+ record.offset = 0;
+ memcpy (name_records, &record, NameRecord::static_size);
+ name_records++;
+ }
+#endif
+
+ records.qsort ();
+
+ c->copy_all (records,
+ src_string_pool
+#ifdef HB_EXPERIMENTAL_API
+ , name_table_overrides
+#endif
+ );
+ hb_free (records.arrayZ);
+
+
+ if (unlikely (c->ran_out_of_room ())) return_trace (false);
+
+ this->stringOffset = c->length ();
+
+ return_trace (true);
+ }
+
+ bool subset (hb_subset_context_t *c) const
+ {
+ TRACE_SUBSET (this);
+
+ name *name_prime = c->serializer->start_embed<name> ();
+ if (unlikely (!name_prime)) return_trace (false);
+
+#ifdef HB_EXPERIMENTAL_API
+ const hb_hashmap_t<hb_ot_name_record_ids_t, hb_bytes_t> *name_table_overrides =
+ &c->plan->name_table_overrides;
+#endif
+
+ auto it =
+ + nameRecordZ.as_array (count)
+ | hb_filter (c->plan->name_ids, &NameRecord::nameID)
+ | hb_filter (c->plan->name_languages, &NameRecord::languageID)
+ | hb_filter ([&] (const NameRecord& namerecord) {
+ return
+ (c->plan->flags & HB_SUBSET_FLAGS_NAME_LEGACY)
+ || namerecord.isUnicode ();
+ })
+#ifdef HB_EXPERIMENTAL_API
+ | hb_filter ([&] (const NameRecord& namerecord) {
+ if (name_table_overrides->is_empty ())
+ return true;
+ hb_ot_name_record_ids_t rec_ids (namerecord.platformID,
+ namerecord.encodingID,
+ namerecord.languageID,
+ namerecord.nameID);
+
+ hb_bytes_t *p;
+ if (name_table_overrides->has (rec_ids, &p) &&
+ (*p).length == 0)
+ return false;
+ return true;
+ })
+#endif
+ ;
+
+#ifdef HB_EXPERIMENTAL_API
+ hb_hashmap_t<hb_ot_name_record_ids_t, unsigned> retained_name_record_ids;
+ for (const NameRecord& rec : it)
+ {
+ hb_ot_name_record_ids_t rec_ids (rec.platformID,
+ rec.encodingID,
+ rec.languageID,
+ rec.nameID);
+ retained_name_record_ids.set (rec_ids, 1);
+ }
+
+ hb_vector_t<hb_ot_name_record_ids_t> insert_name_records;
+ if (!name_table_overrides->is_empty ())
+ {
+ if (unlikely (!insert_name_records.alloc (name_table_overrides->get_population (), true)))
+ return_trace (false);
+ for (const auto& record_ids : name_table_overrides->keys ())
+ {
+ if (name_table_overrides->get (record_ids).length == 0)
+ continue;
+ if (retained_name_record_ids.has (record_ids))
+ continue;
+ insert_name_records.push (record_ids);
+ }
+ }
+#endif
+
+ return (name_prime->serialize (c->serializer, it,
+ std::addressof (this + stringOffset)
+#ifdef HB_EXPERIMENTAL_API
+ , insert_name_records
+ , name_table_overrides
+#endif
+ ));
+ }
+
+ bool sanitize_records (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ const void *string_pool = (this+stringOffset).arrayZ;
+ return_trace (nameRecordZ.sanitize (c, count, string_pool));
+ }
+
+ bool sanitize (hb_sanitize_context_t *c) const
+ {
+ TRACE_SANITIZE (this);
+ return_trace (c->check_struct (this) &&
+ likely (format == 0 || format == 1) &&
+ c->check_array (nameRecordZ.arrayZ, count) &&
+ c->check_range (this, stringOffset) &&
+ sanitize_records (c));
+ }
+
+ struct accelerator_t
+ {
+ accelerator_t (hb_face_t *face)
+ {
+ this->table = hb_sanitize_context_t ().reference_table<name> (face);
+ assert (this->table.get_length () >= this->table->stringOffset);
+ this->pool = (const char *) (const void *) (this->table+this->table->stringOffset);
+ this->pool_len = this->table.get_length () - this->table->stringOffset;
+ const hb_array_t<const NameRecord> all_names (this->table->nameRecordZ.arrayZ,
+ this->table->count);
+
+ this->names.alloc (all_names.length, true);
+
+ for (unsigned int i = 0; i < all_names.length; i++)
+ {
+ hb_ot_name_entry_t *entry = this->names.push ();
+
+ entry->name_id = all_names[i].nameID;
+ entry->language = all_names[i].language (face);
+ entry->entry_score = all_names[i].score ();
+ entry->entry_index = i;
+ }
+
+ this->names.qsort (_hb_ot_name_entry_cmp);
+ /* Walk and pick best only for each name_id,language pair,
+ * while dropping unsupported encodings. */
+ unsigned int j = 0;
+ for (unsigned int i = 0; i < this->names.length; i++)
+ {
+ if (this->names[i].entry_score == UNSUPPORTED ||
+ this->names[i].language == HB_LANGUAGE_INVALID)
+ continue;
+ if (i &&
+ this->names[i - 1].name_id == this->names[i].name_id &&
+ this->names[i - 1].language == this->names[i].language)
+ continue;
+ this->names[j++] = this->names[i];
+ }
+ this->names.resize (j);
+ }
+ ~accelerator_t ()
+ {
+ this->table.destroy ();
+ }
+
+ int get_index (hb_ot_name_id_t name_id,
+ hb_language_t language,
+ unsigned int *width=nullptr) const
+ {
+ const hb_ot_name_entry_t key = {name_id, {0}, language};
+ const hb_ot_name_entry_t *entry = hb_bsearch (key, (const hb_ot_name_entry_t *) this->names,
+ this->names.length,
+ sizeof (hb_ot_name_entry_t),
+ _hb_ot_name_entry_cmp_key,
+ true);
+
+ if (!entry)
+ {
+ entry = hb_bsearch (key, (const hb_ot_name_entry_t *) this->names,
+ this->names.length,
+ sizeof (hb_ot_name_entry_t),
+ _hb_ot_name_entry_cmp_key,
+ false);
+ }
+
+ if (!entry)
+ return -1;
+
+ if (width)
+ *width = entry->entry_score < 10 ? 2 : 1;
+
+ return entry->entry_index;
+ }
+
+ hb_bytes_t get_name (unsigned int idx) const
+ {
+ const hb_array_t<const NameRecord> all_names (table->nameRecordZ.arrayZ, table->count);
+ const NameRecord &record = all_names[idx];
+ const hb_bytes_t string_pool (pool, pool_len);
+ return string_pool.sub_array (record.offset, record.length);
+ }
+
+ private:
+ const char *pool;
+ unsigned int pool_len;
+ public:
+ hb_blob_ptr_t<name> table;
+ hb_vector_t<hb_ot_name_entry_t> names;
+ };
+
+ public:
+ /* We only implement format 0 for now. */
+ HBUINT16 format; /* Format selector (=0/1). */
+ HBUINT16 count; /* Number of name records. */
+ NNOffset16To<UnsizedArrayOf<HBUINT8>>
+ stringOffset; /* Offset to start of string storage (from start of table). */
+ UnsizedArrayOf<NameRecord>
+ nameRecordZ; /* The name records where count is the number of records. */
+ public:
+ DEFINE_SIZE_ARRAY (6, nameRecordZ);
+};
+
+#undef entry_index
+#undef entry_score
+
+struct name_accelerator_t : name::accelerator_t {
+ name_accelerator_t (hb_face_t *face) : name::accelerator_t (face) {}
+};
+
+} /* namespace OT */
+
+
+#endif /* OT_NAME_NAME_HH */
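The accelerator above sorts records by (name_id, language, score, index), keeps only the best-scored supported encoding per (name_id, language) pair, and get_index() falls back from exact to fuzzy language matching. At the API level this reduces to asking for a name id in a language, with the UTF conversion from the top of the file handling re-encoding; a short usage sketch, with the buffer size and language as illustrative choices:

  #include <hb-ot.h>
  #include <cstdio>

  static void
  print_family_name (hb_face_t *face)
  {
    char buf[128];
    unsigned int len = sizeof (buf); /* in: capacity; out: bytes written, NUL not counted */
    if (hb_ot_name_get_utf8 (face,
                             HB_OT_NAME_ID_FONT_FAMILY,
                             hb_language_from_string ("en", -1),
                             &len, buf))
      printf ("family: %s\n", buf);
  }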