Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1711472 - Update HarfBuzz to 2.8.1. r=jfkthame

Differential Revision: https://phabricator.services.mozilla.com/D115212

Parent: 5e87b4eae0
Commit: 0e8b28fb35
@@ -20,7 +20,6 @@ EXTRA_DIST = \
meson.build \
meson_options.txt \
subprojects/expat.wrap \
subprojects/fontconfig.wrap \
subprojects/freetype2.wrap \
subprojects/glib.wrap \
subprojects/libffi.wrap \
@@ -1,3 +1,12 @@
Overview of changes leading to 2.8.1
Tuesday, May 4, 2021
====================================
- Subsetter now fully supports GSUB/GPOS/GDEF tables (including variations); as
such, layout tables are retained by subsetter by default. (Garret Rieger, Qunxin Liu)
- Build scripts no longer check for FontConfig as HarfBuzz does not use it.
- hb-view supports iTerm2 and kitty inline image protocols (Khaled Hosny),
it can also use Chafa for terminal graphics if available (Hans Petter Jansson).

Overview of changes leading to 2.8.0
Tuesday, March 16, 2021
====================================
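Since retaining GSUB/GPOS/GDEF is now the subsetter's default, no extra configuration is needed to keep layout rules when producing a subset. A minimal sketch using the public hb-subset API of this era (the font path and codepoints are placeholders, and error handling is omitted):

#include <hb.h>
#include <hb-subset.h>

int main ()
{
  hb_blob_t *blob = hb_blob_create_from_file ("font.ttf");
  hb_face_t *face = hb_face_create (blob, 0);

  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  hb_set_add (hb_subset_input_unicode_set (input), 'A');
  hb_set_add (hb_subset_input_unicode_set (input), 'B');

  /* Layout tables are kept by default as of 2.8.1. */
  hb_face_t *subset = hb_subset (face, input);

  hb_blob_t *out = hb_face_reference_blob (subset);
  /* ... write the blob's data to disk with hb_blob_get_data () ... */

  hb_blob_destroy (out);
  hb_face_destroy (subset);
  hb_subset_input_destroy (input);
  hb_face_destroy (face);
  hb_blob_destroy (blob);
  return 0;
}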
@@ -1,7 +1,7 @@
This directory contains the HarfBuzz source from the upstream repo:
https://github.com/harfbuzz/harfbuzz

Current version: 2.8.0 [commit 03538e872a0610a65fad692b33d3646f387cf578]
Current version: 2.8.1 [commit b37f03f16b39d397a626f097858e9ae550234ca0]

UPDATING:

@@ -1,6 +1,6 @@
AC_PREREQ([2.64])
AC_INIT([HarfBuzz],
[2.8.0],
[2.8.1],
[https://github.com/harfbuzz/harfbuzz/issues/new],
[harfbuzz],
[http://harfbuzz.org/])
@@ -214,21 +214,21 @@ AM_CONDITIONAL(HAVE_CAIRO_FT, $have_cairo_ft)

dnl ==========================================================================

AC_ARG_WITH(fontconfig,
[AS_HELP_STRING([--with-fontconfig=@<:@yes/no/auto@:>@],
[Use fontconfig @<:@default=auto@:>@])],,
[with_fontconfig=auto])
have_fontconfig=false
if test "x$with_fontconfig" = "xyes" -o "x$with_fontconfig" = "xauto"; then
PKG_CHECK_MODULES(FONTCONFIG, fontconfig, have_fontconfig=true, :)
AC_ARG_WITH(chafa,
[AS_HELP_STRING([--with-chafa=@<:@yes/no/auto@:>@],
[Use chafa @<:@default=auto@:>@])],,
[with_chafa=auto])
have_chafa=false
if test "x$with_chafa" = "xyes" -o "x$with_chafa" = "xauto"; then
PKG_CHECK_MODULES(CHAFA, chafa >= 1.6.0, have_chafa=true, :)
fi
if test "x$with_fontconfig" = "xyes" -a "x$have_fontconfig" != "xtrue"; then
AC_MSG_ERROR([fontconfig support requested but not found])
if test "x$with_chafa" = "xyes" -a "x$have_chafa" != "xtrue"; then
AC_MSG_ERROR([chafa support requested but not found])
fi
if $have_fontconfig; then
AC_DEFINE(HAVE_FONTCONFIG, 1, [Have fontconfig library])
if $have_chafa; then
AC_DEFINE(HAVE_CHAFA, 1, [Have chafa terminal graphics library])
fi
AM_CONDITIONAL(HAVE_FONTCONFIG, $have_fontconfig)
AM_CONDITIONAL(HAVE_CHAFA, $have_chafa)

dnl ==========================================================================

@@ -432,6 +432,7 @@ test/shaping/data/in-house/Makefile
test/shaping/data/text-rendering-tests/Makefile
test/subset/Makefile
test/subset/data/Makefile
test/subset/data/repack_tests/Makefile
docs/Makefile
docs/version.xml
])
@@ -465,7 +466,7 @@ Font callbacks (the more the merrier):

Tools used for command-line utilities:
Cairo: ${have_cairo}
Fontconfig: ${have_fontconfig}
Chafa: ${have_chafa}

Additional shapers:
Graphite2: ${have_graphite2}
@@ -342,7 +342,7 @@ test_gsub_would_substitute_SOURCES = test-gsub-would-substitute.cc
test_gsub_would_substitute_CPPFLAGS = $(HBCFLAGS) $(FREETYPE_CFLAGS)
test_gsub_would_substitute_LDADD = libharfbuzz.la $(HBLIBS) $(FREETYPE_LIBS)

COMPILED_TESTS = test-algs test-array test-iter test-meta test-number test-ot-tag test-unicode-ranges test-bimap
COMPILED_TESTS = test-algs test-array test-iter test-meta test-number test-ot-tag test-priority-queue test-unicode-ranges test-bimap test-repacker
COMPILED_TESTS_CPPFLAGS = $(HBCFLAGS) -DMAIN -UNDEBUG
COMPILED_TESTS_LDADD = libharfbuzz.la $(HBLIBS)
check_PROGRAMS += $(COMPILED_TESTS)
@@ -356,6 +356,14 @@ test_array_SOURCES = test-array.cc
test_array_CPPFLAGS = $(HBCFLAGS)
test_array_LDADD = libharfbuzz.la $(HBLIBS)

test_priority_queue_SOURCES = test-priority-queue.cc hb-static.cc
test_priority_queue_CPPFLAGS = $(HBCFLAGS)
test_priority_queue_LDADD = libharfbuzz.la $(HBLIBS)

test_repacker_SOURCES = test-repacker.cc hb-static.cc
test_repacker_CPPFLAGS = $(HBCFLAGS)
test_repacker_LDADD = libharfbuzz.la libharfbuzz-subset.la $(HBLIBS)

test_iter_SOURCES = test-iter.cc hb-static.cc
test_iter_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_iter_LDADD = $(COMPILED_TESTS_LDADD)
@@ -167,6 +167,7 @@ HB_BASE_sources = \
hb-unicode.hh \
hb-utf.hh \
hb-vector.hh \
hb-priority-queue.hh \
hb.hh \
$(NULL)

@@ -268,6 +269,7 @@ HB_SUBSET_sources = \
hb-subset-plan.hh \
hb-subset.cc \
hb-subset.hh \
hb-repacker.hh \
$(NULL)

HB_SUBSET_headers = \
@@ -25,10 +25,8 @@ Input files:
"""

import collections
import html
from html.parser import HTMLParser
def write (s):
sys.stdout.flush ()
sys.stdout.buffer.write (s.encode ('utf-8'))
import itertools
import re
import sys
@@ -37,16 +35,16 @@ import unicodedata
if len (sys.argv) != 3:
sys.exit (__doc__)

from html import unescape
def html_unescape (parser, entity):
return unescape (entity)

def expect (condition, message=None):
if not condition:
if message is None:
raise AssertionError
raise AssertionError (message)

def write (s):
sys.stdout.flush ()
sys.stdout.buffer.write (s.encode ('utf-8'))

DEFAULT_LANGUAGE_SYSTEM = ''

# from https://www-01.sil.org/iso639-3/iso-639-3.tab
@@ -383,10 +381,10 @@ class OpenTypeRegistryParser (HTMLParser):
self._current_tr[-1] += data

def handle_charref (self, name):
self.handle_data (html_unescape (self, '&#%s;' % name))
self.handle_data (html.unescape ('&#%s;' % name))

def handle_entityref (self, name):
self.handle_data (html_unescape (self, '&%s;' % name))
self.handle_data (html.unescape ('&%s;' % name))

def parse (self, filename):
"""Parse the OpenType language system tag registry.
@ -54,7 +54,7 @@ struct Anchor
|
|||
DEFINE_SIZE_STATIC (4);
|
||||
};
|
||||
|
||||
typedef LArrayOf<Anchor> GlyphAnchors;
|
||||
typedef Array32Of<Anchor> GlyphAnchors;
|
||||
|
||||
struct ankr
|
||||
{
|
||||
|
@ -64,7 +64,7 @@ struct ankr
|
|||
unsigned int i,
|
||||
unsigned int num_glyphs) const
|
||||
{
|
||||
const NNOffsetTo<GlyphAnchors> *offset = (this+lookupTable).get_value (glyph_id, num_glyphs);
|
||||
const NNOffset16To<GlyphAnchors> *offset = (this+lookupTable).get_value (glyph_id, num_glyphs);
|
||||
if (!offset)
|
||||
return Null (Anchor);
|
||||
const GlyphAnchors &anchors = &(this+anchorData) + *offset;
|
||||
|
@ -83,9 +83,9 @@ struct ankr
|
|||
protected:
|
||||
HBUINT16 version; /* Version number (set to zero) */
|
||||
HBUINT16 flags; /* Flags (currently unused; set to zero) */
|
||||
LOffsetTo<Lookup<NNOffsetTo<GlyphAnchors>>>
|
||||
Offset32To<Lookup<NNOffset16To<GlyphAnchors>>>
|
||||
lookupTable; /* Offset to the table's lookup table */
|
||||
LNNOffsetTo<HBUINT8>
|
||||
NNOffset32To<HBUINT8>
|
||||
anchorData; /* Offset to the glyph data table */
|
||||
|
||||
public:
|
||||
|
|
|
@ -164,7 +164,7 @@ struct LookupSegmentArray
|
|||
|
||||
HBGlyphID last; /* Last GlyphID in this segment */
|
||||
HBGlyphID first; /* First GlyphID in this segment */
|
||||
NNOffsetTo<UnsizedArrayOf<T>>
|
||||
NNOffset16To<UnsizedArrayOf<T>>
|
||||
valuesZ; /* A 16-bit offset from the start of
|
||||
* the table to the data. */
|
||||
public:
|
||||
|
@ -659,7 +659,7 @@ struct ClassTable
|
|||
}
|
||||
protected:
|
||||
HBGlyphID firstGlyph; /* First glyph index included in the trimmed array. */
|
||||
ArrayOf<HBUCHAR> classArray; /* The class codes (indexed by glyph index minus
|
||||
Array16Of<HBUCHAR> classArray; /* The class codes (indexed by glyph index minus
|
||||
* firstGlyph). */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (4, classArray);
|
||||
|
@@ -678,7 +678,8 @@ struct ObsoleteTypes
const void *base,
const T *array)
{
return (offset - ((const char *) array - (const char *) base)) / T::static_size;
/* https://github.com/harfbuzz/harfbuzz/issues/2816 */
return (offset - unsigned ((const char *) array - (const char *) base)) / T::static_size;
}
template <typename T>
static unsigned int byteOffsetToIndex (unsigned int offset,
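The cast added above keeps the subtraction in unsigned arithmetic instead of letting the pointer difference (a signed ptrdiff_t) promote the whole expression to a wider signed type; the linked issue has the original report. A generic, self-contained illustration of the difference (not HarfBuzz code, the values are made up):

#include <cstdio>

int main ()
{
  unsigned int offset = 4;   // stand-in for the byte offset read from the font
  long diff = 8;             // stand-in for (const char *) array - (const char *) base

  long promoted = offset - diff;                    // -4: 64-bit signed arithmetic
  unsigned int wrapped = offset - unsigned (diff);  // wraps to 0xFFFFFFFC: 32-bit unsigned arithmetic

  printf ("%ld %u\n", promoted, wrapped);
  return 0;
}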
@ -144,7 +144,7 @@ struct FeatureName
|
|||
protected:
|
||||
HBUINT16 feature; /* Feature type. */
|
||||
HBUINT16 nSettings; /* The number of records in the setting name array. */
|
||||
LNNOffsetTo<UnsizedArrayOf<SettingName>>
|
||||
NNOffset32To<UnsizedArrayOf<SettingName>>
|
||||
settingTableZ; /* Offset in bytes from the beginning of this table to
|
||||
* this feature's setting name array. The actual type of
|
||||
* record this offset refers to will depend on the
|
||||
|
|
|
@ -79,7 +79,7 @@ struct DecompositionAction
|
|||
* to decompose before more frequent ones. The ligatures
|
||||
* on the line of text will decompose in increasing
|
||||
* value of this field. */
|
||||
ArrayOf<HBUINT16>
|
||||
Array16Of<HBUINT16>
|
||||
decomposedglyphs;
|
||||
/* Number of 16-bit glyph indexes that follow;
|
||||
* the ligature will be decomposed into these glyphs.
|
||||
|
@ -310,7 +310,7 @@ struct WidthDeltaPair
|
|||
DEFINE_SIZE_STATIC (24);
|
||||
};
|
||||
|
||||
typedef OT::LArrayOf<WidthDeltaPair> WidthDeltaCluster;
|
||||
typedef OT::Array32Of<WidthDeltaPair> WidthDeltaCluster;
|
||||
|
||||
struct JustificationCategory
|
||||
{
|
||||
|
@ -358,20 +358,20 @@ struct JustificationHeader
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<JustificationCategory>
|
||||
Offset16To<JustificationCategory>
|
||||
justClassTable; /* Offset to the justification category state table. */
|
||||
OffsetTo<WidthDeltaCluster>
|
||||
Offset16To<WidthDeltaCluster>
|
||||
wdcTable; /* Offset from start of justification table to start
|
||||
* of the subtable containing the width delta factors
|
||||
* for the glyphs in your font.
|
||||
*
|
||||
* The width delta clusters table. */
|
||||
OffsetTo<PostcompensationActionChain>
|
||||
Offset16To<PostcompensationActionChain>
|
||||
pcTable; /* Offset from start of justification table to start
|
||||
* of postcompensation subtable (set to zero if none).
|
||||
*
|
||||
* The postcompensation subtable, if present in the font. */
|
||||
Lookup<OffsetTo<WidthDeltaCluster>>
|
||||
Lookup<Offset16To<WidthDeltaCluster>>
|
||||
lookupTable; /* Lookup table associating glyphs with width delta
|
||||
* clusters. See the description of Width Delta Clusters
|
||||
* table for details on how to interpret the lookup values. */
|
||||
|
@ -398,13 +398,13 @@ struct just
|
|||
FixedVersion<>version; /* Version of the justification table
|
||||
* (0x00010000u for version 1.0). */
|
||||
HBUINT16 format; /* Format of the justification table (set to 0). */
|
||||
OffsetTo<JustificationHeader>
|
||||
Offset16To<JustificationHeader>
|
||||
horizData; /* Byte offset from the start of the justification table
|
||||
* to the header for tables that contain justification
|
||||
* information for horizontal text.
|
||||
* If you are not including this information,
|
||||
* store 0. */
|
||||
OffsetTo<JustificationHeader>
|
||||
Offset16To<JustificationHeader>
|
||||
vertData; /* ditto, vertical */
|
||||
|
||||
public:
|
||||
|
|
|
@ -710,18 +710,18 @@ struct KerxSubTableFormat6
|
|||
{
|
||||
struct Long
|
||||
{
|
||||
LNNOffsetTo<Lookup<HBUINT32>> rowIndexTable;
|
||||
LNNOffsetTo<Lookup<HBUINT32>> columnIndexTable;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD32>> array;
|
||||
NNOffset32To<Lookup<HBUINT32>> rowIndexTable;
|
||||
NNOffset32To<Lookup<HBUINT32>> columnIndexTable;
|
||||
NNOffset32To<UnsizedArrayOf<FWORD32>> array;
|
||||
} l;
|
||||
struct Short
|
||||
{
|
||||
LNNOffsetTo<Lookup<HBUINT16>> rowIndexTable;
|
||||
LNNOffsetTo<Lookup<HBUINT16>> columnIndexTable;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD>> array;
|
||||
NNOffset32To<Lookup<HBUINT16>> rowIndexTable;
|
||||
NNOffset32To<Lookup<HBUINT16>> columnIndexTable;
|
||||
NNOffset32To<UnsizedArrayOf<FWORD>> array;
|
||||
} s;
|
||||
} u;
|
||||
LNNOffsetTo<UnsizedArrayOf<FWORD>> vector;
|
||||
NNOffset32To<UnsizedArrayOf<FWORD>> vector;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 24);
|
||||
};
|
||||
|
|
|
@ -304,7 +304,7 @@ struct ContextualSubtable
|
|||
bool mark_set;
|
||||
unsigned int mark;
|
||||
const ContextualSubtable *table;
|
||||
const UnsizedOffsetListOf<Lookup<HBGlyphID>, HBUINT, false> &subs;
|
||||
const UnsizedListOfOffset16To<Lookup<HBGlyphID>, HBUINT, false> &subs;
|
||||
};
|
||||
|
||||
bool apply (hb_aat_apply_context_t *c) const
|
||||
|
@ -348,7 +348,7 @@ struct ContextualSubtable
|
|||
protected:
|
||||
StateTable<Types, EntryData>
|
||||
machine;
|
||||
NNOffsetTo<UnsizedOffsetListOf<Lookup<HBGlyphID>, HBUINT, false>, HBUINT>
|
||||
NNOffsetTo<UnsizedListOfOffset16To<Lookup<HBGlyphID>, HBUINT, false>, HBUINT>
|
||||
substitutionTables;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (20);
|
||||
|
|
|
@ -58,7 +58,7 @@ struct opbdFormat0
|
|||
bool get_bounds (hb_font_t *font, hb_codepoint_t glyph_id,
|
||||
hb_glyph_extents_t *extents, const void *base) const
|
||||
{
|
||||
const OffsetTo<OpticalBounds> *bounds_offset = lookupTable.get_value (glyph_id, font->face->get_num_glyphs ());
|
||||
const Offset16To<OpticalBounds> *bounds_offset = lookupTable.get_value (glyph_id, font->face->get_num_glyphs ());
|
||||
if (!bounds_offset) return false;
|
||||
const OpticalBounds &bounds = base+*bounds_offset;
|
||||
|
||||
|
@ -79,7 +79,7 @@ struct opbdFormat0
|
|||
}
|
||||
|
||||
protected:
|
||||
Lookup<OffsetTo<OpticalBounds>>
|
||||
Lookup<Offset16To<OpticalBounds>>
|
||||
lookupTable; /* Lookup table associating glyphs with the four
|
||||
* int16 values for the left-side, top-side,
|
||||
* right-side, and bottom-side optical bounds. */
|
||||
|
@ -92,7 +92,7 @@ struct opbdFormat1
|
|||
bool get_bounds (hb_font_t *font, hb_codepoint_t glyph_id,
|
||||
hb_glyph_extents_t *extents, const void *base) const
|
||||
{
|
||||
const OffsetTo<OpticalBounds> *bounds_offset = lookupTable.get_value (glyph_id, font->face->get_num_glyphs ());
|
||||
const Offset16To<OpticalBounds> *bounds_offset = lookupTable.get_value (glyph_id, font->face->get_num_glyphs ());
|
||||
if (!bounds_offset) return false;
|
||||
const OpticalBounds &bounds = base+*bounds_offset;
|
||||
|
||||
|
@ -116,7 +116,7 @@ struct opbdFormat1
|
|||
}
|
||||
|
||||
protected:
|
||||
Lookup<OffsetTo<OpticalBounds>>
|
||||
Lookup<Offset16To<OpticalBounds>>
|
||||
lookupTable; /* Lookup table associating glyphs with the four
|
||||
* int16 values for the left-side, top-side,
|
||||
* right-side, and bottom-side optical bounds. */
|
||||
|
|
|
@ -66,7 +66,7 @@ struct TrackTableEntry
|
|||
NameID trackNameID; /* The 'name' table index for this track.
|
||||
* (a short word or phrase like "loose"
|
||||
* or "very tight") */
|
||||
NNOffsetTo<UnsizedArrayOf<FWORD>>
|
||||
NNOffset16To<UnsizedArrayOf<FWORD>>
|
||||
valuesZ; /* Offset from start of tracking table to
|
||||
* per-size tracking values for this track. */
|
||||
|
||||
|
@ -141,7 +141,7 @@ struct TrackData
|
|||
protected:
|
||||
HBUINT16 nTracks; /* Number of separate tracks included in this table. */
|
||||
HBUINT16 nSizes; /* Number of point sizes included in this table. */
|
||||
LNNOffsetTo<UnsizedArrayOf<HBFixed>>
|
||||
NNOffset32To<UnsizedArrayOf<HBFixed>>
|
||||
sizeTable; /* Offset from start of the tracking table to
|
||||
* Array[nSizes] of size values.. */
|
||||
UnsizedArrayOf<TrackTableEntry>
|
||||
|
@ -212,10 +212,10 @@ struct trak
|
|||
FixedVersion<>version; /* Version of the tracking table
|
||||
* (0x00010000u for version 1.0). */
|
||||
HBUINT16 format; /* Format of the tracking table (set to 0). */
|
||||
OffsetTo<TrackData>
|
||||
Offset16To<TrackData>
|
||||
horizData; /* Offset from start of tracking table to TrackData
|
||||
* for horizontal text (or 0 if none). */
|
||||
OffsetTo<TrackData>
|
||||
Offset16To<TrackData>
|
||||
vertData; /* Offset from start of tracking table to TrackData
|
||||
* for vertical text (or 0 if none). */
|
||||
HBUINT16 reserved; /* Reserved. Set to 0. */
|
||||
|
|
|
@ -50,7 +50,7 @@ struct FTStringRange
|
|||
}
|
||||
|
||||
protected:
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
NNOffset16To<UnsizedArrayOf<HBUINT8>>
|
||||
tag; /* Offset from the start of the table to
|
||||
* the beginning of the string */
|
||||
HBUINT16 length; /* String length (in bytes) */
|
||||
|
@ -80,7 +80,7 @@ struct ltag
|
|||
protected:
|
||||
HBUINT32 version; /* Table version; currently 1 */
|
||||
HBUINT32 flags; /* Table flags; currently none defined */
|
||||
LArrayOf<FTStringRange>
|
||||
Array32Of<FTStringRange>
|
||||
tagRanges; /* Range for each tag's string */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (12, tagRanges);
|
||||
|
|
|
@ -219,7 +219,7 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
|
|||
unsigned P = sizeof (Type),
|
||||
hb_enable_if (P == 1)>
|
||||
const T *as () const
|
||||
{ return length < hb_null_size (T) ? &Null (T) : reinterpret_cast<const T *> (arrayZ); }
|
||||
{ return length < hb_min_size (T) ? &Null (T) : reinterpret_cast<const T *> (arrayZ); }
|
||||
|
||||
template <typename T,
|
||||
unsigned P = sizeof (Type),
|
||||
|
|
|
@ -263,7 +263,7 @@ struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
|
|||
|
||||
T *ip = c->allocate_size<T> (T::static_size);
|
||||
if (unlikely (!ip)) return_trace (false);
|
||||
return_trace (c->check_assign (*ip, value));
|
||||
return_trace (c->check_assign (*ip, value, HB_SERIALIZE_ERROR_INT_OVERFLOW));
|
||||
}
|
||||
|
||||
template <typename V>
|
||||
|
|
|
@ -86,6 +86,9 @@
|
|||
#define HB_NO_LEGACY
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_CONFIG_OVERRIDE_H
|
||||
#include "config-override.h"
|
||||
#endif
|
||||
|
||||
/* Closure of options. */
|
||||
|
||||
|
@ -155,9 +158,5 @@
|
|||
#endif
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_CONFIG_OVERRIDE_H
|
||||
#include "config-override.h"
|
||||
#endif
|
||||
|
||||
|
||||
#endif /* HB_CONFIG_HH */
|
||||
|
|
|
@ -438,6 +438,10 @@ struct hb_no_trace_t {
|
|||
#define TRACE_SUBSET(this) hb_no_trace_t<bool> trace
|
||||
#endif
|
||||
|
||||
#ifndef HB_DEBUG_SUBSET_REPACK
|
||||
#define HB_DEBUG_SUBSET_REPACK (HB_DEBUG+0)
|
||||
#endif
|
||||
|
||||
#ifndef HB_DEBUG_DISPATCH
|
||||
#define HB_DEBUG_DISPATCH ( \
|
||||
HB_DEBUG_APPLY + \
|
||||
|
|
|
@ -623,7 +623,7 @@ _hb_directwrite_shape_full (hb_shape_plan_t *shape_plan,
|
|||
* but we never attempt to shape a word longer than 64K characters
|
||||
* in a single gfxShapedWord, so we cannot exceed that limit.
|
||||
*/
|
||||
uint32_t textLength = buffer->len;
|
||||
uint32_t textLength = chars_len;
|
||||
|
||||
TextAnalysis analysis (textString, textLength, nullptr, readingDirection);
|
||||
TextAnalysis::Run *runHead;
|
||||
|
|
|
@@ -140,9 +140,9 @@ struct hb_hashmap_t
return true;
}

void set (K key, V value)
bool set (K key, V value)
{
set_with_hash (key, hb_hash (key), value);
return set_with_hash (key, hb_hash (key), value);
}

V get (K key) const

@@ -211,15 +211,15 @@ struct hb_hashmap_t

protected:

void set_with_hash (K key, uint32_t hash, V value)
bool set_with_hash (K key, uint32_t hash, V value)
{
if (unlikely (!successful)) return;
if (unlikely (key == kINVALID)) return;
if ((occupancy + occupancy / 2) >= mask && !resize ()) return;
if (unlikely (!successful)) return false;
if (unlikely (key == kINVALID)) return true;
if (unlikely ((occupancy + occupancy / 2) >= mask && !resize ())) return false;
unsigned int i = bucket_for_hash (key, hash);

if (value == vINVALID && items[i].key != key)
return; /* Trying to delete non-existent key. */
return true; /* Trying to delete non-existent key. */

if (!items[i].is_unused ())
{

@@ -235,6 +235,8 @@ struct hb_hashmap_t
occupancy++;
if (!items[i].is_tombstone ())
population++;

return true;
}

unsigned int bucket_for (K key) const
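With set () and set_with_hash () now returning a bool, callers inside the library can propagate an allocation failure at the call site instead of only noticing it later through the map's error flag. A generic, self-contained illustration of that contract (not HarfBuzz code; the toy_map type is invented):

#include <unordered_map>
#include <new>

struct toy_map
{
  std::unordered_map<unsigned, unsigned> items;

  // Mirrors the new contract: false means the container could not grow.
  bool set (unsigned key, unsigned value)
  {
    try { items[key] = value; }
    catch (const std::bad_alloc &) { return false; }
    return true;
  }
};

bool build (toy_map &m, unsigned count)
{
  for (unsigned gid = 0; gid < count; gid++)
    if (!m.set (gid, gid + 1))
      return false;   // bail out instead of continuing with a broken map
  return true;
}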
@@ -39,8 +39,11 @@

#define HB_NULL_POOL_SIZE 384

/* Use SFINAE to sniff whether T has min_size; in which case return T::null_size,
* otherwise return sizeof(T). */
/* Use SFINAE to sniff whether T has min_size; in which case return the larger
* of sizeof(T) and T::null_size, otherwise return sizeof(T).
*
* The main purpose of this is to let structs communicate that they are not nullable,
* by defining min_size but *not* null_size. */

/* The hard way...
* https://stackoverflow.com/questions/7776448/sfinae-tried-with-bool-gives-compiler-error-template-argument-tvalue-invol

@@ -49,8 +52,9 @@
template <typename T, typename>
struct _hb_null_size : hb_integral_constant<unsigned, sizeof (T)> {};
template <typename T>
struct _hb_null_size<T, hb_void_t<decltype (T::min_size)>> : hb_integral_constant<unsigned, T::null_size> {};

struct _hb_null_size<T, hb_void_t<decltype (T::min_size)>>
: hb_integral_constant<unsigned,
(sizeof (T) > T::null_size ? sizeof (T) : T::null_size)> {};
template <typename T>
using hb_null_size = _hb_null_size<T, void>;
#define hb_null_size(T) hb_null_size<T>::value

@@ -68,6 +72,14 @@ template <typename T>
using hb_static_size = _hb_static_size<T, void>;
#define hb_static_size(T) hb_static_size<T>::value

template <typename T, typename>
struct _hb_min_size : hb_integral_constant<unsigned, sizeof (T)> {};
template <typename T>
struct _hb_min_size<T, hb_void_t<decltype (T::min_size)>> : hb_integral_constant<unsigned, T::min_size> {};
template <typename T>
using hb_min_size = _hb_min_size<T, void>;
#define hb_min_size(T) hb_min_size<T>::value


/*
* Null()
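The comment above describes the detection trick: the primary template falls back to sizeof (T), and the partial specialization only participates when T::min_size is a valid expression. A self-contained sketch of the same idiom using the standard library (illustrative only, not HarfBuzz code):

#include <cstddef>
#include <type_traits>

template <typename T, typename = void>
struct null_size : std::integral_constant<std::size_t, sizeof (T)> {};

// Selected only when T::min_size exists; returns the larger of sizeof (T)
// and T::null_size, matching the behaviour the new comment documents.
template <typename T>
struct null_size<T, std::void_t<decltype (T::min_size)>>
  : std::integral_constant<std::size_t,
                           (sizeof (T) > T::null_size ? sizeof (T) : T::null_size)> {};

struct Plain { int x; };   // no min_size: falls back to sizeof (T)
struct Sized
{
  static constexpr std::size_t min_size = 2;
  static constexpr std::size_t null_size = 2;
};

static_assert (null_size<Plain>::value == sizeof (Plain), "fallback case");
static_assert (null_size<Sized>::value == 2, "max of sizeof (T) and null_size");

int main () { return 0; }

A type that defines min_size but not null_size makes the specialization ill-formed at the point of use, which is how the real header flags non-nullable structs.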
@ -218,7 +218,7 @@ struct TTCHeaderVersion1
|
|||
Tag ttcTag; /* TrueType Collection ID string: 'ttcf' */
|
||||
FixedVersion<>version; /* Version of the TTC Header (1.0),
|
||||
* 0x00010000u */
|
||||
LArrayOf<LOffsetTo<OpenTypeOffsetTable>>
|
||||
Array32Of<Offset32To<OpenTypeOffsetTable>>
|
||||
table; /* Array of offsets to the OffsetTable for each font
|
||||
* from the beginning of the file */
|
||||
public:
|
||||
|
@ -295,7 +295,7 @@ struct ResourceRecord
|
|||
HBINT16 nameOffset; /* Offset from beginning of resource name list
|
||||
* to resource name, -1 means there is none. */
|
||||
HBUINT8 attrs; /* Resource attributes */
|
||||
NNOffsetTo<LArrayOf<HBUINT8>, HBUINT24>
|
||||
NNOffset24To<Array32Of<HBUINT8>>
|
||||
offset; /* Offset from beginning of data block to
|
||||
* data for this resource */
|
||||
HBUINT32 reserved; /* Reserved for handle to resource */
|
||||
|
@ -330,7 +330,7 @@ struct ResourceTypeRecord
|
|||
protected:
|
||||
Tag tag; /* Resource type. */
|
||||
HBUINT16 resCountM1; /* Number of resources minus 1. */
|
||||
NNOffsetTo<UnsizedArrayOf<ResourceRecord>>
|
||||
NNOffset16To<UnsizedArrayOf<ResourceRecord>>
|
||||
resourcesZ; /* Offset from beginning of resource type list
|
||||
* to reference item list for this type. */
|
||||
public:
|
||||
|
@ -386,7 +386,7 @@ struct ResourceMap
|
|||
HBUINT32 reserved1; /* Reserved for handle to next resource map */
|
||||
HBUINT16 resreved2; /* Reserved for file reference number */
|
||||
HBUINT16 attrs; /* Resource fork attribute */
|
||||
NNOffsetTo<ArrayOfM1<ResourceTypeRecord>>
|
||||
NNOffset16To<ArrayOfM1<ResourceTypeRecord>>
|
||||
typeList; /* Offset from beginning of map to
|
||||
* resource type list */
|
||||
Offset16 nameList; /* Offset from beginning of map to
|
||||
|
@ -418,10 +418,10 @@ struct ResourceForkHeader
|
|||
}
|
||||
|
||||
protected:
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
NNOffset32To<UnsizedArrayOf<HBUINT8>>
|
||||
data; /* Offset from beginning of resource fork
|
||||
* to resource data */
|
||||
LNNOffsetTo<ResourceMap >
|
||||
NNOffset32To<ResourceMap >
|
||||
map; /* Offset from beginning of resource fork
|
||||
* to resource map */
|
||||
HBUINT32 dataLen; /* Length of resource data */
|
||||
|
|
|
@ -196,6 +196,12 @@ DECLARE_NULL_NAMESPACE_BYTES (OT, Index);
|
|||
|
||||
typedef Index NameID;
|
||||
|
||||
struct VarIdx : HBUINT32 {
|
||||
static constexpr unsigned NO_VARIATION = 0xFFFFFFFFu;
|
||||
VarIdx& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
|
||||
};
|
||||
DECLARE_NULL_NAMESPACE_BYTES (OT, VarIdx);
|
||||
|
||||
/* Offset, Null offset = 0 */
|
||||
template <typename Type, bool has_null=true>
|
||||
struct Offset : Type
|
||||
|
@ -209,7 +215,9 @@ struct Offset : Type
|
|||
void *serialize (hb_serialize_context_t *c, const void *base)
|
||||
{
|
||||
void *t = c->start_embed<void> ();
|
||||
c->check_assign (*this, (unsigned) ((char *) t - (char *) base));
|
||||
c->check_assign (*this,
|
||||
(unsigned) ((char *) t - (char *) base),
|
||||
HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
|
||||
return t;
|
||||
}
|
||||
|
||||
|
@ -218,6 +226,7 @@ struct Offset : Type
|
|||
};
|
||||
|
||||
typedef Offset<HBUINT16> Offset16;
|
||||
typedef Offset<HBUINT24> Offset24;
|
||||
typedef Offset<HBUINT32> Offset32;
|
||||
|
||||
|
||||
|
@ -287,7 +296,7 @@ struct _hb_has_null<Type, true>
|
|||
static Type *get_crap () { return &Crap (Type); }
|
||||
};
|
||||
|
||||
template <typename Type, typename OffsetType=HBUINT16, bool has_null=true>
|
||||
template <typename Type, typename OffsetType, bool has_null=true>
|
||||
struct OffsetTo : Offset<OffsetType, has_null>
|
||||
{
|
||||
HB_DELETE_COPY_ASSIGN (OffsetTo);
|
||||
|
@ -378,7 +387,7 @@ struct OffsetTo : Offset<OffsetType, has_null>
|
|||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!c->check_struct (this))) return_trace (false);
|
||||
if (unlikely (this->is_null ())) return_trace (true);
|
||||
if (unlikely (!c->check_range (base, *this))) return_trace (false);
|
||||
if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
@ -401,12 +410,14 @@ struct OffsetTo : Offset<OffsetType, has_null>
|
|||
DEFINE_SIZE_STATIC (sizeof (OffsetType));
|
||||
};
|
||||
/* Partial specializations. */
|
||||
template <typename Type, bool has_null=true>
|
||||
using LOffsetTo = OffsetTo<Type, HBUINT32, has_null>;
|
||||
template <typename Type, typename OffsetType=HBUINT16>
|
||||
using NNOffsetTo = OffsetTo<Type, OffsetType, false>;
|
||||
template <typename Type>
|
||||
using LNNOffsetTo = LOffsetTo<Type, false>;
|
||||
template <typename Type, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, has_null>;
|
||||
template <typename Type, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, has_null>;
|
||||
template <typename Type, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, has_null>;
|
||||
|
||||
template <typename Type, typename OffsetType> using NNOffsetTo = OffsetTo<Type, OffsetType, false>;
|
||||
template <typename Type> using NNOffset16To = Offset16To<Type, false>;
|
||||
template <typename Type> using NNOffset24To = Offset24To<Type, false>;
|
||||
template <typename Type> using NNOffset32To = Offset32To<Type, false>;
|
||||
|
||||
|
||||
/*
|
||||
|
@ -513,11 +524,11 @@ struct UnsizedArrayOf
|
|||
|
||||
/* Unsized array of offset's */
|
||||
template <typename Type, typename OffsetType, bool has_null=true>
|
||||
using UnsizedOffsetArrayOf = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null>>;
|
||||
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null>>;
|
||||
|
||||
/* Unsized array of offsets relative to the beginning of the array itself. */
|
||||
template <typename Type, typename OffsetType, bool has_null=true>
|
||||
struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType, has_null>
|
||||
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, has_null>
|
||||
{
|
||||
const Type& operator [] (int i_) const
|
||||
{
|
||||
|
@ -538,7 +549,7 @@ struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType, has_null>
|
|||
bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>
|
||||
return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, has_null>
|
||||
::sanitize (c, count, this, hb_forward<Ts> (ds)...)));
|
||||
}
|
||||
};
|
||||
|
@ -569,7 +580,7 @@ struct SortedUnsizedArrayOf : UnsizedArrayOf<Type>
|
|||
|
||||
|
||||
/* An array with a number of elements. */
|
||||
template <typename Type, typename LenType=HBUINT16>
|
||||
template <typename Type, typename LenType>
|
||||
struct ArrayOf
|
||||
{
|
||||
typedef Type item_t;
|
||||
|
@ -617,17 +628,30 @@ struct ArrayOf
|
|||
hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
|
||||
{ return as_array ().sub_array (start_offset, count); }
|
||||
|
||||
hb_success_t serialize (hb_serialize_context_t *c, unsigned items_len)
|
||||
template <typename T>
|
||||
Type &lsearch (const T &x, Type ¬_found = Crap (Type))
|
||||
{ return *as_array ().lsearch (x, ¬_found); }
|
||||
template <typename T>
|
||||
const Type &lsearch (const T &x, const Type ¬_found = Null (Type)) const
|
||||
{ return *as_array ().lsearch (x, ¬_found); }
|
||||
template <typename T>
|
||||
bool lfind (const T &x, unsigned *pos = nullptr) const
|
||||
{ return as_array ().lfind (x, pos); }
|
||||
|
||||
void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1)
|
||||
{ as_array ().qsort (start, end); }
|
||||
|
||||
HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
c->check_assign (len, items_len);
|
||||
c->check_assign (len, items_len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
|
||||
if (unlikely (!c->extend (*this))) return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
template <typename Iterator,
|
||||
hb_requires (hb_is_source_of (Iterator, Type))>
|
||||
hb_success_t serialize (hb_serialize_context_t *c, Iterator items)
|
||||
HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
unsigned count = items.len ();
|
||||
|
@ -656,7 +680,7 @@ struct ArrayOf
|
|||
TRACE_SERIALIZE (this);
|
||||
auto *out = c->start_embed (this);
|
||||
if (unlikely (!c->extend_min (out))) return_trace (nullptr);
|
||||
c->check_assign (out->len, len);
|
||||
c->check_assign (out->len, len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
|
||||
if (unlikely (!as_array ().copy (c))) return_trace (nullptr);
|
||||
return_trace (out);
|
||||
}
|
||||
|
@ -674,19 +698,6 @@ struct ArrayOf
|
|||
return_trace (true);
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
Type &lsearch (const T &x, Type ¬_found = Crap (Type))
|
||||
{ return *as_array ().lsearch (x, ¬_found); }
|
||||
template <typename T>
|
||||
const Type &lsearch (const T &x, const Type ¬_found = Null (Type)) const
|
||||
{ return *as_array ().lsearch (x, ¬_found); }
|
||||
template <typename T>
|
||||
bool lfind (const T &x, unsigned *pos = nullptr) const
|
||||
{ return as_array ().lfind (x, pos); }
|
||||
|
||||
void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1)
|
||||
{ as_array ().qsort (start, end); }
|
||||
|
||||
bool sanitize_shallow (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
@ -699,21 +710,18 @@ struct ArrayOf
|
|||
public:
|
||||
DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
|
||||
};
|
||||
template <typename Type>
|
||||
using LArrayOf = ArrayOf<Type, HBUINT32>;
|
||||
template <typename Type> using Array16Of = ArrayOf<Type, HBUINT16>;
|
||||
template <typename Type> using Array32Of = ArrayOf<Type, HBUINT32>;
|
||||
using PString = ArrayOf<HBUINT8, HBUINT8>;
|
||||
|
||||
/* Array of Offset's */
|
||||
template <typename Type>
|
||||
using OffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT16>>;
|
||||
template <typename Type>
|
||||
using LOffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT32>>;
|
||||
template <typename Type>
|
||||
using LOffsetLArrayOf = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;
|
||||
template <typename Type> using Array16OfOffset16To = ArrayOf<OffsetTo<Type, HBUINT16>, HBUINT16>;
|
||||
template <typename Type> using Array16OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT16>;
|
||||
template <typename Type> using Array32OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;
|
||||
|
||||
/* Array of offsets relative to the beginning of the array itself. */
|
||||
template <typename Type>
|
||||
struct OffsetListOf : OffsetArrayOf<Type>
|
||||
struct List16OfOffset16To : Array16OfOffset16To<Type>
|
||||
{
|
||||
const Type& operator [] (int i_) const
|
||||
{
|
||||
|
@ -731,7 +739,7 @@ struct OffsetListOf : OffsetArrayOf<Type>
|
|||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
struct OffsetListOf<Type> *out = c->serializer->embed (*this);
|
||||
struct List16OfOffset16To<Type> *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
unsigned int count = this->len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
|
@ -743,7 +751,7 @@ struct OffsetListOf : OffsetArrayOf<Type>
|
|||
bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (OffsetArrayOf<Type>::sanitize (c, this, hb_forward<Ts> (ds)...));
|
||||
return_trace (Array16OfOffset16To<Type>::sanitize (c, this, hb_forward<Ts> (ds)...));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -787,7 +795,7 @@ struct HeadlessArrayOf
|
|||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
c->check_assign (lenP1, items_len + 1);
|
||||
c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
|
||||
if (unlikely (!c->extend (*this))) return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
|
@ -859,6 +867,7 @@ struct ArrayOfM1
|
|||
{
|
||||
TRACE_SANITIZE (this);
|
||||
if (unlikely (!sanitize_shallow (c))) return_trace (false);
|
||||
if (!sizeof... (Ts) && hb_is_trivially_copyable (Type)) return_trace (true);
|
||||
unsigned int count = lenM1 + 1;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!c->dispatch (arrayZ[i], hb_forward<Ts> (ds)...)))
|
||||
|
@ -882,7 +891,7 @@ struct ArrayOfM1
|
|||
};
|
||||
|
||||
/* An array with sorted elements. Supports binary searching. */
|
||||
template <typename Type, typename LenType=HBUINT16>
|
||||
template <typename Type, typename LenType>
|
||||
struct SortedArrayOf : ArrayOf<Type, LenType>
|
||||
{
|
||||
hb_sorted_array_t< Type> as_array () { return hb_sorted_array (this->arrayZ, this->len); }
|
||||
|
@ -933,6 +942,9 @@ struct SortedArrayOf : ArrayOf<Type, LenType>
|
|||
{ return as_array ().bfind (x, i, not_found, to_store); }
|
||||
};
|
||||
|
||||
template <typename Type> using SortedArray16Of = SortedArrayOf<Type, HBUINT16>;
|
||||
template <typename Type> using SortedArray32Of = SortedArrayOf<Type, HBUINT32>;
|
||||
|
||||
/*
|
||||
* Binary-search arrays
|
||||
*/
|
||||
|
|
|
@ -1390,7 +1390,7 @@ struct cff1
|
|||
|
||||
public:
|
||||
FixedVersion<HBUINT8> version; /* Version of CFF table. set to 0x0100u */
|
||||
OffsetTo<CFF1NameIndex, HBUINT8> nameIndex; /* headerSize = Offset to Name INDEX. */
|
||||
NNOffsetTo<CFF1NameIndex, HBUINT8> nameIndex; /* headerSize = Offset to Name INDEX. */
|
||||
HBUINT8 offSize; /* offset size (unused?) */
|
||||
|
||||
public:
|
||||
|
|
|
@ -276,7 +276,9 @@ struct CmapSubtableFormat4
|
|||
HBUINT16 *idRangeOffset = serialize_rangeoffset_glyid (c, format4_iter, endCode, startCode, idDelta, segcount);
|
||||
if (unlikely (!c->check_success (idRangeOffset))) return;
|
||||
|
||||
if (unlikely (!c->check_assign(this->length, c->length () - table_initpos))) return;
|
||||
if (unlikely (!c->check_assign(this->length,
|
||||
c->length () - table_initpos,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
|
||||
this->segCountX2 = segcount * 2;
|
||||
this->entrySelector = hb_max (1u, hb_bit_storage (segcount)) - 1;
|
||||
this->searchRange = 2 * (1u << this->entrySelector);
|
||||
|
@ -670,7 +672,7 @@ struct CmapSubtableLongSegmented
|
|||
HBUINT16 reserved; /* Reserved; set to 0. */
|
||||
HBUINT32 length; /* Byte length of this subtable. */
|
||||
HBUINT32 language; /* Ignore. */
|
||||
SortedArrayOf<CmapSubtableLongGroup, HBUINT32>
|
||||
SortedArray32Of<CmapSubtableLongGroup>
|
||||
groups; /* Groupings. */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (16, groups);
|
||||
|
@ -784,7 +786,7 @@ struct UnicodeValueRange
|
|||
DEFINE_SIZE_STATIC (4);
|
||||
};
|
||||
|
||||
struct DefaultUVS : SortedArrayOf<UnicodeValueRange, HBUINT32>
|
||||
struct DefaultUVS : SortedArray32Of<UnicodeValueRange>
|
||||
{
|
||||
void collect_unicodes (hb_set_t *out) const
|
||||
{
|
||||
|
@ -850,7 +852,9 @@ struct DefaultUVS : SortedArrayOf<UnicodeValueRange, HBUINT32>
|
|||
}
|
||||
else
|
||||
{
|
||||
if (unlikely (!c->check_assign (out->len, (c->length () - init_len) / UnicodeValueRange::static_size))) return nullptr;
|
||||
if (unlikely (!c->check_assign (out->len,
|
||||
(c->length () - init_len) / UnicodeValueRange::static_size,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW))) return nullptr;
|
||||
return out;
|
||||
}
|
||||
}
|
||||
|
@ -876,23 +880,21 @@ struct UVSMapping
|
|||
DEFINE_SIZE_STATIC (5);
|
||||
};
|
||||
|
||||
struct NonDefaultUVS : SortedArrayOf<UVSMapping, HBUINT32>
|
||||
struct NonDefaultUVS : SortedArray32Of<UVSMapping>
|
||||
{
|
||||
void collect_unicodes (hb_set_t *out) const
|
||||
{
|
||||
unsigned int count = len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
out->add (arrayZ[i].unicodeValue);
|
||||
for (const auto& a : as_array ())
|
||||
out->add (a.unicodeValue);
|
||||
}
|
||||
|
||||
void collect_mapping (hb_set_t *unicodes, /* OUT */
|
||||
hb_map_t *mapping /* OUT */) const
|
||||
{
|
||||
unsigned count = len;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
for (const auto& a : as_array ())
|
||||
{
|
||||
hb_codepoint_t unicode = arrayZ[i].unicodeValue;
|
||||
hb_codepoint_t glyphid = arrayZ[i].glyphID;
|
||||
hb_codepoint_t unicode = a.unicodeValue;
|
||||
hb_codepoint_t glyphid = a.glyphID;
|
||||
unicodes->add (unicode);
|
||||
mapping->set (unicode, glyphid);
|
||||
}
|
||||
|
@ -1041,9 +1043,9 @@ struct VariationSelectorRecord
|
|||
}
|
||||
|
||||
HBUINT24 varSelector; /* Variation selector. */
|
||||
LOffsetTo<DefaultUVS>
|
||||
Offset32To<DefaultUVS>
|
||||
defaultUVS; /* Offset to Default UVS Table. May be 0. */
|
||||
LOffsetTo<NonDefaultUVS>
|
||||
Offset32To<NonDefaultUVS>
|
||||
nonDefaultUVS; /* Offset to Non-Default UVS Table. May be 0. */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (11);
|
||||
|
@ -1058,9 +1060,8 @@ struct CmapSubtableFormat14
|
|||
|
||||
void collect_variation_selectors (hb_set_t *out) const
|
||||
{
|
||||
unsigned int count = record.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
out->add (record.arrayZ[i].varSelector);
|
||||
for (const auto& a : record.as_array ())
|
||||
out->add (a.varSelector);
|
||||
}
|
||||
void collect_variation_unicodes (hb_codepoint_t variation_selector,
|
||||
hb_set_t *out) const
|
||||
|
@ -1112,10 +1113,12 @@ struct CmapSubtableFormat14
|
|||
return;
|
||||
|
||||
int tail_len = init_tail - c->tail;
|
||||
c->check_assign (this->length, c->length () - table_initpos + tail_len);
|
||||
c->check_assign (this->length, c->length () - table_initpos + tail_len,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW);
|
||||
c->check_assign (this->record.len,
|
||||
(c->length () - table_initpos - CmapSubtableFormat14::min_size) /
|
||||
VariationSelectorRecord::static_size);
|
||||
VariationSelectorRecord::static_size,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW);
|
||||
|
||||
/* Correct the incorrect write order by reversing the order of the variation
|
||||
records array. */
|
||||
|
@ -1180,7 +1183,7 @@ struct CmapSubtableFormat14
|
|||
protected:
|
||||
HBUINT16 format; /* Format number is set to 14. */
|
||||
HBUINT32 length; /* Byte length of this subtable. */
|
||||
SortedArrayOf<VariationSelectorRecord, HBUINT32>
|
||||
SortedArray32Of<VariationSelectorRecord>
|
||||
record; /* Variation selector records; sorted
|
||||
* in increasing order of `varSelector'. */
|
||||
public:
|
||||
|
@ -1338,7 +1341,7 @@ struct EncodingRecord
|
|||
|
||||
HBUINT16 platformID; /* Platform ID. */
|
||||
HBUINT16 encodingID; /* Platform-specific encoding ID. */
|
||||
LOffsetTo<CmapSubtable>
|
||||
Offset32To<CmapSubtable>
|
||||
subtable; /* Byte offset from beginning of table to the subtable for this encoding. */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
@ -1401,7 +1404,9 @@ struct cmap
|
|||
}
|
||||
}
|
||||
|
||||
c->check_assign(this->encodingRecord.len, (c->length () - cmap::min_size)/EncodingRecord::static_size);
|
||||
c->check_assign(this->encodingRecord.len,
|
||||
(c->length () - cmap::min_size)/EncodingRecord::static_size,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW);
|
||||
}
|
||||
|
||||
void closure_glyphs (const hb_set_t *unicodes,
|
||||
|
@ -1697,7 +1702,7 @@ struct cmap
|
|||
|
||||
protected:
|
||||
HBUINT16 version; /* Table version number (0). */
|
||||
SortedArrayOf<EncodingRecord>
|
||||
SortedArray16Of<EncodingRecord>
|
||||
encodingRecord; /* Encoding tables. */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (4, encodingRecord);
|
||||
|
|
|
@ -510,7 +510,7 @@ struct IndexSubtableRecord
|
|||
|
||||
HBGlyphID firstGlyphIndex;
|
||||
HBGlyphID lastGlyphIndex;
|
||||
LOffsetTo<IndexSubtable> offsetToSubtable;
|
||||
Offset32To<IndexSubtable> offsetToSubtable;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
};
|
||||
|
@ -672,7 +672,7 @@ struct BitmapSizeTable
|
|||
}
|
||||
|
||||
protected:
|
||||
LNNOffsetTo<IndexSubtableArray>
|
||||
NNOffset32To<IndexSubtableArray>
|
||||
indexSubtableArrayOffset;
|
||||
HBUINT32 indexTablesSize;
|
||||
HBUINT32 numberOfIndexSubtables;
|
||||
|
@ -697,7 +697,7 @@ struct BitmapSizeTable
|
|||
struct GlyphBitmapDataFormat17
|
||||
{
|
||||
SmallGlyphMetrics glyphMetrics;
|
||||
LArrayOf<HBUINT8> data;
|
||||
Array32Of<HBUINT8> data;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (9, data);
|
||||
};
|
||||
|
@ -705,14 +705,14 @@ struct GlyphBitmapDataFormat17
|
|||
struct GlyphBitmapDataFormat18
|
||||
{
|
||||
BigGlyphMetrics glyphMetrics;
|
||||
LArrayOf<HBUINT8> data;
|
||||
Array32Of<HBUINT8> data;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (12, data);
|
||||
};
|
||||
|
||||
struct GlyphBitmapDataFormat19
|
||||
{
|
||||
LArrayOf<HBUINT8> data;
|
||||
Array32Of<HBUINT8> data;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (4, data);
|
||||
};
|
||||
|
@ -798,7 +798,7 @@ struct CBLC
|
|||
|
||||
protected:
|
||||
FixedVersion<> version;
|
||||
LArrayOf<BitmapSizeTable> sizeTables;
|
||||
Array32Of<BitmapSizeTable> sizeTables;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (8, sizeTables);
|
||||
};
|
||||
|
|
|
@ -29,6 +29,7 @@
|
|||
#define HB_OT_COLOR_COLR_TABLE_HH
|
||||
|
||||
#include "hb-open-type.hh"
|
||||
#include "hb-ot-layout-common.hh"
|
||||
|
||||
/*
|
||||
* COLR -- Color
|
||||
|
@ -39,7 +40,6 @@
|
|||
|
||||
namespace OT {
|
||||
|
||||
|
||||
struct LayerRecord
|
||||
{
|
||||
operator hb_ot_color_layer_t () const { return {glyphId, colorIdx}; }
|
||||
|
@ -90,6 +90,467 @@ struct BaseGlyphRecord
|
|||
DEFINE_SIZE_STATIC (6);
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
struct Variable
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
protected:
|
||||
T value;
|
||||
VarIdx varIdx;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (4 + T::static_size);
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
struct NoVariable
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
T value;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (T::static_size);
|
||||
};
|
||||
|
||||
// Color structures
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct ColorIndex
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
HBUINT16 paletteIndex;
|
||||
Var<F2DOT14> alpha;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (2 + Var<F2DOT14>::static_size);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct ColorStop
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
Var<F2DOT14> stopOffset;
|
||||
ColorIndex<Var> color;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (Var<F2DOT14>::static_size + ColorIndex<Var>::static_size);
|
||||
};
|
||||
|
||||
struct Extend : HBUINT8
|
||||
{
|
||||
enum {
|
||||
EXTEND_PAD = 0,
|
||||
EXTEND_REPEAT = 1,
|
||||
EXTEND_REFLECT = 2,
|
||||
};
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (1);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct ColorLine
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this) &&
|
||||
stops.sanitize (c));
|
||||
}
|
||||
|
||||
Extend extend;
|
||||
Array16Of<ColorStop<Var>> stops;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY_SIZED (3, stops);
|
||||
};
|
||||
|
||||
// Composition modes
|
||||
|
||||
// Compositing modes are taken from https://www.w3.org/TR/compositing-1/
|
||||
// NOTE: a brief audit of major implementations suggests most support most
|
||||
// or all of the specified modes.
|
||||
struct CompositeMode : HBUINT8
|
||||
{
|
||||
enum {
|
||||
// Porter-Duff modes
|
||||
// https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators
|
||||
COMPOSITE_CLEAR = 0, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_clear
|
||||
COMPOSITE_SRC = 1, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_src
|
||||
COMPOSITE_DEST = 2, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dst
|
||||
COMPOSITE_SRC_OVER = 3, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcover
|
||||
COMPOSITE_DEST_OVER = 4, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstover
|
||||
COMPOSITE_SRC_IN = 5, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcin
|
||||
COMPOSITE_DEST_IN = 6, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstin
|
||||
COMPOSITE_SRC_OUT = 7, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcout
|
||||
COMPOSITE_DEST_OUT = 8, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstout
|
||||
COMPOSITE_SRC_ATOP = 9, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_srcatop
|
||||
COMPOSITE_DEST_ATOP = 10, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_dstatop
|
||||
COMPOSITE_XOR = 11, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_xor
|
||||
COMPOSITE_PLUS = 12, // https://www.w3.org/TR/compositing-1/#porterduffcompositingoperators_plus
|
||||
|
||||
// Blend modes
|
||||
// https://www.w3.org/TR/compositing-1/#blending
|
||||
COMPOSITE_SCREEN = 13, // https://www.w3.org/TR/compositing-1/#blendingscreen
|
||||
COMPOSITE_OVERLAY = 14, // https://www.w3.org/TR/compositing-1/#blendingoverlay
|
||||
COMPOSITE_DARKEN = 15, // https://www.w3.org/TR/compositing-1/#blendingdarken
|
||||
COMPOSITE_LIGHTEN = 16, // https://www.w3.org/TR/compositing-1/#blendinglighten
|
||||
COMPOSITE_COLOR_DODGE = 17, // https://www.w3.org/TR/compositing-1/#blendingcolordodge
|
||||
COMPOSITE_COLOR_BURN = 18, // https://www.w3.org/TR/compositing-1/#blendingcolorburn
|
||||
COMPOSITE_HARD_LIGHT = 19, // https://www.w3.org/TR/compositing-1/#blendinghardlight
|
||||
COMPOSITE_SOFT_LIGHT = 20, // https://www.w3.org/TR/compositing-1/#blendingsoftlight
|
||||
COMPOSITE_DIFFERENCE = 21, // https://www.w3.org/TR/compositing-1/#blendingdifference
|
||||
COMPOSITE_EXCLUSION = 22, // https://www.w3.org/TR/compositing-1/#blendingexclusion
|
||||
COMPOSITE_MULTIPLY = 23, // https://www.w3.org/TR/compositing-1/#blendingmultiply
|
||||
|
||||
// Modes that, uniquely, do not operate on components
|
||||
// https://www.w3.org/TR/compositing-1/#blendingnonseparable
|
||||
COMPOSITE_HSL_HUE = 24, // https://www.w3.org/TR/compositing-1/#blendinghue
|
||||
COMPOSITE_HSL_SATURATION = 25, // https://www.w3.org/TR/compositing-1/#blendingsaturation
|
||||
COMPOSITE_HSL_COLOR = 26, // https://www.w3.org/TR/compositing-1/#blendingcolor
|
||||
COMPOSITE_HSL_LUMINOSITY = 27, // https://www.w3.org/TR/compositing-1/#blendingluminosity
|
||||
};
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (1);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct Affine2x3
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
Var<HBFixed> xx;
|
||||
Var<HBFixed> yx;
|
||||
Var<HBFixed> xy;
|
||||
Var<HBFixed> yy;
|
||||
Var<HBFixed> dx;
|
||||
Var<HBFixed> dy;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (6 * Var<HBFixed>::static_size);
|
||||
};
|
||||
|
||||
struct PaintColrLayers
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
HBUINT8 format; /* format = 1 */
|
||||
HBUINT8 numLayers;
|
||||
HBUINT32 firstLayerIndex; /* index into COLRv1::layersV1 */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (6);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct PaintSolid
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this));
|
||||
}
|
||||
|
||||
HBUINT8 format; /* format = 2(noVar) or 3(Var)*/
|
||||
ColorIndex<Var> color;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (1 + ColorIndex<Var>::static_size);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct PaintLinearGradient
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
|
||||
}
|
||||
|
||||
HBUINT8 format; /* format = 4(noVar) or 5 (Var) */
|
||||
Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintLinearGradient
|
||||
* table) to ColorLine subtable. */
|
||||
Var<FWORD> x0;
|
||||
Var<FWORD> y0;
|
||||
Var<FWORD> x1;
|
||||
Var<FWORD> y1;
|
||||
Var<FWORD> x2;
|
||||
Var<FWORD> y2;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (4 + 6 * Var<FWORD>::static_size);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct PaintRadialGradient
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
|
||||
}
|
||||
|
||||
HBUINT8 format; /* format = 6(noVar) or 7 (Var) */
|
||||
Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintRadialGradient
|
||||
* table) to ColorLine subtable. */
|
||||
Var<FWORD> x0;
|
||||
Var<FWORD> y0;
|
||||
Var<UFWORD> radius0;
|
||||
Var<FWORD> x1;
|
||||
Var<FWORD> y1;
|
||||
Var<UFWORD> radius1;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (4 + 6 * Var<FWORD>::static_size);
|
||||
};
|
||||
|
||||
template <template<typename> class Var>
|
||||
struct PaintSweepGradient
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && colorLine.sanitize (c, this));
}

HBUINT8 format; /* format = 8(noVar) or 9 (Var) */
Offset24To<ColorLine<Var>> colorLine; /* Offset (from beginning of PaintSweepGradient
* table) to ColorLine subtable. */
Var<FWORD> centerX;
Var<FWORD> centerY;
Var<HBFixed> startAngle;
Var<HBFixed> endAngle;
public:
DEFINE_SIZE_STATIC (2 * Var<FWORD>::static_size + 2 * Var<HBFixed>::static_size);
};

struct Paint;
// Paint a non-COLR glyph, filled as indicated by paint.
struct PaintGlyph
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && paint.sanitize (c, this));
}

HBUINT8 format; /* format = 10 */
Offset24To<Paint> paint; /* Offset (from beginning of PaintGlyph table) to Paint subtable. */
HBUINT16 gid;
public:
DEFINE_SIZE_STATIC (6);
};

struct PaintColrGlyph
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}

HBUINT8 format; /* format = 11 */
HBUINT16 gid;
public:
DEFINE_SIZE_STATIC (3);
};

template <template<typename> class Var>
struct PaintTransform
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && src.sanitize (c, this));
}

HBUINT8 format; /* format = 12(noVar) or 13 (Var) */
Offset24To<Paint> src; /* Offset (from beginning of PaintTransform table) to Paint subtable. */
Affine2x3<Var> transform;
public:
DEFINE_SIZE_STATIC (4 + Affine2x3<Var>::static_size);
};

template <template<typename> class Var>
struct PaintTranslate
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && src.sanitize (c, this));
}

HBUINT8 format; /* format = 14(noVar) or 15 (Var) */
Offset24To<Paint> src; /* Offset (from beginning of PaintTranslate table) to Paint subtable. */
Var<HBFixed> dx;
Var<HBFixed> dy;
public:
DEFINE_SIZE_STATIC (4 + Var<HBFixed>::static_size);
};

template <template<typename> class Var>
struct PaintRotate
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && src.sanitize (c, this));
}

HBUINT8 format; /* format = 16 (noVar) or 17(Var) */
Offset24To<Paint> src; /* Offset (from beginning of PaintRotate table) to Paint subtable. */
Var<HBFixed> angle;
Var<HBFixed> centerX;
Var<HBFixed> centerY;
public:
DEFINE_SIZE_STATIC (4 + 3 * Var<HBFixed>::static_size);
};

template <template<typename> class Var>
struct PaintSkew
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && src.sanitize (c, this));
}

HBUINT8 format; /* format = 18(noVar) or 19 (Var) */
Offset24To<Paint> src; /* Offset (from beginning of PaintSkew table) to Paint subtable. */
Var<HBFixed> xSkewAngle;
Var<HBFixed> ySkewAngle;
Var<HBFixed> centerX;
Var<HBFixed> centerY;
public:
DEFINE_SIZE_STATIC (4 + 4 * Var<HBFixed>::static_size);
};

struct PaintComposite
{
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
src.sanitize (c, this) &&
backdrop.sanitize (c, this));
}

HBUINT8 format; /* format = 20 */
Offset24To<Paint> src; /* Offset (from beginning of PaintComposite table) to source Paint subtable. */
CompositeMode mode; /* If mode is unrecognized use COMPOSITE_CLEAR */
Offset24To<Paint> backdrop; /* Offset (from beginning of PaintComposite table) to backdrop Paint subtable. */
public:
DEFINE_SIZE_STATIC (8);
};

struct Paint
{
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
switch (u.format) {
case 1: return_trace (c->dispatch (u.paintformat1, hb_forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.paintformat2, hb_forward<Ts> (ds)...));
case 3: return_trace (c->dispatch (u.paintformat3, hb_forward<Ts> (ds)...));
case 4: return_trace (c->dispatch (u.paintformat4, hb_forward<Ts> (ds)...));
case 5: return_trace (c->dispatch (u.paintformat5, hb_forward<Ts> (ds)...));
case 6: return_trace (c->dispatch (u.paintformat6, hb_forward<Ts> (ds)...));
case 7: return_trace (c->dispatch (u.paintformat7, hb_forward<Ts> (ds)...));
case 8: return_trace (c->dispatch (u.paintformat8, hb_forward<Ts> (ds)...));
case 9: return_trace (c->dispatch (u.paintformat9, hb_forward<Ts> (ds)...));
case 10: return_trace (c->dispatch (u.paintformat10, hb_forward<Ts> (ds)...));
case 11: return_trace (c->dispatch (u.paintformat11, hb_forward<Ts> (ds)...));
case 12: return_trace (c->dispatch (u.paintformat12, hb_forward<Ts> (ds)...));
case 13: return_trace (c->dispatch (u.paintformat13, hb_forward<Ts> (ds)...));
case 14: return_trace (c->dispatch (u.paintformat14, hb_forward<Ts> (ds)...));
case 15: return_trace (c->dispatch (u.paintformat15, hb_forward<Ts> (ds)...));
case 16: return_trace (c->dispatch (u.paintformat16, hb_forward<Ts> (ds)...));
case 17: return_trace (c->dispatch (u.paintformat17, hb_forward<Ts> (ds)...));
case 18: return_trace (c->dispatch (u.paintformat18, hb_forward<Ts> (ds)...));
case 19: return_trace (c->dispatch (u.paintformat19, hb_forward<Ts> (ds)...));
case 20: return_trace (c->dispatch (u.paintformat20, hb_forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}

protected:
union {
HBUINT8 format;
PaintColrLayers paintformat1;
PaintSolid<NoVariable> paintformat2;
PaintSolid<Variable> paintformat3;
PaintLinearGradient<NoVariable> paintformat4;
PaintLinearGradient<Variable> paintformat5;
PaintRadialGradient<NoVariable> paintformat6;
PaintRadialGradient<Variable> paintformat7;
PaintSweepGradient<NoVariable> paintformat8;
PaintSweepGradient<Variable> paintformat9;
PaintGlyph paintformat10;
PaintColrGlyph paintformat11;
PaintTransform<NoVariable> paintformat12;
PaintTransform<Variable> paintformat13;
PaintTranslate<NoVariable> paintformat14;
PaintTranslate<Variable> paintformat15;
PaintRotate<NoVariable> paintformat16;
PaintRotate<Variable> paintformat17;
PaintSkew<NoVariable> paintformat18;
PaintSkew<Variable> paintformat19;
PaintComposite paintformat20;
} u;
};

struct BaseGlyphV1Record
{
int cmp (hb_codepoint_t g) const
{ return g < glyphId ? -1 : g > glyphId ? 1 : 0; }

bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) && paint.sanitize (c, this)));
}

public:
HBGlyphID glyphId; /* Glyph ID of reference glyph */
Offset32To<Paint> paint; /* Offset (from beginning of BaseGlyphV1Record) to Paint,
* Typically PaintColrLayers */
public:
DEFINE_SIZE_STATIC (6);
};

typedef SortedArray32Of<BaseGlyphV1Record> BaseGlyphV1List;

struct LayerV1List : Array32OfOffset32To<Paint>
{
const Paint& get_paint (unsigned i) const
{ return this+(*this)[i]; }

bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (Array32OfOffset32To<Paint>::sanitize (c, this));
}
};

struct COLR
{
static constexpr hb_tag_t tableTag = HB_OT_TAG_COLR;

@@ -150,9 +611,13 @@ struct COLR
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
(this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
(this+layersZ).sanitize (c, numLayers)));
return_trace (c->check_struct (this) &&
(this+baseGlyphsZ).sanitize (c, numBaseGlyphs) &&
(this+layersZ).sanitize (c, numLayers) &&
(version == 0 || (version == 1 &&
baseGlyphsV1List.sanitize (c, this) &&
layersV1.sanitize (c, this) &&
varStore.sanitize (c, this))));
}

template<typename BaseIterator, typename LayerIterator,

@@ -263,13 +728,17 @@ struct COLR
protected:
HBUINT16 version; /* Table version number (starts at 0). */
HBUINT16 numBaseGlyphs; /* Number of Base Glyph Records. */
LNNOffsetTo<SortedUnsizedArrayOf<BaseGlyphRecord>>
NNOffset32To<SortedUnsizedArrayOf<BaseGlyphRecord>>
baseGlyphsZ; /* Offset to Base Glyph records. */
LNNOffsetTo<UnsizedArrayOf<LayerRecord>>
NNOffset32To<UnsizedArrayOf<LayerRecord>>
layersZ; /* Offset to Layer Records. */
HBUINT16 numLayers; /* Number of Layer Records. */
// Version-1 additions
Offset32To<BaseGlyphV1List> baseGlyphsV1List;
Offset32To<LayerV1List> layersV1;
Offset32To<VariationStore> varStore;
public:
DEFINE_SIZE_STATIC (14);
DEFINE_SIZE_MIN (14);
};

} /* namespace OT */
@@ -87,15 +87,15 @@ struct CPALV1Tail
}

protected:
LNNOffsetTo<UnsizedArrayOf<HBUINT32>>
NNOffset32To<UnsizedArrayOf<HBUINT32>>
paletteFlagsZ; /* Offset from the beginning of CPAL table to
* the Palette Type Array. Set to 0 if no array
* is provided. */
LNNOffsetTo<UnsizedArrayOf<NameID>>
NNOffset32To<UnsizedArrayOf<NameID>>
paletteLabelsZ; /* Offset from the beginning of CPAL table to
* the palette labels array. Set to 0 if no
* array is provided. */
LNNOffsetTo<UnsizedArrayOf<NameID>>
NNOffset32To<UnsizedArrayOf<NameID>>
colorLabelsZ; /* Offset from the beginning of CPAL table to
* the color labels array. Set to 0
* if no array is provided. */

@@ -173,7 +173,7 @@ struct CPAL
HBUINT16 numPalettes; /* Number of palettes in the table. */
HBUINT16 numColorRecords; /* Total number of color records, combined for
* all palettes. */
LNNOffsetTo<UnsizedArrayOf<BGRAColor>>
NNOffset32To<UnsizedArrayOf<BGRAColor>>
colorRecordsZ; /* Offset from the beginning of CPAL table to
* the first ColorRecord. */
UnsizedArrayOf<HBUINT16>
@@ -185,7 +185,7 @@ struct SBIXStrike
HBUINT16 resolution; /* The device pixel density (in PPI) for which this
* strike was designed. (E.g., 96 PPI, 192 PPI.) */
protected:
UnsizedArrayOf<LOffsetTo<SBIXGlyph>>
UnsizedArrayOf<Offset32To<SBIXGlyph>>
imageOffsetsZ; /* Offset from the beginning of the strike data header
* to bitmap data for an individual glyph ID. */
public:

@@ -352,11 +352,11 @@ struct sbix
{
TRACE_SERIALIZE (this);

auto *out = c->serializer->start_embed<LOffsetLArrayOf<SBIXStrike>> ();
auto *out = c->serializer->start_embed<Array32OfOffset32To<SBIXStrike>> ();
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

hb_vector_t<LOffsetTo<SBIXStrike>*> new_strikes;
hb_vector_t<Offset32To<SBIXStrike>*> new_strikes;
hb_vector_t<hb_serialize_context_t::objidx_t> objidxs;
for (int i = strikes.len - 1; i >= 0; --i)
{

@@ -400,7 +400,7 @@ struct sbix
HBUINT16 version; /* Table version number — set to 1 */
HBUINT16 flags; /* Bit 0: Set to 1. Bit 1: Draw outlines.
* Bits 2 to 15: reserved (set to 0). */
LOffsetLArrayOf<SBIXStrike>
Array32OfOffset32To<SBIXStrike>
strikes; /* Offsets from the beginning of the 'sbix'
* table to data for each individual bitmap strike. */
public:
@@ -62,7 +62,7 @@ struct SVGDocumentIndexEntry
* this index entry. */
HBUINT16 endGlyphID; /* The last glyph ID in the range described by
* this index entry. Must be >= startGlyphID. */
LNNOffsetTo<UnsizedArrayOf<HBUINT8>>
NNOffset32To<UnsizedArrayOf<HBUINT8>>
svgDoc; /* Offset from the beginning of the SVG Document Index
* to an SVG document. Must be non-zero. */
HBUINT32 svgDocLength; /* Length of the SVG document.

@@ -107,7 +107,7 @@ struct SVG

protected:
HBUINT16 version; /* Table version (starting at 0). */
LOffsetTo<SortedArrayOf<SVGDocumentIndexEntry>>
Offset32To<SortedArray16Of<SVGDocumentIndexEntry>>
svgDocEntries; /* Offset (relative to the start of the SVG table) to the
* SVG Documents Index. Must be non-zero. */
/* Array of SVG Document Index Entries. */
@@ -71,7 +71,7 @@ struct gasp

protected:
HBUINT16 version; /* Version number (set to 1) */
ArrayOf<GaspRange>
Array16Of<GaspRange>
gaspRanges; /* Number of records to follow
* Sorted by ppem */
public:
@@ -110,7 +110,7 @@ struct hdmx
for (const hb_item_type<Iterator>& _ : +it)
c->start_embed<DeviceRecord> ()->serialize (c, _.first, _.second);

return_trace (c->successful);
return_trace (c->successful ());
}
@@ -146,7 +146,7 @@ struct hmtxvmtx

_mtx.fini ();

if (unlikely (c->serializer->ran_out_of_room || c->serializer->in_error ()))
if (unlikely (c->serializer->in_error ()))
return_trace (false);

// Amend header num hmetrics
@@ -103,7 +103,7 @@ struct BaseCoordFormat3
protected:
HBUINT16 format; /* Format identifier--format = 3 */
FWORD coordinate; /* X or Y value, in design units */
OffsetTo<Device>
Offset16To<Device>
deviceTable; /* Offset to Device table for X or
* Y value, from beginning of
* BaseCoord table (may be NULL). */

@@ -173,11 +173,11 @@ struct FeatMinMaxRecord
protected:
Tag tag; /* 4-byte feature identification tag--must
* match feature tag in FeatureList */
OffsetTo<BaseCoord>
Offset16To<BaseCoord>
minCoord; /* Offset to BaseCoord table that defines
* the minimum extent value, from beginning
* of MinMax table (may be NULL) */
OffsetTo<BaseCoord>
Offset16To<BaseCoord>
maxCoord; /* Offset to BaseCoord table that defines
* the maximum extent value, from beginning
* of MinMax table (may be NULL) */

@@ -212,15 +212,15 @@ struct MinMax
}

protected:
OffsetTo<BaseCoord>
Offset16To<BaseCoord>
minCoord; /* Offset to BaseCoord table that defines
* minimum extent value, from the beginning
* of MinMax table (may be NULL) */
OffsetTo<BaseCoord>
Offset16To<BaseCoord>
maxCoord; /* Offset to BaseCoord table that defines
* maximum extent value, from the beginning
* of MinMax table (may be NULL) */
SortedArrayOf<FeatMinMaxRecord>
SortedArray16Of<FeatMinMaxRecord>
featMinMaxRecords;
/* Array of FeatMinMaxRecords, in alphabetical
* order by featureTableTag */

@@ -247,7 +247,7 @@ struct BaseValues
Index defaultIndex; /* Index number of default baseline for this
* script — equals index position of baseline tag
* in baselineTags array of the BaseTagList */
OffsetArrayOf<BaseCoord>
Array16OfOffset16To<BaseCoord>
baseCoords; /* Number of BaseCoord tables defined — should equal
* baseTagCount in the BaseTagList
*

@@ -275,7 +275,7 @@ struct BaseLangSysRecord

protected:
Tag baseLangSysTag; /* 4-byte language system identification tag */
OffsetTo<MinMax>
Offset16To<MinMax>
minMax; /* Offset to MinMax table, from beginning
* of BaseScript table */
public:

@@ -305,13 +305,13 @@ struct BaseScript
}

protected:
OffsetTo<BaseValues>
Offset16To<BaseValues>
baseValues; /* Offset to BaseValues table, from beginning
* of BaseScript table (may be NULL) */
OffsetTo<MinMax>
Offset16To<MinMax>
defaultMinMax; /* Offset to MinMax table, from beginning of
* BaseScript table (may be NULL) */
SortedArrayOf<BaseLangSysRecord>
SortedArray16Of<BaseLangSysRecord>
baseLangSysRecords;
/* Number of BaseLangSysRecords
* defined — may be zero (0) */

@@ -339,7 +339,7 @@ struct BaseScriptRecord

protected:
Tag baseScriptTag; /* 4-byte script identification tag */
OffsetTo<BaseScript>
Offset16To<BaseScript>
baseScript; /* Offset to BaseScript table, from beginning
* of BaseScriptList */

@@ -364,7 +364,7 @@ struct BaseScriptList
}

protected:
SortedArrayOf<BaseScriptRecord>
SortedArray16Of<BaseScriptRecord>
baseScriptRecords;

public:

@@ -426,12 +426,12 @@ struct Axis
}

protected:
OffsetTo<SortedArrayOf<Tag>>
Offset16To<SortedArray16Of<Tag>>
baseTagList; /* Offset to BaseTagList table, from beginning
* of Axis table (may be NULL)
* Array of 4-byte baseline identification tags — must
* be in alphabetical order */
OffsetTo<BaseScriptList>
Offset16To<BaseScriptList>
baseScriptList; /* Offset to BaseScriptList table, from beginning
* of Axis table
* Array of BaseScriptRecords, in alphabetical order

@@ -501,11 +501,11 @@ struct BASE

protected:
FixedVersion<>version; /* Version of the BASE table */
OffsetTo<Axis>hAxis; /* Offset to horizontal Axis table, from beginning
Offset16To<Axis>hAxis; /* Offset to horizontal Axis table, from beginning
* of BASE table (may be NULL) */
OffsetTo<Axis>vAxis; /* Offset to vertical Axis table, from beginning
Offset16To<Axis>vAxis; /* Offset to vertical Axis table, from beginning
* of BASE table (may be NULL) */
LOffsetTo<VariationStore>
Offset32To<VariationStore>
varStore; /* Offset to the table of Item Variation
* Store--from beginning of BASE
* header (may be NULL). Introduced
@@ -88,12 +88,66 @@ static inline void ClassDef_serialize (hb_serialize_context_t *c,
Iterator it);

static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
const hb_set_t &glyphset,
const hb_map_t &gid_klass_map,
hb_sorted_vector_t<HBGlyphID> &glyphs,
const hb_set_t &klasses,
bool use_class_zero,
hb_map_t *klass_map /*INOUT*/);


struct hb_prune_langsys_context_t
{
hb_prune_langsys_context_t (const void *table_,
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
const hb_map_t *duplicate_feature_map_,
hb_set_t *new_collected_feature_indexes_)
:table (table_),
script_langsys_map (script_langsys_map_),
duplicate_feature_map (duplicate_feature_map_),
new_feature_indexes (new_collected_feature_indexes_),
script_count (0),langsys_count (0) {}

bool visitedScript (const void *s)
{
if (script_count++ > HB_MAX_SCRIPTS)
return true;

return visited (s, visited_script);
}

bool visitedLangsys (const void *l)
{
if (langsys_count++ > HB_MAX_LANGSYS)
return true;

return visited (l, visited_langsys);
}

private:
template <typename T>
bool visited (const T *p, hb_set_t &visited_set)
{
hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) p - (uintptr_t) table);
if (visited_set.has (delta))
return true;

visited_set.add (delta);
return false;
}

public:
const void *table;
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
const hb_map_t *duplicate_feature_map;
hb_set_t *new_feature_indexes;

private:
hb_set_t visited_script;
hb_set_t visited_langsys;
unsigned script_count;
unsigned langsys_count;
};

struct hb_subset_layout_context_t :
hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{

@@ -125,16 +179,21 @@ struct hb_subset_layout_context_t :
hb_subset_context_t *subset_context;
const hb_tag_t table_tag;
const hb_map_t *lookup_index_map;
const hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
const hb_map_t *feature_index_map;
unsigned cur_script_index;

hb_subset_layout_context_t (hb_subset_context_t *c_,
hb_tag_t tag_,
hb_map_t *lookup_map_,
hb_map_t *feature_map_) :
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
hb_map_t *feature_index_map_) :
subset_context (c_),
table_tag (tag_),
lookup_index_map (lookup_map_),
feature_index_map (feature_map_),
script_langsys_map (script_langsys_map_),
feature_index_map (feature_index_map_),
cur_script_index (0xFFFFu),
script_count (0),
langsys_count (0),
feature_index_count (0),
@ -325,7 +384,7 @@ struct Record
|
|||
}
|
||||
|
||||
Tag tag; /* 4-byte Tag identifier */
|
||||
OffsetTo<Type>
|
||||
Offset16To<Type>
|
||||
offset; /* Offset from beginning of object holding
|
||||
* the Record */
|
||||
public:
|
||||
|
@ -333,11 +392,11 @@ struct Record
|
|||
};
|
||||
|
||||
template <typename Type>
|
||||
struct RecordArrayOf : SortedArrayOf<Record<Type>>
|
||||
struct RecordArrayOf : SortedArray16Of<Record<Type>>
|
||||
{
|
||||
const OffsetTo<Type>& get_offset (unsigned int i) const
|
||||
const Offset16To<Type>& get_offset (unsigned int i) const
|
||||
{ return (*this)[i].offset; }
|
||||
OffsetTo<Type>& get_offset (unsigned int i)
|
||||
Offset16To<Type>& get_offset (unsigned int i)
|
||||
{ return (*this)[i].offset; }
|
||||
const Tag& get_tag (unsigned int i) const
|
||||
{ return (*this)[i].tag; }
|
||||
|
@ -407,6 +466,30 @@ struct RecordListOfFeature : RecordListOf<Feature>
|
|||
}
|
||||
};
|
||||
|
||||
struct Script;
|
||||
struct RecordListOfScript : RecordListOf<Script>
|
||||
{
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_subset_layout_context_t *l) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
unsigned count = this->len;
|
||||
for (auto _ : + hb_zip (*this, hb_range (count)))
|
||||
{
|
||||
auto snap = c->serializer->snapshot ();
|
||||
l->cur_script_index = _.second;
|
||||
bool ret = _.first.subset (l, this);
|
||||
if (!ret) c->serializer->revert (snap);
|
||||
else out->len++;
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
};
|
||||
|
||||
struct RangeRecord
|
||||
{
|
||||
int cmp (hb_codepoint_t g) const
|
||||
|
@ -434,7 +517,7 @@ struct RangeRecord
|
|||
DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);
|
||||
|
||||
|
||||
struct IndexArray : ArrayOf<Index>
|
||||
struct IndexArray : Array16Of<Index>
|
||||
{
|
||||
bool intersects (const hb_map_t *indexes) const
|
||||
{ return hb_any (*this, indexes); }
|
||||
|
@ -474,7 +557,7 @@ struct IndexArray : ArrayOf<Index>
|
|||
|
||||
void add_indexes_to (hb_set_t* output /* OUT */) const
|
||||
{
|
||||
output->add_array (arrayZ, len);
|
||||
output->add_array (as_array ());
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -506,18 +589,46 @@ struct LangSys
|
|||
return_trace (c->embed (*this));
|
||||
}
|
||||
|
||||
bool operator == (const LangSys& o) const
|
||||
bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
|
||||
{
|
||||
if (featureIndex.len != o.featureIndex.len ||
|
||||
reqFeatureIndex != o.reqFeatureIndex)
|
||||
if (reqFeatureIndex != o.reqFeatureIndex)
|
||||
return false;
|
||||
|
||||
for (const auto _ : + hb_zip (featureIndex, o.featureIndex))
|
||||
auto iter =
|
||||
+ hb_iter (featureIndex)
|
||||
| hb_filter (feature_index_map)
|
||||
| hb_map (feature_index_map)
|
||||
;
|
||||
|
||||
auto o_iter =
|
||||
+ hb_iter (o.featureIndex)
|
||||
| hb_filter (feature_index_map)
|
||||
| hb_map (feature_index_map)
|
||||
;
|
||||
|
||||
if (iter.len () != o_iter.len ())
|
||||
return false;
|
||||
|
||||
for (const auto _ : + hb_zip (iter, o_iter))
|
||||
if (_.first != _.second) return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void collect_features (hb_prune_langsys_context_t *c) const
|
||||
{
|
||||
if (!has_required_feature () && !get_feature_count ()) return;
|
||||
if (c->visitedLangsys (this)) return;
|
||||
if (has_required_feature () &&
|
||||
c->duplicate_feature_map->has (reqFeatureIndex))
|
||||
c->new_feature_indexes->add (get_required_feature_index ());
|
||||
|
||||
+ hb_iter (featureIndex)
|
||||
| hb_filter (c->duplicate_feature_map)
|
||||
| hb_sink (c->new_feature_indexes)
|
||||
;
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_subset_layout_context_t *l,
|
||||
const Tag *tag = nullptr) const
|
||||
|
@ -581,6 +692,49 @@ struct Script
|
|||
bool has_default_lang_sys () const { return defaultLangSys != 0; }
|
||||
const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }
|
||||
|
||||
void prune_langsys (hb_prune_langsys_context_t *c,
|
||||
unsigned script_index) const
|
||||
{
|
||||
if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
|
||||
if (c->visitedScript (this)) return;
|
||||
|
||||
if (!c->script_langsys_map->has (script_index))
|
||||
{
|
||||
hb_set_t* empty_set = hb_set_create ();
|
||||
if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
|
||||
{
|
||||
hb_set_destroy (empty_set);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
unsigned langsys_count = get_lang_sys_count ();
|
||||
if (has_default_lang_sys ())
|
||||
{
|
||||
//only collect features from non-redundant langsys
|
||||
const LangSys& d = get_default_lang_sys ();
|
||||
d.collect_features (c);
|
||||
|
||||
for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
|
||||
{
|
||||
const LangSys& l = this+_.first.offset;
|
||||
if (l.compare (d, c->duplicate_feature_map)) continue;
|
||||
|
||||
l.collect_features (c);
|
||||
c->script_langsys_map->get (script_index)->add (_.second);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
|
||||
{
|
||||
const LangSys& l = this+_.first.offset;
|
||||
l.collect_features (c);
|
||||
c->script_langsys_map->get (script_index)->add (_.second);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_subset_layout_context_t *l,
|
||||
const Tag *tag) const
|
||||
|
@ -609,16 +763,17 @@ struct Script
|
|||
}
|
||||
}
|
||||
|
||||
+ langSys.iter ()
|
||||
| hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
|
||||
| hb_filter ([&] (const Record<LangSys>& record)
|
||||
{
|
||||
const LangSys& d = this+defaultLangSys;
|
||||
const LangSys& l = this+record.offset;
|
||||
return !(l == d);
|
||||
})
|
||||
| hb_apply (subset_record_array (l, &(out->langSys), this))
|
||||
;
|
||||
const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
|
||||
if (active_langsys)
|
||||
{
|
||||
unsigned count = langSys.len;
|
||||
+ hb_zip (langSys, hb_range (count))
|
||||
| hb_filter (active_langsys, hb_second)
|
||||
| hb_map (hb_first)
|
||||
| hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
|
||||
| hb_apply (subset_record_array (l, &(out->langSys), this))
|
||||
;
|
||||
}
|
||||
|
||||
return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
|
||||
}
|
||||
|
@ -631,7 +786,7 @@ struct Script
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<LangSys>
|
||||
Offset16To<LangSys>
|
||||
defaultLangSys; /* Offset to DefaultLangSys table--from
|
||||
* beginning of Script table--may be Null */
|
||||
RecordArrayOf<LangSys>
|
||||
|
@ -641,7 +796,7 @@ struct Script
|
|||
DEFINE_SIZE_ARRAY_SIZED (4, langSys);
|
||||
};
|
||||
|
||||
typedef RecordListOf<Script> ScriptList;
|
||||
typedef RecordListOfScript ScriptList;
|
||||
|
||||
|
||||
/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
|
||||
|
@ -856,7 +1011,7 @@ struct FeatureParamsCharacterVariants
|
|||
* user-interface labels for the
|
||||
* feature parameters. (Must be zero
|
||||
* if numParameters is zero.) */
|
||||
ArrayOf<HBUINT24>
|
||||
Array16Of<HBUINT24>
|
||||
characters; /* Array of the Unicode Scalar Value
|
||||
* of the characters for which this
|
||||
* feature provides glyph variants.
|
||||
|
@ -953,7 +1108,7 @@ struct Feature
|
|||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
bool subset_featureParams = out->featureParams.serialize_subset (c, featureParams, this, tag);
|
||||
out->featureParams.serialize_subset (c, featureParams, this, tag);
|
||||
|
||||
auto it =
|
||||
+ hb_iter (lookupIndex)
|
||||
|
@ -962,8 +1117,9 @@ struct Feature
|
|||
;
|
||||
|
||||
out->lookupIndex.serialize (c->serializer, l, it);
|
||||
return_trace (bool (it) || subset_featureParams
|
||||
|| (tag && *tag == HB_TAG ('p', 'r', 'e', 'f')));
|
||||
// The decision to keep or drop this feature is already made before we get here
|
||||
// so always retain it.
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c,
|
||||
|
@ -998,7 +1154,7 @@ struct Feature
|
|||
unsigned int new_offset_int = orig_offset -
|
||||
(((char *) this) - ((char *) closure->list_base));
|
||||
|
||||
OffsetTo<FeatureParams> new_offset;
|
||||
Offset16To<FeatureParams> new_offset;
|
||||
/* Check that it would not overflow. */
|
||||
new_offset = new_offset_int;
|
||||
if (new_offset == new_offset_int &&
|
||||
|
@ -1010,7 +1166,7 @@ struct Feature
|
|||
return_trace (true);
|
||||
}
|
||||
|
||||
OffsetTo<FeatureParams>
|
||||
Offset16To<FeatureParams>
|
||||
featureParams; /* Offset to Feature Parameters table (if one
|
||||
* has been defined for the feature), relative
|
||||
* to the beginning of the Feature Table; = Null
|
||||
|
@ -1049,11 +1205,11 @@ struct Lookup
|
|||
unsigned int get_subtable_count () const { return subTable.len; }
|
||||
|
||||
template <typename TSubTable>
|
||||
const OffsetArrayOf<TSubTable>& get_subtables () const
|
||||
{ return reinterpret_cast<const OffsetArrayOf<TSubTable> &> (subTable); }
|
||||
const Array16OfOffset16To<TSubTable>& get_subtables () const
|
||||
{ return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
|
||||
template <typename TSubTable>
|
||||
OffsetArrayOf<TSubTable>& get_subtables ()
|
||||
{ return reinterpret_cast<OffsetArrayOf<TSubTable> &> (subTable); }
|
||||
Array16OfOffset16To<TSubTable>& get_subtables ()
|
||||
{ return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }
|
||||
|
||||
template <typename TSubTable>
|
||||
const TSubTable& get_subtable (unsigned int i) const
|
||||
|
@ -1131,10 +1287,18 @@ struct Lookup
|
|||
const hb_set_t *glyphset = c->plan->glyphset_gsub ();
|
||||
unsigned int lookup_type = get_type ();
|
||||
+ hb_iter (get_subtables <TSubTable> ())
|
||||
| hb_filter ([this, glyphset, lookup_type] (const OffsetTo<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
|
||||
| hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
|
||||
| hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
|
||||
;
|
||||
|
||||
if (lookupFlag & LookupFlag::UseMarkFilteringSet)
|
||||
{
|
||||
if (unlikely (!c->serializer->extend (out))) return_trace (false);
|
||||
const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
|
||||
HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
|
||||
outMarkFilteringSet = markFilteringSet;
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
@ -1179,7 +1343,7 @@ struct Lookup
|
|||
private:
|
||||
HBUINT16 lookupType; /* Different enumerations for GSUB and GPOS */
|
||||
HBUINT16 lookupFlag; /* Lookup qualifiers */
|
||||
ArrayOf<Offset16>
|
||||
Array16Of<Offset16>
|
||||
subTable; /* Array of SubTables */
|
||||
/*HBUINT16 markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
|
||||
* structure. This field is only present if bit
|
||||
|
@ -1188,10 +1352,10 @@ struct Lookup
|
|||
DEFINE_SIZE_ARRAY (6, subTable);
|
||||
};
|
||||
|
||||
typedef OffsetListOf<Lookup> LookupList;
|
||||
typedef List16OfOffset16To<Lookup> LookupList;
|
||||
|
||||
template <typename TLookup>
|
||||
struct LookupOffsetList : OffsetListOf<TLookup>
|
||||
struct LookupOffsetList : List16OfOffset16To<TLookup>
|
||||
{
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_subset_layout_context_t *l) const
|
||||
|
@ -1212,7 +1376,7 @@ struct LookupOffsetList : OffsetListOf<TLookup>
|
|||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (OffsetListOf<TLookup>::sanitize (c, this));
|
||||
return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1250,19 +1414,25 @@ struct CoverageFormat1
|
|||
bool intersects (const hb_set_t *glyphs) const
|
||||
{
|
||||
/* TODO Speed up, using hb_set_next() and bsearch()? */
|
||||
unsigned int count = glyphArray.len;
|
||||
const HBGlyphID *arr = glyphArray.arrayZ;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (glyphs->has (arr[i]))
|
||||
for (const auto& g : glyphArray.as_array ())
|
||||
if (glyphs->has (g))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
|
||||
{ return glyphs->has (glyphArray[index]); }
|
||||
|
||||
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
|
||||
{
|
||||
unsigned count = glyphArray.len;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
if (glyphs->has (glyphArray[i]))
|
||||
intersect_glyphs->add (glyphArray[i]);
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
bool collect_coverage (set_t *glyphs) const
|
||||
{ return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len); }
|
||||
{ return glyphs->add_sorted_array (glyphArray.as_array ()); }
|
||||
|
||||
public:
|
||||
/* Older compilers need this to be public. */
|
||||
|
@ -1284,7 +1454,7 @@ struct CoverageFormat1
|
|||
|
||||
protected:
|
||||
HBUINT16 coverageFormat; /* Format identifier--format = 1 */
|
||||
SortedArrayOf<HBGlyphID>
|
||||
SortedArray16Of<HBGlyphID>
|
||||
glyphArray; /* Array of GlyphIDs--in numerical order */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (4, glyphArray);
|
||||
|
@ -1358,20 +1528,16 @@ struct CoverageFormat2
|
|||
{
|
||||
/* TODO Speed up, using hb_set_next() and bsearch()? */
|
||||
/* TODO(iter) Rewrite as dagger. */
|
||||
unsigned count = rangeRecord.len;
|
||||
const RangeRecord *arr = rangeRecord.arrayZ;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
if (arr[i].intersects (glyphs))
|
||||
for (const auto& range : rangeRecord.as_array ())
|
||||
if (range.intersects (glyphs))
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
|
||||
{
|
||||
/* TODO(iter) Rewrite as dagger. */
|
||||
unsigned count = rangeRecord.len;
|
||||
const RangeRecord *arr = rangeRecord.arrayZ;
|
||||
for (unsigned i = 0; i < count; i++) {
|
||||
const RangeRecord &range = arr[i];
|
||||
for (const auto& range : rangeRecord.as_array ())
|
||||
{
|
||||
if (range.value <= index &&
|
||||
index < (unsigned int) range.value + (range.last - range.first) &&
|
||||
range.intersects (glyphs))
|
||||
|
@ -1382,6 +1548,16 @@ struct CoverageFormat2
|
|||
return false;
|
||||
}
|
||||
|
||||
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
|
||||
{
|
||||
for (const auto& range : rangeRecord.as_array ())
|
||||
{
|
||||
if (!range.intersects (glyphs)) continue;
|
||||
for (hb_codepoint_t g = range.first; g <= range.last; g++)
|
||||
if (glyphs->has (g)) intersect_glyphs->add (g);
|
||||
}
|
||||
}
|
||||
|
||||
template <typename set_t>
|
||||
bool collect_coverage (set_t *glyphs) const
|
||||
{
|
||||
|
@ -1448,7 +1624,7 @@ struct CoverageFormat2
|
|||
|
||||
protected:
|
||||
HBUINT16 coverageFormat; /* Format identifier--format = 2 */
|
||||
SortedArrayOf<RangeRecord>
|
||||
SortedArray16Of<RangeRecord>
|
||||
rangeRecord; /* Array of glyph ranges--ordered by
|
||||
* Start GlyphID. rangeCount entries
|
||||
* long */
|
||||
|
@ -1564,6 +1740,16 @@ struct Coverage
|
|||
}
|
||||
}
|
||||
|
||||
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
|
||||
{
|
||||
switch (u.format)
|
||||
{
|
||||
case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
|
||||
case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
|
||||
default:return ;
|
||||
}
|
||||
}
|
||||
|
||||
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
|
||||
{
|
||||
static constexpr bool is_sorted_iterator = true;
|
||||
|
@ -1645,10 +1831,10 @@ Coverage_serialize (hb_serialize_context_t *c,
|
|||
{ c->start_embed<Coverage> ()->serialize (c, it); }
|
||||
|
||||
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
||||
const hb_set_t &glyphset,
|
||||
const hb_map_t &gid_klass_map,
|
||||
hb_sorted_vector_t<HBGlyphID> &glyphs,
|
||||
const hb_set_t &klasses,
|
||||
bool use_class_zero,
|
||||
hb_map_t *klass_map /*INOUT*/)
|
||||
{
|
||||
if (!klass_map)
|
||||
|
@ -1660,7 +1846,7 @@ static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
|||
|
||||
/* any glyph not assigned a class value falls into Class zero (0),
|
||||
* if any glyph assigned to class 0, remapping must start with 0->0*/
|
||||
if (glyphset.get_population () > gid_klass_map.get_population ())
|
||||
if (!use_class_zero)
|
||||
klass_map->set (0, 0);
|
||||
|
||||
unsigned idx = klass_map->has (0) ? 1 : 0;
|
||||
|
@ -1708,6 +1894,7 @@ struct ClassDefFormat1
|
|||
|
||||
if (unlikely (!it))
|
||||
{
|
||||
classFormat = 1;
|
||||
startGlyph = 0;
|
||||
classValue.len = 0;
|
||||
return_trace (true);
|
||||
|
@ -1730,7 +1917,10 @@ struct ClassDefFormat1
|
|||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_map_t *klass_map = nullptr /*OUT*/) const
|
||||
hb_map_t *klass_map = nullptr /*OUT*/,
|
||||
bool keep_empty_table = true,
|
||||
bool use_class_zero = true,
|
||||
const Coverage* glyph_filter = nullptr) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
|
@ -1742,9 +1932,12 @@ struct ClassDefFormat1
|
|||
|
||||
hb_codepoint_t start = startGlyph;
|
||||
hb_codepoint_t end = start + classValue.len;
|
||||
|
||||
for (const hb_codepoint_t gid : + hb_range (start, end)
|
||||
| hb_filter (glyphset))
|
||||
| hb_filter (glyphset))
|
||||
{
|
||||
if (glyph_filter && !glyph_filter->has(gid)) continue;
|
||||
|
||||
unsigned klass = classValue[gid - start];
|
||||
if (!klass) continue;
|
||||
|
||||
|
@ -1753,9 +1946,13 @@ struct ClassDefFormat1
|
|||
orig_klasses.add (klass);
|
||||
}
|
||||
|
||||
ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
|
||||
glyphs, orig_klasses, klass_map);
|
||||
return_trace ((bool) glyphs);
|
||||
unsigned glyph_count = glyph_filter
|
||||
? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
|
||||
: glyphset.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
|
||||
ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
|
||||
glyphs, orig_klasses, use_class_zero, klass_map);
|
||||
return_trace (keep_empty_table || (bool) glyphs);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -1829,10 +2026,28 @@ struct ClassDefFormat1
|
|||
return false;
|
||||
}
|
||||
|
||||
void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
|
||||
{
|
||||
unsigned count = classValue.len;
|
||||
if (klass == 0)
|
||||
{
|
||||
hb_codepoint_t endGlyph = startGlyph + count -1;
|
||||
for (hb_codepoint_t g : glyphs->iter ())
|
||||
if (g < startGlyph || g > endGlyph)
|
||||
intersect_glyphs->add (g);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
if (classValue[i] == klass && glyphs->has (startGlyph + i))
|
||||
intersect_glyphs->add (startGlyph + i);
|
||||
}
|
||||
|
||||
protected:
|
||||
HBUINT16 classFormat; /* Format identifier--format = 1 */
|
||||
HBGlyphID startGlyph; /* First GlyphID of the classValueArray */
|
||||
ArrayOf<HBUINT16>
|
||||
Array16Of<HBUINT16>
|
||||
classValue; /* Array of Class Values--one per GlyphID */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (6, classValue);
|
||||
|
@ -1858,6 +2073,7 @@ struct ClassDefFormat2
|
|||
|
||||
if (unlikely (!it))
|
||||
{
|
||||
classFormat = 2;
|
||||
rangeRecord.len = 0;
|
||||
return_trace (true);
|
||||
}
|
||||
|
@ -1903,7 +2119,10 @@ struct ClassDefFormat2
|
|||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_map_t *klass_map = nullptr /*OUT*/) const
|
||||
hb_map_t *klass_map = nullptr /*OUT*/,
|
||||
bool keep_empty_table = true,
|
||||
bool use_class_zero = true,
|
||||
const Coverage* glyph_filter = nullptr) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
|
@ -1923,15 +2142,20 @@ struct ClassDefFormat2
|
|||
for (hb_codepoint_t g = start; g < end; g++)
|
||||
{
|
||||
if (!glyphset.has (g)) continue;
|
||||
if (glyph_filter && !glyph_filter->has (g)) continue;
|
||||
glyphs.push (glyph_map[g]);
|
||||
gid_org_klass_map.set (glyph_map[g], klass);
|
||||
orig_klasses.add (klass);
|
||||
}
|
||||
}
|
||||
|
||||
ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
|
||||
glyphs, orig_klasses, klass_map);
|
||||
return_trace ((bool) glyphs);
|
||||
unsigned glyph_count = glyph_filter
|
||||
? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
|
||||
: glyphset.get_population ();
|
||||
use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
|
||||
ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
|
||||
glyphs, orig_klasses, use_class_zero, klass_map);
|
||||
return_trace (keep_empty_table || (bool) glyphs);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -2005,9 +2229,57 @@ struct ClassDefFormat2
|
|||
return false;
|
||||
}
|
||||
|
||||
void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
|
||||
{
|
||||
unsigned count = rangeRecord.len;
|
||||
if (klass == 0)
|
||||
{
|
||||
hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
if (!hb_set_next (glyphs, &g))
|
||||
break;
|
||||
while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
|
||||
{
|
||||
intersect_glyphs->add (g);
|
||||
hb_set_next (glyphs, &g);
|
||||
}
|
||||
g = rangeRecord[i].last;
|
||||
}
|
||||
while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
|
||||
intersect_glyphs->add (g);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
hb_codepoint_t g = HB_SET_VALUE_INVALID;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
if (rangeRecord[i].value != klass) continue;
|
||||
|
||||
if (g != HB_SET_VALUE_INVALID)
|
||||
{
|
||||
if (g >= rangeRecord[i].first &&
|
||||
g <= rangeRecord[i].last)
|
||||
intersect_glyphs->add (g);
|
||||
if (g > rangeRecord[i].last)
|
||||
continue;
|
||||
}
|
||||
|
||||
g = rangeRecord[i].first - 1;
|
||||
while (hb_set_next (glyphs, &g))
|
||||
{
|
||||
if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
|
||||
intersect_glyphs->add (g);
|
||||
else if (g > rangeRecord[i].last)
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected:
|
||||
HBUINT16 classFormat; /* Format identifier--format = 2 */
|
||||
SortedArrayOf<RangeRecord>
|
||||
SortedArray16Of<RangeRecord>
|
||||
rangeRecord; /* Array of glyph ranges--ordered by
|
||||
* Start GlyphID */
|
||||
public:
|
||||
|
@ -2036,19 +2308,20 @@ struct ClassDef
|
|||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator it)
|
||||
bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
|
||||
auto it = + it_with_class_zero | hb_filter (hb_second);
|
||||
|
||||
unsigned format = 2;
|
||||
if (likely (it))
|
||||
{
|
||||
hb_codepoint_t glyph_min = (*it).first;
|
||||
hb_codepoint_t glyph_max = + it
|
||||
| hb_map (hb_first)
|
||||
| hb_reduce (hb_max, 0u);
|
||||
hb_codepoint_t glyph_max = glyph_min;
|
||||
|
||||
unsigned num_glyphs = 0;
|
||||
unsigned num_ranges = 1;
|
||||
hb_codepoint_t prev_gid = glyph_min;
|
||||
unsigned prev_klass = (*it).second;
|
||||
|
@ -2057,7 +2330,9 @@ struct ClassDef
|
|||
{
|
||||
hb_codepoint_t cur_gid = gid_klass_pair.first;
|
||||
unsigned cur_klass = gid_klass_pair.second;
|
||||
if (cur_gid == glyph_min || !cur_klass) continue;
|
||||
num_glyphs++;
|
||||
if (cur_gid == glyph_min) continue;
|
||||
if (cur_gid > glyph_max) glyph_max = cur_gid;
|
||||
if (cur_gid != prev_gid + 1 ||
|
||||
cur_klass != prev_klass)
|
||||
num_ranges++;
|
||||
|
@ -2066,7 +2341,7 @@ struct ClassDef
|
|||
prev_klass = cur_klass;
|
||||
}
|
||||
|
||||
if (1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
|
||||
if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
|
||||
format = 1;
|
||||
}
|
||||
u.format = format;
|
||||
|
@ -2080,12 +2355,15 @@ struct ClassDef
|
|||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_map_t *klass_map = nullptr /*OUT*/) const
|
||||
hb_map_t *klass_map = nullptr /*OUT*/,
|
||||
bool keep_empty_table = true,
|
||||
bool use_class_zero = true,
|
||||
const Coverage* glyph_filter = nullptr) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
switch (u.format) {
|
||||
case 1: return_trace (u.format1.subset (c, klass_map));
|
||||
case 2: return_trace (u.format2.subset (c, klass_map));
|
||||
case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
|
||||
case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
|
||||
default:return_trace (false);
|
||||
}
|
||||
}
|
||||
|
@ -2142,6 +2420,15 @@ struct ClassDef
|
|||
}
|
||||
}
|
||||
|
||||
void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
|
||||
{
|
||||
switch (u.format) {
|
||||
case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
|
||||
case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
|
||||
default:return;
|
||||
}
|
||||
}
|
||||
|
||||
protected:
|
||||
union {
|
||||
HBUINT16 format; /* Format identifier */
|
||||
|
@ -2439,7 +2726,7 @@ struct VarData
|
|||
protected:
|
||||
HBUINT16 itemCount;
|
||||
HBUINT16 shortCount;
|
||||
ArrayOf<HBUINT16> regionIndices;
|
||||
Array16Of<HBUINT16> regionIndices;
|
||||
/*UnsizedArrayOf<HBUINT8>bytesX;*/
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (6, regionIndices);
|
||||
|
@ -2447,6 +2734,7 @@ struct VarData
|
|||
|
||||
struct VariationStore
|
||||
{
|
||||
private:
|
||||
float get_delta (unsigned int outer, unsigned int inner,
|
||||
const int *coords, unsigned int coord_count) const
|
||||
{
|
||||
|
@ -2462,6 +2750,7 @@ struct VariationStore
|
|||
this+regions);
|
||||
}
|
||||
|
||||
public:
|
||||
float get_delta (unsigned int index,
|
||||
const int *coords, unsigned int coord_count) const
|
||||
{
|
||||
|
@ -2505,7 +2794,7 @@ struct VariationStore
|
|||
.serialize (c, &(src+src->regions), region_map))) return_trace (false);
|
||||
|
||||
/* TODO: The following code could be simplified when
|
||||
* OffsetListOf::subset () can take a custom param to be passed to VarData::serialize ()
|
||||
* List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize ()
|
||||
*/
|
||||
dataSets.len = set_count;
|
||||
unsigned int set_index = 0;
|
||||
|
@ -2580,8 +2869,8 @@ struct VariationStore
|
|||
|
||||
protected:
|
||||
HBUINT16 format;
|
||||
LOffsetTo<VarRegionList> regions;
|
||||
LOffsetArrayOf<VarData> dataSets;
|
||||
Offset32To<VarRegionList> regions;
|
||||
Array16OfOffset32To<VarData> dataSets;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (8, dataSets);
|
||||
};
|
||||
|
@ -2684,7 +2973,8 @@ struct ConditionSet
|
|||
+ conditions.iter ()
|
||||
| hb_apply (subset_offset_array (c, out->conditions, this))
|
||||
;
|
||||
return_trace (true);
|
||||
|
||||
return_trace (bool (out->conditions));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -2694,7 +2984,7 @@ struct ConditionSet
|
|||
}
|
||||
|
||||
protected:
|
||||
LOffsetArrayOf<Condition> conditions;
|
||||
Array16OfOffset32To<Condition> conditions;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (2, conditions);
|
||||
};
|
||||
|
@ -2719,6 +3009,12 @@ struct FeatureTableSubstitutionRecord
|
|||
bool subset (hb_subset_layout_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
if (!c->feature_index_map->has (featureIndex)) {
|
||||
// Feature that is being substituted is not being retained, so we don't
|
||||
// need this.
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
auto *out = c->subset_context->serializer->embed (this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
|
@ -2735,7 +3031,7 @@ struct FeatureTableSubstitutionRecord
|
|||
|
||||
protected:
|
||||
HBUINT16 featureIndex;
|
||||
LOffsetTo<Feature> feature;
|
||||
Offset32To<Feature> feature;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (6);
|
||||
};
|
||||
|
@ -2771,6 +3067,15 @@ struct FeatureTableSubstitution
|
|||
record.closure_features (this, lookup_indexes, feature_indexes);
|
||||
}
|
||||
|
||||
bool intersects_features (const hb_map_t *feature_index_map) const
|
||||
{
|
||||
for (const FeatureTableSubstitutionRecord& record : substitutions)
|
||||
{
|
||||
if (feature_index_map->has (record.featureIndex)) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_subset_layout_context_t *l) const
|
||||
{
|
||||
|
@ -2784,7 +3089,8 @@ struct FeatureTableSubstitution
|
|||
+ substitutions.iter ()
|
||||
| hb_apply (subset_record_array (l, &(out->substitutions), this))
|
||||
;
|
||||
return_trace (true);
|
||||
|
||||
return_trace (bool (out->substitutions));
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -2797,7 +3103,7 @@ struct FeatureTableSubstitution
|
|||
|
||||
protected:
|
||||
FixedVersion<> version; /* Version--0x00010000u */
|
||||
ArrayOf<FeatureTableSubstitutionRecord>
|
||||
Array16Of<FeatureTableSubstitutionRecord>
|
||||
substitutions;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (6, substitutions);
|
||||
|
@ -2821,6 +3127,11 @@ struct FeatureVariationRecord
|
|||
(base+substitutions).closure_features (lookup_indexes, feature_indexes);
|
||||
}
|
||||
|
||||
bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
|
||||
{
|
||||
return (base+substitutions).intersects_features (feature_index_map);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_layout_context_t *c, const void *base) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
|
@ -2841,9 +3152,9 @@ struct FeatureVariationRecord
|
|||
}
|
||||
|
||||
protected:
|
||||
LOffsetTo<ConditionSet>
|
||||
Offset32To<ConditionSet>
|
||||
conditions;
|
||||
LOffsetTo<FeatureTableSubstitution>
|
||||
Offset32To<FeatureTableSubstitution>
|
||||
substitutions;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
@ -2907,9 +3218,18 @@ struct FeatureVariations
|
|||
out->version.major = version.major;
|
||||
out->version.minor = version.minor;
|
||||
|
||||
+ varRecords.iter ()
|
||||
| hb_apply (subset_record_array (l, &(out->varRecords), this))
|
||||
;
|
||||
int keep_up_to = -1;
|
||||
for (int i = varRecords.len - 1; i >= 0; i--) {
|
||||
if (varRecords[i].intersects_features (this, l->feature_index_map)) {
|
||||
keep_up_to = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
unsigned count = (unsigned) (keep_up_to + 1);
|
||||
for (unsigned i = 0; i < count; i++) {
|
||||
subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
|
||||
}
|
||||
return_trace (bool (out->varRecords));
|
||||
}
|
||||
|
||||
|
@ -2923,7 +3243,7 @@ struct FeatureVariations
|
|||
|
||||
protected:
|
||||
FixedVersion<> version; /* Version--0x00010000u */
|
||||
LArrayOf<FeatureVariationRecord>
|
||||
Array32Of<FeatureVariationRecord>
|
||||
varRecords;
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
|
||||
|
@ -3036,22 +3356,20 @@ struct VariationDevice
|
|||
if (unlikely (!out)) return_trace (nullptr);
|
||||
if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);
|
||||
|
||||
unsigned org_idx = (outerIndex << 16) + innerIndex;
|
||||
if (!layout_variation_idx_map->has (org_idx))
|
||||
/* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
|
||||
if (!layout_variation_idx_map->has (varIdx))
|
||||
{
|
||||
c->revert (snap);
|
||||
return_trace (nullptr);
|
||||
}
|
||||
unsigned new_idx = layout_variation_idx_map->get (org_idx);
|
||||
out->outerIndex = new_idx >> 16;
|
||||
out->innerIndex = new_idx & 0xFFFF;
|
||||
unsigned new_idx = layout_variation_idx_map->get (varIdx);
|
||||
out->varIdx = new_idx;
|
||||
return_trace (out);
|
||||
}
|
||||
|
||||
void record_variation_index (hb_set_t *layout_variation_indices) const
|
||||
{
|
||||
unsigned var_idx = (outerIndex << 16) + innerIndex;
|
||||
layout_variation_indices->add (var_idx);
|
||||
layout_variation_indices->add (varIdx);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -3064,12 +3382,11 @@ struct VariationDevice
|
|||
|
||||
float get_delta (hb_font_t *font, const VariationStore &store) const
|
||||
{
|
||||
return store.get_delta (outerIndex, innerIndex, font->coords, font->num_coords);
|
||||
return store.get_delta (varIdx, font->coords, font->num_coords);
|
||||
}
|
||||
|
||||
protected:
|
||||
HBUINT16 outerIndex;
|
||||
HBUINT16 innerIndex;
|
||||
VarIdx varIdx;
|
||||
HBUINT16 deltaFormat; /* Format identifier for this table: 0x0x8000 */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (6);
|
||||
|
|
|
@ -42,7 +42,7 @@ namespace OT {
|
|||
*/
|
||||
|
||||
/* Array of contour point indices--in increasing numerical order */
|
||||
struct AttachPoint : ArrayOf<HBUINT16>
|
||||
struct AttachPoint : Array16Of<HBUINT16>
|
||||
{
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
|
@ -110,10 +110,10 @@ struct AttachList
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table -- from
|
||||
* beginning of AttachList table */
|
||||
OffsetArrayOf<AttachPoint>
|
||||
Array16OfOffset16To<AttachPoint>
|
||||
attachPoint; /* Array of AttachPoint tables
|
||||
* in Coverage Index order */
|
||||
public:
|
||||
|
@ -220,7 +220,7 @@ struct CaretValueFormat3
|
|||
protected:
|
||||
HBUINT16 caretValueFormat; /* Format identifier--format = 3 */
|
||||
FWORD coordinate; /* X or Y value, in design units */
|
||||
OffsetTo<Device>
|
||||
Offset16To<Device>
|
||||
deviceTable; /* Offset to Device table for X or Y
|
||||
* value--from beginning of CaretValue
|
||||
* table */
|
||||
|
@ -329,7 +329,7 @@ struct LigGlyph
|
|||
|
||||
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
|
||||
{
|
||||
for (const OffsetTo<CaretValue>& offset : carets.iter ())
|
||||
for (const Offset16To<CaretValue>& offset : carets.iter ())
|
||||
(this+offset).collect_variation_indices (c->layout_variation_indices);
|
||||
}
|
||||
|
||||
|
@ -340,7 +340,7 @@ struct LigGlyph
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetArrayOf<CaretValue>
|
||||
Array16OfOffset16To<CaretValue>
|
||||
carets; /* Offset array of CaretValue tables
|
||||
* --from beginning of LigGlyph table
|
||||
* --in increasing coordinate order */
|
||||
|
@ -408,10 +408,10 @@ struct LigCaretList
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of LigCaretList table */
|
||||
OffsetArrayOf<LigGlyph>
|
||||
Array16OfOffset16To<LigGlyph>
|
||||
ligGlyph; /* Array of LigGlyph tables
|
||||
* in Coverage Index order */
|
||||
public:
|
||||
|
@ -432,7 +432,7 @@ struct MarkGlyphSetsFormat1
|
|||
out->format = format;
|
||||
|
||||
bool ret = true;
|
||||
for (const LOffsetTo<Coverage>& offset : coverage.iter ())
|
||||
for (const Offset32To<Coverage>& offset : coverage.iter ())
|
||||
{
|
||||
auto *o = out->coverage.serialize_append (c->serializer);
|
||||
if (unlikely (!o))
|
||||
|
@ -460,7 +460,7 @@ struct MarkGlyphSetsFormat1
|
|||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
ArrayOf<LOffsetTo<Coverage>>
|
||||
Array16Of<Offset32To<Coverage>>
|
||||
coverage; /* Array of long offsets to mark set
|
||||
* coverage tables */
|
||||
public:
|
||||
|
@ -643,10 +643,10 @@ struct GDEF
|
|||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
|
||||
bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this);
|
||||
bool subset_glyphclassdef = out->glyphClassDef.serialize_subset (c, glyphClassDef, this, nullptr, false, true);
|
||||
bool subset_attachlist = out->attachList.serialize_subset (c, attachList, this);
|
||||
bool subset_ligcaretlist = out->ligCaretList.serialize_subset (c, ligCaretList, this);
|
||||
bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this);
|
||||
bool subset_markattachclassdef = out->markAttachClassDef.serialize_subset (c, markAttachClassDef, this, nullptr, false, true);
|
||||
|
||||
bool subset_markglyphsetsdef = true;
|
||||
if (version.to_int () >= 0x00010002u)
|
||||
|
@ -687,28 +687,28 @@ struct GDEF
|
|||
protected:
|
||||
FixedVersion<>version; /* Version of the GDEF table--currently
|
||||
* 0x00010003u */
|
||||
OffsetTo<ClassDef>
|
||||
Offset16To<ClassDef>
|
||||
glyphClassDef; /* Offset to class definition table
|
||||
* for glyph type--from beginning of
|
||||
* GDEF header (may be Null) */
|
||||
OffsetTo<AttachList>
|
||||
Offset16To<AttachList>
|
||||
attachList; /* Offset to list of glyphs with
|
||||
* attachment points--from beginning
|
||||
* of GDEF header (may be Null) */
|
||||
OffsetTo<LigCaretList>
|
||||
Offset16To<LigCaretList>
|
||||
ligCaretList; /* Offset to list of positioning points
|
||||
* for ligature carets--from beginning
|
||||
* of GDEF header (may be Null) */
|
||||
OffsetTo<ClassDef>
|
||||
Offset16To<ClassDef>
|
||||
markAttachClassDef; /* Offset to class definition table for
|
||||
* mark attachment type--from beginning
|
||||
* of GDEF header (may be Null) */
|
||||
OffsetTo<MarkGlyphSets>
|
||||
Offset16To<MarkGlyphSets>
|
||||
markGlyphSetsDef; /* Offset to the table of mark set
|
||||
* definitions--from beginning of GDEF
|
||||
* header (may be NULL). Introduced
|
||||
* in version 0x00010002. */
|
||||
LOffsetTo<VariationStore>
|
||||
Offset32To<VariationStore>
|
||||
varStore; /* Offset to the table of Item Variation
|
||||
* Store--from beginning of GDEF
|
||||
* header (may be NULL). Introduced
|
||||
|
|
Diff for this file is not shown because of its large size.
|
@@ -46,14 +46,19 @@ struct SingleSubstFormat1
   bool intersects (const hb_set_t *glyphs) const
   { return (this+coverage).intersects (glyphs); }

   bool may_have_non_1to1 () const
   { return false; }

   void closure (hb_closure_context_t *c) const
   {
     unsigned d = deltaGlyphID;

     + hb_iter (this+coverage)
     | hb_filter (*c->glyphs)
     | hb_filter (c->parent_active_glyphs ())
     | hb_map ([d] (hb_codepoint_t g) { return (g + d) & 0xFFFFu; })
     | hb_sink (c->output)
     ;
   }

   void closure_lookups (hb_closure_lookups_context_t *c) const {}
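The format 1 closure above maps every covered glyph through (g + d) & 0xFFFFu: the substitute is simply the original glyph ID plus deltaGlyphID, taken modulo 65536. A minimal standalone sketch of that arithmetic (illustrative only, not HarfBuzz code):

    #include <cstdint>
    #include <cstdio>

    // Hypothetical helper: apply a SingleSubst format 1 delta to a glyph ID.
    // deltaGlyphID is a 16-bit value and the addition wraps modulo 65536,
    // which is what the "& 0xFFFFu" in the closure above does.
    static uint16_t apply_single_subst_delta (uint16_t glyph, uint16_t delta)
    {
      return (uint16_t) ((glyph + delta) & 0xFFFFu);
    }

    int main ()
    {
      uint16_t delta = 0xFFFF;  // equivalent to subtracting 1, mod 2^16
      printf ("%u -> %u\n", 10u, apply_single_subst_delta (10, delta));  // 10 -> 9
      return 0;
    }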
@@ -97,7 +102,7 @@ struct SingleSubstFormat1
     TRACE_SERIALIZE (this);
     if (unlikely (!c->extend_min (*this))) return_trace (false);
     if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
     c->check_assign (deltaGlyphID, delta);
     c->check_assign (deltaGlyphID, delta, HB_SERIALIZE_ERROR_INT_OVERFLOW);
     return_trace (true);
   }

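The serializer call now names the failure (HB_SERIALIZE_ERROR_INT_OVERFLOW) when the computed delta cannot be represented in the 16-bit field instead of silently truncating. A rough standalone illustration of that checked-narrowing idea (a sketch; the real check_assign lives inside the serializer):

    #include <cstdint>

    enum sketch_error_t { SKETCH_OK, SKETCH_ERROR_INT_OVERFLOW };

    // Sketch: assign an int delta into a 16-bit field, reporting overflow
    // to the caller rather than truncating.
    static sketch_error_t assign_delta (uint16_t &field, int delta)
    {
      if (delta < 0 || delta > 0xFFFF)
        return SKETCH_ERROR_INT_OVERFLOW;
      field = (uint16_t) delta;
      return SKETCH_OK;
    }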
@ -133,7 +138,7 @@ struct SingleSubstFormat1
|
|||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of Substitution table */
|
||||
HBUINT16 deltaGlyphID; /* Add to original GlyphID to get
|
||||
|
@ -147,13 +152,17 @@ struct SingleSubstFormat2
|
|||
bool intersects (const hb_set_t *glyphs) const
|
||||
{ return (this+coverage).intersects (glyphs); }
|
||||
|
||||
bool may_have_non_1to1 () const
|
||||
{ return false; }
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
+ hb_zip (this+coverage, substitute)
|
||||
| hb_filter (*c->glyphs, hb_first)
|
||||
| hb_filter (c->parent_active_glyphs (), hb_first)
|
||||
| hb_map (hb_second)
|
||||
| hb_sink (c->output)
|
||||
;
|
||||
|
||||
}
|
||||
|
||||
void closure_lookups (hb_closure_lookups_context_t *c) const {}
|
||||
|
@ -233,10 +242,10 @@ struct SingleSubstFormat2
|
|||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 2 */
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of Substitution table */
|
||||
ArrayOf<HBGlyphID>
|
||||
Array16Of<HBGlyphID>
|
||||
substitute; /* Array of substitute
|
||||
* GlyphIDs--ordered by Coverage Index */
|
||||
public:
|
||||
|
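Format 2 pairs the Coverage table with a parallel substitute array: the coverage index of the input glyph selects the replacement. A small sketch of that lookup over plain arrays (illustrative only; a real Coverage table can also use range records instead of a flat glyph list):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    // Sketch: coverage as a sorted glyph list, substitutes as a parallel
    // array ordered by coverage index.
    static bool single_subst_format2 (const std::vector<uint16_t> &coverage,
                                      const std::vector<uint16_t> &substitute,
                                      uint16_t glyph, uint16_t *out)
    {
      auto it = std::lower_bound (coverage.begin (), coverage.end (), glyph);
      if (it == coverage.end () || *it != glyph) return false;   // NOT_COVERED
      size_t index = (size_t) (it - coverage.begin ());
      if (index >= substitute.size ()) return false;
      *out = substitute[index];
      return true;
    }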
@ -377,7 +386,7 @@ struct Sequence
|
|||
}
|
||||
|
||||
protected:
|
||||
ArrayOf<HBGlyphID>
|
||||
Array16Of<HBGlyphID>
|
||||
substitute; /* String of GlyphIDs to substitute */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (2, substitute);
|
||||
|
@ -388,14 +397,18 @@ struct MultipleSubstFormat1
|
|||
bool intersects (const hb_set_t *glyphs) const
|
||||
{ return (this+coverage).intersects (glyphs); }
|
||||
|
||||
bool may_have_non_1to1 () const
|
||||
{ return true; }
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
+ hb_zip (this+coverage, sequence)
|
||||
| hb_filter (*c->glyphs, hb_first)
|
||||
| hb_filter (c->parent_active_glyphs (), hb_first)
|
||||
| hb_map (hb_second)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_apply ([c] (const Sequence &_) { _.closure (c); })
|
||||
;
|
||||
|
||||
}
|
||||
|
||||
void closure_lookups (hb_closure_lookups_context_t *c) const {}
|
||||
|
@ -475,10 +488,10 @@ struct MultipleSubstFormat1
|
|||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of Substitution table */
|
||||
OffsetArrayOf<Sequence>
|
||||
Array16OfOffset16To<Sequence>
|
||||
sequence; /* Array of Sequence tables
|
||||
* ordered by Coverage Index */
|
||||
public:
|
||||
|
@ -603,7 +616,7 @@ struct AlternateSet
|
|||
}
|
||||
|
||||
protected:
|
||||
ArrayOf<HBGlyphID>
|
||||
Array16Of<HBGlyphID>
|
||||
alternates; /* Array of alternate GlyphIDs--in
|
||||
* arbitrary order */
|
||||
public:
|
||||
|
@ -615,14 +628,18 @@ struct AlternateSubstFormat1
|
|||
bool intersects (const hb_set_t *glyphs) const
|
||||
{ return (this+coverage).intersects (glyphs); }
|
||||
|
||||
bool may_have_non_1to1 () const
|
||||
{ return false; }
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
+ hb_zip (this+coverage, alternateSet)
|
||||
| hb_filter (c->glyphs, hb_first)
|
||||
| hb_filter (c->parent_active_glyphs (), hb_first)
|
||||
| hb_map (hb_second)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_apply ([c] (const AlternateSet &_) { _.closure (c); })
|
||||
;
|
||||
|
||||
}
|
||||
|
||||
void closure_lookups (hb_closure_lookups_context_t *c) const {}
|
||||
|
@ -710,10 +727,10 @@ struct AlternateSubstFormat1
|
|||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of Substitution table */
|
||||
OffsetArrayOf<AlternateSet>
|
||||
Array16OfOffset16To<AlternateSet>
|
||||
alternateSet; /* Array of AlternateSet tables
|
||||
* ordered by Coverage Index */
|
||||
public:
|
||||
|
@ -965,7 +982,7 @@ struct LigatureSet
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetArrayOf<Ligature>
|
||||
Array16OfOffset16To<Ligature>
|
||||
ligature; /* Array LigatureSet tables
|
||||
* ordered by preference */
|
||||
public:
|
||||
|
@ -980,20 +997,24 @@ struct LigatureSubstFormat1
|
|||
+ hb_zip (this+coverage, ligatureSet)
|
||||
| hb_filter (*glyphs, hb_first)
|
||||
| hb_map (hb_second)
|
||||
| hb_map ([this, glyphs] (const OffsetTo<LigatureSet> &_)
|
||||
| hb_map ([this, glyphs] (const Offset16To<LigatureSet> &_)
|
||||
{ return (this+_).intersects (glyphs); })
|
||||
| hb_any
|
||||
;
|
||||
}
|
||||
|
||||
bool may_have_non_1to1 () const
|
||||
{ return true; }
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
+ hb_zip (this+coverage, ligatureSet)
|
||||
| hb_filter (*c->glyphs, hb_first)
|
||||
| hb_filter (c->parent_active_glyphs (), hb_first)
|
||||
| hb_map (hb_second)
|
||||
| hb_map (hb_add (this))
|
||||
| hb_apply ([c] (const LigatureSet &_) { _.closure (c); })
|
||||
;
|
||||
|
||||
}
|
||||
|
||||
void closure_lookups (hb_closure_lookups_context_t *c) const {}
|
||||
|
@ -1086,10 +1107,10 @@ struct LigatureSubstFormat1
|
|||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of Substitution table */
|
||||
OffsetArrayOf<LigatureSet>
|
||||
Array16OfOffset16To<LigatureSet>
|
||||
ligatureSet; /* Array LigatureSet tables
|
||||
* ordered by Coverage Index */
|
||||
public:
|
||||
|
@ -1157,7 +1178,7 @@ struct ReverseChainSingleSubstFormat1
|
|||
if (!(this+coverage).intersects (glyphs))
|
||||
return false;
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
|
||||
|
||||
unsigned int count;
|
||||
|
||||
|
@ -1174,15 +1195,18 @@ struct ReverseChainSingleSubstFormat1
|
|||
return true;
|
||||
}
|
||||
|
||||
bool may_have_non_1to1 () const
|
||||
{ return false; }
|
||||
|
||||
void closure (hb_closure_context_t *c) const
|
||||
{
|
||||
if (!intersects (c->glyphs)) return;
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
|
||||
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
|
||||
const Array16Of<HBGlyphID> &substitute = StructAfter<Array16Of<HBGlyphID>> (lookahead);
|
||||
|
||||
+ hb_zip (this+coverage, substitute)
|
||||
| hb_filter (*c->glyphs, hb_first)
|
||||
| hb_filter (c->parent_active_glyphs (), hb_first)
|
||||
| hb_map (hb_second)
|
||||
| hb_sink (c->output)
|
||||
;
|
||||
|
@ -1200,12 +1224,12 @@ struct ReverseChainSingleSubstFormat1
|
|||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(this+backtrack[i]).collect_coverage (c->before))) return;
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
|
||||
count = lookahead.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
if (unlikely (!(this+lookahead[i]).collect_coverage (c->after))) return;
|
||||
|
||||
const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
|
||||
const Array16Of<HBGlyphID> &substitute = StructAfter<Array16Of<HBGlyphID>> (lookahead);
|
||||
count = substitute.len;
|
||||
c->output->add_array (substitute.arrayZ, substitute.len);
|
||||
}
|
||||
|
@ -1224,8 +1248,8 @@ struct ReverseChainSingleSubstFormat1
|
|||
unsigned int index = (this+coverage).get_coverage (c->buffer->cur ().codepoint);
|
||||
if (likely (index == NOT_COVERED)) return_trace (false);
|
||||
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
|
||||
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
|
||||
const Array16Of<HBGlyphID> &substitute = StructAfter<Array16Of<HBGlyphID>> (lookahead);
|
||||
|
||||
if (unlikely (index >= substitute.len)) return_trace (false);
|
||||
|
||||
|
@ -1250,11 +1274,80 @@ struct ReverseChainSingleSubstFormat1
|
|||
return_trace (false);
|
||||
}
|
||||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize_coverage_offset_array (hb_subset_context_t *c, Iterator it) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
|
||||
|
||||
if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
|
||||
return_trace (false);
|
||||
|
||||
for (auto& offset : it) {
|
||||
auto *o = out->serialize_append (c->serializer);
|
||||
if (unlikely (!o) || !o->serialize_subset (c, offset, this))
|
||||
return_trace (false);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
template<typename Iterator, typename BacktrackIterator, typename LookaheadIterator,
|
||||
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_pair_t)),
|
||||
hb_requires (hb_is_iterator (BacktrackIterator)),
|
||||
hb_requires (hb_is_iterator (LookaheadIterator))>
|
||||
bool serialize (hb_subset_context_t *c,
|
||||
Iterator coverage_subst_iter,
|
||||
BacktrackIterator backtrack_iter,
|
||||
LookaheadIterator lookahead_iter) const
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
|
||||
auto *out = c->serializer->start_embed (this);
|
||||
if (unlikely (!c->serializer->check_success (out))) return_trace (false);
|
||||
if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
|
||||
if (unlikely (!c->serializer->embed (this->coverage))) return_trace (false);
|
||||
|
||||
if (!serialize_coverage_offset_array (c, backtrack_iter)) return_trace (false);
|
||||
if (!serialize_coverage_offset_array (c, lookahead_iter)) return_trace (false);
|
||||
|
||||
auto *substitute_out = c->serializer->start_embed<Array16Of<HBGlyphID>> ();
|
||||
auto substitutes =
|
||||
+ coverage_subst_iter
|
||||
| hb_map (hb_second)
|
||||
;
|
||||
|
||||
auto glyphs =
|
||||
+ coverage_subst_iter
|
||||
| hb_map_retains_sorting (hb_first)
|
||||
;
|
||||
if (unlikely (! c->serializer->check_success (substitute_out->serialize (c->serializer, substitutes))))
|
||||
return_trace (false);
|
||||
|
||||
if (unlikely (!out->coverage.serialize (c->serializer, out).serialize (c->serializer, glyphs)))
|
||||
return_trace (false);
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
// TODO(subset)
|
||||
return_trace (false);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
|
||||
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
|
||||
const Array16Of<HBGlyphID> &substitute = StructAfter<Array16Of<HBGlyphID>> (lookahead);
|
||||
|
||||
auto it =
|
||||
+ hb_zip (this+coverage, substitute)
|
||||
| hb_filter (glyphset, hb_first)
|
||||
| hb_filter (glyphset, hb_second)
|
||||
| hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const HBGlyphID &> p) -> hb_codepoint_pair_t
|
||||
{ return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
|
||||
;
|
||||
|
||||
return_trace (bool (it) && serialize (c, it, backtrack.iter (), lookahead.iter ()));
|
||||
}
|
||||
|
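The subset() body above filters the (coverage glyph, substitute glyph) pairs down to glyphs retained by the plan and rewrites both sides through the old-to-new glyph map before re-serializing. The same filter-and-remap step, written against plain containers as a rough sketch (names are illustrative, not the subsetter's API):

    #include <cstdint>
    #include <unordered_map>
    #include <unordered_set>
    #include <utility>
    #include <vector>

    // Sketch: keep only pairs whose glyphs both survive subsetting, then
    // rewrite them in the new glyph space, preserving input order.
    static std::vector<std::pair<uint16_t, uint16_t>>
    remap_pairs (const std::vector<std::pair<uint16_t, uint16_t>> &pairs,
                 const std::unordered_set<uint16_t> &retained,
                 const std::unordered_map<uint16_t, uint16_t> &glyph_map)
    {
      std::vector<std::pair<uint16_t, uint16_t>> out;
      for (const auto &p : pairs)
      {
        if (!retained.count (p.first) || !retained.count (p.second)) continue;
        out.emplace_back (glyph_map.at (p.first), glyph_map.at (p.second));
      }
      return out;
    }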
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
|
@ -1262,27 +1355,27 @@ struct ReverseChainSingleSubstFormat1
|
|||
TRACE_SANITIZE (this);
|
||||
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
|
||||
return_trace (false);
|
||||
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
|
||||
const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
|
||||
if (!lookahead.sanitize (c, this))
|
||||
return_trace (false);
|
||||
const ArrayOf<HBGlyphID> &substitute = StructAfter<ArrayOf<HBGlyphID>> (lookahead);
|
||||
const Array16Of<HBGlyphID> &substitute = StructAfter<Array16Of<HBGlyphID>> (lookahead);
|
||||
return_trace (substitute.sanitize (c));
|
||||
}
|
||||
|
||||
protected:
|
||||
HBUINT16 format; /* Format identifier--format = 1 */
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
coverage; /* Offset to Coverage table--from
|
||||
* beginning of table */
|
||||
OffsetArrayOf<Coverage>
|
||||
Array16OfOffset16To<Coverage>
|
||||
backtrack; /* Array of coverage tables
|
||||
* in backtracking sequence, in glyph
|
||||
* sequence order */
|
||||
OffsetArrayOf<Coverage>
|
||||
Array16OfOffset16To<Coverage>
|
||||
lookaheadX; /* Array of coverage tables
|
||||
* in lookahead sequence, in glyph
|
||||
* sequence order */
|
||||
ArrayOf<HBGlyphID>
|
||||
Array16Of<HBGlyphID>
|
||||
substituteX; /* Array of substitute
|
||||
* GlyphIDs--ordered by Coverage Index */
|
||||
public:
|
||||
|
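ReverseChainSingleSubstFormat1 stores its variable-length arrays back to back (backtrack coverage offsets, then lookahead coverage offsets, then the substitute glyphs), so the later arrays are located with StructAfter, i.e. by stepping a pointer past the end of the previous array. A stripped-down sketch of that layout trick (illustrative; the real HarfBuzz types are big-endian and add sanitization and bounds checks):

    #include <cstdint>

    // Sketch: a length-prefixed uint16 array as it appears in a serialized
    // table, followed immediately by whatever structure comes next.
    struct Array16Sketch
    {
      uint16_t len;       // big-endian in a real font; ignored here
      uint16_t items[1];  // actually 'len' items

      // Address of the structure serialized right after this array,
      // the idea behind StructAfter<> in the code above.
      const void *struct_after () const
      { return (const char *) this + sizeof (uint16_t) * (1u + len); }
    };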
@ -1388,6 +1481,12 @@ struct SubstLookup : Lookup
|
|||
return lookup_type_is_reverse (type);
|
||||
}
|
||||
|
||||
bool may_have_non_1to1 () const
|
||||
{
|
||||
hb_have_non_1to1_context_t c;
|
||||
return dispatch (&c);
|
||||
}
|
||||
|
||||
bool apply (hb_ot_apply_context_t *c) const
|
||||
{
|
||||
TRACE_APPLY (this);
|
||||
|
@ -1522,12 +1621,14 @@ struct SubstLookup : Lookup
|
|||
template <typename context_t>
|
||||
static inline typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
|
||||
|
||||
static inline hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
|
||||
static inline typename hb_closure_context_t::return_t closure_glyphs_recurse_func (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index);
|
||||
|
||||
static inline hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index)
|
||||
{
|
||||
if (!c->should_visit_lookup (lookup_index))
|
||||
return hb_empty_t ();
|
||||
|
||||
hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);
|
||||
hb_closure_context_t::return_t ret = closure_glyphs_recurse_func (c, lookup_index, covered_seq_indices, seq_index, end_index);
|
||||
|
||||
/* While in theory we should flush here, it will cause timeouts because a recursive
|
||||
* lookup can keep growing the glyph set. Skip, and outer loop will retry up to
|
||||
|
@ -1564,7 +1665,7 @@ struct GSUB : GSUBGPOS
|
|||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
hb_subset_layout_context_t l (c, tableTag, c->plan->gsub_lookups, c->plan->gsub_features);
|
||||
hb_subset_layout_context_t l (c, tableTag, c->plan->gsub_lookups, c->plan->gsub_langsys, c->plan->gsub_features);
|
||||
return GSUBGPOS::subset<SubstLookup> (&l);
|
||||
}
|
||||
|
||||
|
@ -1600,6 +1701,14 @@ template <typename context_t>
|
|||
return l.dispatch (c);
|
||||
}
|
||||
|
||||
/*static*/ typename hb_closure_context_t::return_t SubstLookup::closure_glyphs_recurse_func (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indices, unsigned seq_index, unsigned end_index)
|
||||
{
|
||||
const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (lookup_index);
|
||||
if (l.may_have_non_1to1 ())
|
||||
hb_set_add_range (covered_seq_indices, seq_index, end_index);
|
||||
return l.dispatch (c);
|
||||
}
|
||||
|
||||
/*static*/ inline hb_closure_lookups_context_t::return_t SubstLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
|
||||
{
|
||||
const SubstLookup &l = c->face->table.GSUB.get_relaxed ()->table->get_lookup (this_index);
|
||||
|
|
Diff for this file is not shown because of its large size.
|
@ -45,7 +45,7 @@ typedef IndexArray JstfModList;
|
|||
* JstfMax -- Justification Maximum Table
|
||||
*/
|
||||
|
||||
typedef OffsetListOf<PosLookup> JstfMax;
|
||||
typedef List16OfOffset16To<PosLookup> JstfMax;
|
||||
|
||||
|
||||
/*
|
||||
|
@ -71,43 +71,43 @@ struct JstfPriority
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
shrinkageEnableGSUB; /* Offset to Shrinkage Enable GSUB
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
shrinkageDisableGSUB; /* Offset to Shrinkage Disable GSUB
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
shrinkageEnableGPOS; /* Offset to Shrinkage Enable GPOS
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
shrinkageDisableGPOS; /* Offset to Shrinkage Disable GPOS
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfMax>
|
||||
Offset16To<JstfMax>
|
||||
shrinkageJstfMax; /* Offset to Shrinkage JstfMax table--
|
||||
* from beginning of JstfPriority table
|
||||
* --may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
extensionEnableGSUB; /* Offset to Extension Enable GSUB
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
extensionDisableGSUB; /* Offset to Extension Disable GSUB
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
extensionEnableGPOS; /* Offset to Extension Enable GPOS
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfModList>
|
||||
Offset16To<JstfModList>
|
||||
extensionDisableGPOS; /* Offset to Extension Disable GPOS
|
||||
* JstfModList table--from beginning of
|
||||
* JstfPriority table--may be NULL */
|
||||
OffsetTo<JstfMax>
|
||||
Offset16To<JstfMax>
|
||||
extensionJstfMax; /* Offset to Extension JstfMax table--
|
||||
* from beginning of JstfPriority table
|
||||
* --may be NULL */
|
||||
|
@ -121,13 +121,13 @@ struct JstfPriority
|
|||
* JstfLangSys -- Justification Language System Table
|
||||
*/
|
||||
|
||||
struct JstfLangSys : OffsetListOf<JstfPriority>
|
||||
struct JstfLangSys : List16OfOffset16To<JstfPriority>
|
||||
{
|
||||
bool sanitize (hb_sanitize_context_t *c,
|
||||
const Record_sanitize_closure_t * = nullptr) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
return_trace (OffsetListOf<JstfPriority>::sanitize (c));
|
||||
return_trace (List16OfOffset16To<JstfPriority>::sanitize (c));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -136,7 +136,7 @@ struct JstfLangSys : OffsetListOf<JstfPriority>
|
|||
* ExtenderGlyphs -- Extender Glyph Table
|
||||
*/
|
||||
|
||||
typedef SortedArrayOf<HBGlyphID> ExtenderGlyphs;
|
||||
typedef SortedArray16Of<HBGlyphID> ExtenderGlyphs;
|
||||
|
||||
|
||||
/*
|
||||
|
@ -174,10 +174,10 @@ struct JstfScript
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<ExtenderGlyphs>
|
||||
Offset16To<ExtenderGlyphs>
|
||||
extenderGlyphs; /* Offset to ExtenderGlyph table--from beginning
|
||||
* of JstfScript table-may be NULL */
|
||||
OffsetTo<JstfLangSys>
|
||||
Offset16To<JstfLangSys>
|
||||
defaultLangSys; /* Offset to DefaultJstfLangSys table--from
|
||||
* beginning of JstfScript table--may be Null */
|
||||
RecordArrayOf<JstfLangSys>
|
||||
|
|
|
@ -331,6 +331,8 @@ hb_ot_layout_get_glyphs_in_class (hb_face_t *face,
|
|||
*
|
||||
* Useful if the client program wishes to cache the list.
|
||||
*
|
||||
* Return value: Total number of attachment points for @glyph.
|
||||
*
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_get_attach_points (hb_face_t *face,
|
||||
|
@ -357,6 +359,8 @@ hb_ot_layout_get_attach_points (hb_face_t *face,
|
|||
* Fetches a list of the caret positions defined for a ligature glyph in the GDEF
|
||||
* table of the font. The list returned will begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of ligature caret positions for @glyph.
|
||||
*
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_get_ligature_carets (hb_font_t *font,
|
||||
|
@ -419,6 +423,8 @@ get_gsubgpos_table (hb_face_t *face,
|
|||
* Fetches a list of all scripts enumerated in the specified face's GSUB table
|
||||
* or GPOS table. The list returned will begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of script tags.
|
||||
*
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_table_get_script_tags (hb_face_t *face,
|
||||
|
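These getters all share the same calling convention documented above: pass a start offset and an in/out count, and the return value is the total so the caller can page through the list. A usage sketch against the public API (assumes a valid hb_face_t*; error handling omitted):

    #include <hb.h>
    #include <hb-ot.h>
    #include <cstdio>

    // Sketch: enumerate all GSUB script tags in fixed-size chunks using the
    // start_offset / in-out count convention.
    static void print_gsub_scripts (hb_face_t *face)
    {
      unsigned int start = 0;
      while (true)
      {
        hb_tag_t tags[8];
        unsigned int count = 8;   // in: capacity, out: number written
        unsigned int total = hb_ot_layout_table_get_script_tags (face, HB_OT_TAG_GSUB,
                                                                 start, &count, tags);
        for (unsigned int i = 0; i < count; i++)
        {
          char buf[5] = {0};
          hb_tag_to_string (tags[i], buf);
          printf ("%s\n", buf);
        }
        start += count;
        if (!count || start >= total) break;
      }
    }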
@ -585,6 +591,8 @@ hb_ot_layout_table_select_script (hb_face_t *face,
|
|||
*
|
||||
* Fetches a list of all feature tags in the given face's GSUB or GPOS table.
|
||||
*
|
||||
* Return value: Total number of feature tags.
|
||||
*
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_table_get_feature_tags (hb_face_t *face,
|
||||
|
@ -647,6 +655,8 @@ hb_ot_layout_table_find_feature (hb_face_t *face,
|
|||
* Fetches a list of language tags in the given face's GSUB or GPOS table, underneath
|
||||
* the specified script index. The list returned will begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of language tags.
|
||||
*
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_script_get_language_tags (hb_face_t *face,
|
||||
|
@ -818,6 +828,8 @@ hb_ot_layout_language_get_required_feature (hb_face_t *face,
|
|||
* Fetches a list of all features in the specified face's GSUB table
|
||||
* or GPOS table, underneath the specified script and language. The list
|
||||
* returned will begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of features.
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_language_get_feature_indexes (hb_face_t *face,
|
||||
|
@ -850,6 +862,7 @@ hb_ot_layout_language_get_feature_indexes (hb_face_t *face,
|
|||
* or GPOS table, underneath the specified script and language. The list
|
||||
* returned will begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of feature tags.
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_language_get_feature_tags (hb_face_t *face,
|
||||
|
@ -932,6 +945,8 @@ hb_ot_layout_language_find_feature (hb_face_t *face,
|
|||
* the specified face's GSUB table or GPOS table. The list returned will
|
||||
* begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of lookups.
|
||||
*
|
||||
* Since: 0.9.7
|
||||
**/
|
||||
unsigned int
|
||||
|
@ -960,6 +975,8 @@ hb_ot_layout_feature_get_lookups (hb_face_t *face,
|
|||
* Fetches the total number of lookups enumerated in the specified
|
||||
* face's GSUB table or GPOS table.
|
||||
*
|
||||
* Return value: Total number of lookups.
|
||||
*
|
||||
* Since: 0.9.22
|
||||
**/
|
||||
unsigned int
|
||||
|
@ -1262,6 +1279,8 @@ hb_ot_layout_lookup_collect_glyphs (hb_face_t *face,
|
|||
* Fetches a list of feature variations in the specified face's GSUB table
|
||||
* or GPOS table, at the specified variation coordinates.
|
||||
*
|
||||
* Return value: %true if feature variations were found, %false otherwise.
|
||||
*
|
||||
**/
|
||||
hb_bool_t
|
||||
hb_ot_layout_table_find_feature_variations (hb_face_t *face,
|
||||
|
@ -1291,6 +1310,8 @@ hb_ot_layout_table_find_feature_variations (hb_face_t *face,
|
|||
* the specified face's GSUB table or GPOS table, enabled at the specified
|
||||
* variations index. The list returned will begin at the offset provided.
|
||||
*
|
||||
* Return value: Total number of lookups.
|
||||
*
|
||||
**/
|
||||
unsigned int
|
||||
hb_ot_layout_feature_with_variations_get_lookups (hb_face_t *face,
|
||||
|
@@ -1443,12 +1464,17 @@ hb_ot_layout_lookup_substitute_closure (hb_face_t *face,
                                         unsigned int  lookup_index,
                                         hb_set_t     *glyphs /* OUT */)
 {
   hb_map_t done_lookups;
   OT::hb_closure_context_t c (face, glyphs, &done_lookups);
   hb_set_t cur_intersected_glyphs;
   hb_map_t done_lookups_glyph_count;
   hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> done_lookups_glyph_set;
   OT::hb_closure_context_t c (face, glyphs, &cur_intersected_glyphs, &done_lookups_glyph_count, &done_lookups_glyph_set);

   const OT::SubstLookup& l = face->table.GSUB->table->get_lookup (lookup_index);

   l.closure (&c, lookup_index);

   for (auto _ : done_lookups_glyph_set.iter ())
     hb_set_destroy (_.second);
 }

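For reference, the function being reworked here is public API: given a starting glyph set, it adds everything a single GSUB lookup can produce. A brief usage sketch (assumes a valid face; error handling omitted):

    #include <hb.h>
    #include <hb-ot.h>

    // Sketch: start from one glyph and collect every glyph reachable through
    // lookup `lookup_index` of the face's GSUB table.
    static hb_set_t *lookup_closure (hb_face_t *face, unsigned lookup_index,
                                     hb_codepoint_t start_glyph)
    {
      hb_set_t *glyphs = hb_set_create ();
      hb_set_add (glyphs, start_glyph);
      hb_ot_layout_lookup_substitute_closure (face, lookup_index, glyphs);
      return glyphs;   // caller owns; free with hb_set_destroy()
    }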
/**
|
||||
|
@ -1467,8 +1493,10 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
|
|||
const hb_set_t *lookups,
|
||||
hb_set_t *glyphs /* OUT */)
|
||||
{
|
||||
hb_map_t done_lookups;
|
||||
OT::hb_closure_context_t c (face, glyphs, &done_lookups);
|
||||
hb_set_t cur_intersected_glyphs;
|
||||
hb_map_t done_lookups_glyph_count;
|
||||
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> done_lookups_glyph_set;
|
||||
OT::hb_closure_context_t c (face, glyphs, &cur_intersected_glyphs, &done_lookups_glyph_count, &done_lookups_glyph_set);
|
||||
const OT::GSUB& gsub = *face->table.GSUB->table;
|
||||
|
||||
unsigned int iteration_count = 0;
|
||||
|
@ -1488,6 +1516,9 @@ hb_ot_layout_lookups_substitute_closure (hb_face_t *face,
|
|||
}
|
||||
} while (iteration_count++ <= HB_CLOSURE_MAX_STAGES &&
|
||||
glyphs_length != glyphs->get_population ());
|
||||
|
||||
for (auto _ : done_lookups_glyph_set.iter ())
|
||||
hb_set_destroy (_.second);
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -1925,7 +1956,7 @@ hb_ot_layout_substitute_lookup (OT::hb_ot_apply_context_t *c,
|
|||
*
|
||||
* Fetches a baseline value from the face.
|
||||
*
|
||||
* Return value: if found baseline value in the font.
|
||||
* Return value: %true if found baseline value in the font.
|
||||
*
|
||||
* Since: 2.6.0
|
||||
**/
|
||||
|
@ -1984,7 +2015,7 @@ struct hb_get_glyph_alternates_dispatch_t :
|
|||
*
|
||||
* Fetches alternates of a glyph from a given GSUB lookup index.
|
||||
*
|
||||
* Return value: total number of alternates found in the specific lookup index for the given glyph id.
|
||||
* Return value: Total number of alternates found in the specific lookup index for the given glyph id.
|
||||
*
|
||||
* Since: 2.6.8
|
||||
**/
|
||||
|
|
|
@ -314,7 +314,6 @@ _hb_glyph_info_get_unicode_space_fallback_type (const hb_glyph_info_t *info)
|
|||
hb_unicode_funcs_t::NOT_SPACE;
|
||||
}
|
||||
|
||||
static inline bool _hb_glyph_info_ligated (const hb_glyph_info_t *info);
|
||||
static inline bool _hb_glyph_info_substituted (const hb_glyph_info_t *info);
|
||||
|
||||
static inline bool
|
||||
|
@ -328,7 +327,7 @@ _hb_glyph_info_is_default_ignorable_and_not_hidden (const hb_glyph_info_t *info)
|
|||
{
|
||||
return ((info->unicode_props() & (UPROPS_MASK_IGNORABLE|UPROPS_MASK_HIDDEN))
|
||||
== UPROPS_MASK_IGNORABLE) &&
|
||||
!_hb_glyph_info_ligated (info);
|
||||
!_hb_glyph_info_substituted (info);
|
||||
}
|
||||
static inline void
|
||||
_hb_glyph_info_unhide (hb_glyph_info_t *info)
|
||||
|
|
|
@ -49,7 +49,7 @@ struct MathValueRecord
|
|||
|
||||
protected:
|
||||
HBINT16 value; /* The X or Y value in design units */
|
||||
OffsetTo<Device> deviceTable; /* Offset to the device table - from the
|
||||
Offset16To<Device> deviceTable; /* Offset to the device table - from the
|
||||
* beginning of parent table. May be NULL.
|
||||
* Suggested format for device table is 1. */
|
||||
|
||||
|
@ -181,11 +181,11 @@ struct MathItalicsCorrectionInfo
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<Coverage> coverage; /* Offset to Coverage table -
|
||||
Offset16To<Coverage> coverage; /* Offset to Coverage table -
|
||||
* from the beginning of
|
||||
* MathItalicsCorrectionInfo
|
||||
* table. */
|
||||
ArrayOf<MathValueRecord> italicsCorrection; /* Array of MathValueRecords
|
||||
Array16Of<MathValueRecord> italicsCorrection; /* Array of MathValueRecords
|
||||
* defining italics correction
|
||||
* values for each
|
||||
* covered glyph. */
|
||||
|
@ -214,11 +214,11 @@ struct MathTopAccentAttachment
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<Coverage> topAccentCoverage; /* Offset to Coverage table -
|
||||
Offset16To<Coverage> topAccentCoverage; /* Offset to Coverage table -
|
||||
* from the beginning of
|
||||
* MathTopAccentAttachment
|
||||
* table. */
|
||||
ArrayOf<MathValueRecord> topAccentAttachment; /* Array of MathValueRecords
|
||||
Array16Of<MathValueRecord> topAccentAttachment; /* Array of MathValueRecords
|
||||
* defining top accent
|
||||
* attachment points for each
|
||||
* covered glyph. */
|
||||
|
@ -320,7 +320,7 @@ struct MathKernInfoRecord
|
|||
protected:
|
||||
/* Offset to MathKern table for each corner -
|
||||
* from the beginning of MathKernInfo table. May be NULL. */
|
||||
OffsetTo<MathKern> mathKern[4];
|
||||
Offset16To<MathKern> mathKern[4];
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
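MathKernInfoRecord keeps one MathKern offset per corner of the glyph, as the comment above notes. On the public side this is exposed through hb_ot_math_get_glyph_kerning(); a hedged usage sketch querying all four corners (assumes a font whose face has a MATH table):

    #include <hb.h>
    #include <hb-ot.h>
    #include <cstdio>

    // Sketch: query the MATH cut-in kerning of a glyph at a given correction
    // height, once per corner.
    static void print_math_kerning (hb_font_t *font, hb_codepoint_t glyph,
                                    hb_position_t correction_height)
    {
      static const hb_ot_math_kern_t corners[4] = {
        HB_OT_MATH_KERN_TOP_RIGHT, HB_OT_MATH_KERN_TOP_LEFT,
        HB_OT_MATH_KERN_BOTTOM_RIGHT, HB_OT_MATH_KERN_BOTTOM_LEFT
      };
      for (unsigned i = 0; i < 4; i++)
        printf ("corner %u: %d\n", i,
                hb_ot_math_get_glyph_kerning (font, glyph, corners[i],
                                              correction_height));
    }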
@ -346,12 +346,12 @@ struct MathKernInfo
|
|||
}
|
||||
|
||||
protected:
|
||||
OffsetTo<Coverage>
|
||||
Offset16To<Coverage>
|
||||
mathKernCoverage;
|
||||
/* Offset to Coverage table -
|
||||
* from the beginning of the
|
||||
* MathKernInfo table. */
|
||||
ArrayOf<MathKernInfoRecord>
|
||||
Array16Of<MathKernInfoRecord>
|
||||
mathKernInfoRecords;
|
||||
/* Array of MathKernInfoRecords,
|
||||
* per-glyph information for
|
||||
|
@ -395,22 +395,22 @@ struct MathGlyphInfo
|
|||
protected:
|
||||
/* Offset to MathItalicsCorrectionInfo table -
|
||||
* from the beginning of MathGlyphInfo table. */
|
||||
OffsetTo<MathItalicsCorrectionInfo> mathItalicsCorrectionInfo;
|
||||
Offset16To<MathItalicsCorrectionInfo> mathItalicsCorrectionInfo;
|
||||
|
||||
/* Offset to MathTopAccentAttachment table -
|
||||
* from the beginning of MathGlyphInfo table. */
|
||||
OffsetTo<MathTopAccentAttachment> mathTopAccentAttachment;
|
||||
Offset16To<MathTopAccentAttachment> mathTopAccentAttachment;
|
||||
|
||||
/* Offset to coverage table for Extended Shape glyphs -
|
||||
* from the beginning of MathGlyphInfo table. When the left or right glyph of
|
||||
* a box is an extended shape variant, the (ink) box (and not the default
|
||||
* position defined by values in MathConstants table) should be used for
|
||||
* vertical positioning purposes. May be NULL.. */
|
||||
OffsetTo<Coverage> extendedShapeCoverage;
|
||||
Offset16To<Coverage> extendedShapeCoverage;
|
||||
|
||||
/* Offset to MathKernInfo table -
|
||||
* from the beginning of MathGlyphInfo table. */
|
||||
OffsetTo<MathKernInfo> mathKernInfo;
|
||||
Offset16To<MathKernInfo> mathKernInfo;
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (8);
|
||||
|
@ -532,7 +532,7 @@ struct MathGlyphAssembly
|
|||
/* Italics correction of this
|
||||
* MathGlyphAssembly. Should not
|
||||
* depend on the assembly size. */
|
||||
ArrayOf<MathGlyphPartRecord>
|
||||
Array16Of<MathGlyphPartRecord>
|
||||
partRecords; /* Array of part records, from
|
||||
* left to right and bottom to
|
||||
* top. */
|
||||
|
@ -572,10 +572,10 @@ struct MathGlyphConstruction
|
|||
protected:
|
||||
/* Offset to MathGlyphAssembly table for this shape - from the beginning of
|
||||
MathGlyphConstruction table. May be NULL. */
|
||||
OffsetTo<MathGlyphAssembly> glyphAssembly;
|
||||
Offset16To<MathGlyphAssembly> glyphAssembly;
|
||||
|
||||
/* MathGlyphVariantRecords for alternative variants of the glyphs. */
|
||||
ArrayOf<MathGlyphVariantRecord> mathGlyphVariantRecord;
|
||||
Array16Of<MathGlyphVariantRecord> mathGlyphVariantRecord;
|
||||
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (4, mathGlyphVariantRecord);
|
||||
|
@ -636,7 +636,7 @@ struct MathVariants
|
|||
{
|
||||
bool vertical = HB_DIRECTION_IS_VERTICAL (direction);
|
||||
unsigned int count = vertical ? vertGlyphCount : horizGlyphCount;
|
||||
const OffsetTo<Coverage> &coverage = vertical ? vertGlyphCoverage
|
||||
const Offset16To<Coverage> &coverage = vertical ? vertGlyphCoverage
|
||||
: horizGlyphCoverage;
|
||||
|
||||
unsigned int index = (this+coverage).get_coverage (glyph);
|
||||
|
@ -653,11 +653,11 @@ struct MathVariants
|
|||
/* Minimum overlap of connecting
|
||||
* glyphs during glyph construction,
|
||||
* in design units. */
|
||||
OffsetTo<Coverage> vertGlyphCoverage;
|
||||
Offset16To<Coverage> vertGlyphCoverage;
|
||||
/* Offset to Coverage table -
|
||||
* from the beginning of MathVariants
|
||||
* table. */
|
||||
OffsetTo<Coverage> horizGlyphCoverage;
|
||||
Offset16To<Coverage> horizGlyphCoverage;
|
||||
/* Offset to Coverage table -
|
||||
* from the beginning of MathVariants
|
||||
* table. */
|
||||
|
@ -671,7 +671,7 @@ struct MathVariants
|
|||
/* Array of offsets to MathGlyphConstruction tables - from the beginning of
|
||||
the MathVariants table, for shapes growing in vertical/horizontal
|
||||
direction. */
|
||||
UnsizedArrayOf<OffsetTo<MathGlyphConstruction>>
|
||||
UnsizedArrayOf<Offset16To<MathGlyphConstruction>>
|
||||
glyphConstruction;
|
||||
|
||||
public:
|
||||
|
@ -711,11 +711,11 @@ struct MATH
|
|||
protected:
|
||||
FixedVersion<>version; /* Version of the MATH table
|
||||
* initially set to 0x00010000u */
|
||||
OffsetTo<MathConstants>
|
||||
Offset16To<MathConstants>
|
||||
mathConstants; /* MathConstants table */
|
||||
OffsetTo<MathGlyphInfo>
|
||||
Offset16To<MathGlyphInfo>
|
||||
mathGlyphInfo; /* MathGlyphInfo table */
|
||||
OffsetTo<MathVariants>
|
||||
Offset16To<MathVariants>
|
||||
mathVariants; /* MathVariants table */
|
||||
|
||||
public:
|
||||
|
|
|
@ -56,7 +56,7 @@ struct DataMap
|
|||
|
||||
protected:
|
||||
Tag tag; /* A tag indicating the type of metadata. */
|
||||
LNNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
NNOffset32To<UnsizedArrayOf<HBUINT8>>
|
||||
dataZ; /* Offset in bytes from the beginning of the
|
||||
* metadata table to the data for this tag. */
|
||||
HBUINT32 dataLength; /* Length of the data. The data is not required to
|
||||
|
@ -113,7 +113,7 @@ struct meta
|
|||
* Offset from the beginning of the table to the data.
|
||||
* Per OT specification:
|
||||
* Reserved. Not used; should be set to 0. */
|
||||
LArrayOf<DataMap>
|
||||
Array32Of<DataMap>
|
||||
dataMaps;/* Array of data map records. */
|
||||
public:
|
||||
DEFINE_SIZE_ARRAY (16, dataMaps);
|
||||
|
|
|
@ -149,7 +149,7 @@ struct NameRecord
|
|||
HBUINT16 languageID; /* Language ID. */
|
||||
HBUINT16 nameID; /* Name ID. */
|
||||
HBUINT16 length; /* String length (in bytes). */
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
NNOffset16To<UnsizedArrayOf<HBUINT8>>
|
||||
offset; /* String offset from start of storage area (in bytes). */
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (12);
|
||||
|
@ -230,7 +230,8 @@ struct name
|
|||
c->copy_all (records, src_string_pool);
|
||||
free (records.arrayZ);
|
||||
|
||||
if (unlikely (c->ran_out_of_room)) return_trace (false);
|
||||
|
||||
if (unlikely (c->ran_out_of_room ())) return_trace (false);
|
||||
|
||||
this->stringOffset = c->length ();
|
||||
|
||||
|
@ -357,7 +358,7 @@ struct name
|
|||
/* We only implement format 0 for now. */
|
||||
HBUINT16 format; /* Format selector (=0/1). */
|
||||
HBUINT16 count; /* Number of name records. */
|
||||
NNOffsetTo<UnsizedArrayOf<HBUINT8>>
|
||||
NNOffset16To<UnsizedArrayOf<HBUINT8>>
|
||||
stringOffset; /* Offset to start of string storage (from start of table). */
|
||||
UnsizedArrayOf<NameRecord>
|
||||
nameRecordZ; /* The name records where count is the number of records. */
|
||||
|
|
|
@ -56,7 +56,7 @@ struct postV2Tail
|
|||
}
|
||||
|
||||
protected:
|
||||
ArrayOf<HBUINT16> glyphNameIndex; /* This is not an offset, but is the
|
||||
Array16Of<HBUINT16> glyphNameIndex; /* This is not an offset, but is the
|
||||
* ordinal number of the glyph in 'post'
|
||||
* string tables. */
|
||||
/*UnsizedArrayOf<HBUINT8>
|
||||
|
@ -236,7 +236,7 @@ struct post
|
|||
|
||||
private:
|
||||
uint32_t version;
|
||||
const ArrayOf<HBUINT16> *glyphNameIndex;
|
||||
const Array16Of<HBUINT16> *glyphNameIndex;
|
||||
hb_vector_t<uint32_t> index_to_offset;
|
||||
const uint8_t *pool;
|
||||
hb_atomic_ptr_t<uint16_t *> gids_sorted_by_name;
|
||||
|
|
|
@ -208,11 +208,11 @@ struct ManifestLookup
|
|||
{
|
||||
public:
|
||||
OT::Tag tag;
|
||||
OT::OffsetTo<OT::SubstLookup> lookupOffset;
|
||||
OT::Offset16To<OT::SubstLookup> lookupOffset;
|
||||
public:
|
||||
DEFINE_SIZE_STATIC (6);
|
||||
};
|
||||
typedef OT::ArrayOf<ManifestLookup> Manifest;
|
||||
typedef OT::Array16Of<ManifestLookup> Manifest;
|
||||
|
||||
static bool
|
||||
arabic_fallback_plan_init_win1256 (arabic_fallback_plan_t *fallback_plan HB_UNUSED,
|
||||
|
|
|
@ -92,7 +92,7 @@ recategorize_combining_class (hb_codepoint_t u,
|
|||
case HB_MODIFIED_COMBINING_CLASS_CCC15: /* tsere */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC16: /* segol */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC17: /* patah */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC18: /* qamats */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC18: /* qamats & qamats qatan */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC20: /* qubuts */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC22: /* meteg */
|
||||
return HB_UNICODE_COMBINING_CLASS_BELOW;
|
||||
|
@ -104,7 +104,7 @@ recategorize_combining_class (hb_codepoint_t u,
|
|||
return HB_UNICODE_COMBINING_CLASS_ABOVE_RIGHT;
|
||||
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC25: /* sin dot */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC19: /* holam */
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC19: /* holam & holam haser for vav */
|
||||
return HB_UNICODE_COMBINING_CLASS_ABOVE_LEFT;
|
||||
|
||||
case HB_MODIFIED_COMBINING_CLASS_CCC26: /* point varika */
|
||||
|
|
|
@ -297,7 +297,7 @@ struct STAT
|
|||
unsigned int axis_index;
|
||||
if (!get_design_axes ().lfind (tag, &axis_index)) return false;
|
||||
|
||||
hb_array_t<const OffsetTo<AxisValue>> axis_values = get_axis_value_offsets ();
|
||||
hb_array_t<const Offset16To<AxisValue>> axis_values = get_axis_value_offsets ();
|
||||
for (unsigned int i = 0; i < axis_values.length; i++)
|
||||
{
|
||||
const AxisValue& axis_value = this+axis_values[i];
|
||||
|
@ -359,7 +359,7 @@ struct STAT
|
|||
hb_array_t<const StatAxisRecord> const get_design_axes () const
|
||||
{ return (this+designAxesOffset).as_array (designAxisCount); }
|
||||
|
||||
hb_array_t<const OffsetTo<AxisValue>> const get_axis_value_offsets () const
|
||||
hb_array_t<const Offset16To<AxisValue>> const get_axis_value_offsets () const
|
||||
{ return (this+offsetToAxisValueOffsets).as_array (axisValueCount); }
|
||||
|
||||
|
||||
|
@ -373,7 +373,7 @@ struct STAT
|
|||
* in the 'fvar' table. In all fonts, must
|
||||
* be greater than zero if axisValueCount
|
||||
* is greater than zero. */
|
||||
LNNOffsetTo<UnsizedArrayOf<StatAxisRecord>>
|
||||
NNOffset32To<UnsizedArrayOf<StatAxisRecord>>
|
||||
designAxesOffset;
|
||||
/* Offset in bytes from the beginning of
|
||||
* the STAT table to the start of the design
|
||||
|
@ -381,7 +381,7 @@ struct STAT
|
|||
* set to zero; if designAxisCount is greater
|
||||
* than zero, must be greater than zero. */
|
||||
HBUINT16 axisValueCount; /* The number of axis value tables. */
|
||||
LNNOffsetTo<UnsizedArrayOf<OffsetTo<AxisValue>>>
|
||||
NNOffset32To<UnsizedArrayOf<Offset16To<AxisValue>>>
|
||||
offsetToAxisValueOffsets;
|
||||
/* Offset in bytes from the beginning of
|
||||
* the STAT table to the start of the design
|
||||
|
|
|
@ -58,7 +58,7 @@ struct AxisValueMap
|
|||
DEFINE_SIZE_STATIC (4);
|
||||
};
|
||||
|
||||
struct SegmentMaps : ArrayOf<AxisValueMap>
|
||||
struct SegmentMaps : Array16Of<AxisValueMap>
|
||||
{
|
||||
int map (int value, unsigned int from_offset = 0, unsigned int to_offset = 1) const
|
||||
{
|
||||
|
|
|
@ -303,7 +303,7 @@ struct fvar
|
|||
protected:
|
||||
FixedVersion<>version; /* Version of the fvar table
|
||||
* initially set to 0x00010000u */
|
||||
OffsetTo<AxisRecord>
|
||||
Offset16To<AxisRecord>
|
||||
firstAxis; /* Offset in bytes from the beginning of the table
|
||||
* to the start of the AxisRecord array. */
|
||||
HBUINT16 reserved; /* This field is permanently reserved. Set to 2. */
|
||||
|
|
|
@ -374,7 +374,7 @@ struct GlyphVariationData
|
|||
* low 12 bits are the number of tuple variation tables
|
||||
* for this glyph. The number of tuple variation tables
|
||||
* can be any number between 1 and 4095. */
|
||||
OffsetTo<HBUINT8>
|
||||
Offset16To<HBUINT8>
|
||||
data; /* Offset from the start of the GlyphVariationData table
|
||||
* to the serialized data. */
|
||||
/* TupleVariationHeader tupleVariationHeaders[] *//* Array of tuple variation headers. */
|
||||
|
@ -676,7 +676,7 @@ no_more_gaps:
|
|||
* can be referenced within glyph variation data tables for
|
||||
* multiple glyphs, as opposed to other tuple records stored
|
||||
* directly within a glyph variation data table. */
|
||||
LNNOffsetTo<UnsizedArrayOf<F2DOT14>>
|
||||
NNOffset32To<UnsizedArrayOf<F2DOT14>>
|
||||
sharedTuples; /* Offset from the start of this table to the shared tuple records.
|
||||
* Array of tuple records shared across all glyph variation data tables. */
|
||||
HBUINT16 glyphCount; /* The number of glyphs in this font. This must match the number of
|
||||
|
@ -684,7 +684,7 @@ no_more_gaps:
|
|||
HBUINT16 flags; /* Bit-field that gives the format of the offset array that follows.
|
||||
* If bit 0 is clear, the offsets are uint16; if bit 0 is set, the
|
||||
* offsets are uint32. */
|
||||
LOffsetTo<GlyphVariationData>
|
||||
Offset32To<GlyphVariationData>
|
||||
dataZ; /* Offset from the start of this table to the array of
|
||||
* GlyphVariationData tables. */
|
||||
UnsizedArrayOf<HBUINT8>
|
||||
|
|
|
@@ -49,7 +49,7 @@ struct DeltaSetIndexMap
   {
     unsigned int width = plan.get_width ();
     unsigned int inner_bit_count = plan.get_inner_bit_count ();
     const hb_array_t<const unsigned int> output_map = plan.get_output_map ();
     const hb_array_t<const uint32_t> output_map = plan.get_output_map ();

     TRACE_SERIALIZE (this);
     if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
@@ -76,7 +76,7 @@ struct DeltaSetIndexMap
     return_trace (true);
   }

   unsigned int map (unsigned int v) const /* Returns 16.16 outer.inner. */
   uint32_t map (unsigned int v) const /* Returns 16.16 outer.inner. */
   {
     /* If count is zero, pass value unchanged. This takes
      * care of direct mapping for advance map. */
@@ -217,7 +217,7 @@ struct index_map_subset_plan_t
       hb_codepoint_t old_gid;
       if (plan->old_gid_for_new_gid (gid, &old_gid))
       {
	unsigned int v = input_map->map (old_gid);
	uint32_t v = input_map->map (old_gid);
	unsigned int outer = v >> 16;
	output_map[gid] = (outer_map[outer] << 16) | (inner_maps[outer][v & 0xFFFF]);
       }
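The variation index returned by map() is a packed 16.16 value: the outer (variation data) index in the high 16 bits and the inner (delta-set) index in the low 16 bits, which is exactly what the `v >> 16`, `v & 0xFFFF` and `(outer << 16) | inner` expressions above manipulate. A tiny standalone sketch of that packing:

    #include <cstdint>

    // Sketch: pack/unpack a variation index as outer.inner in one uint32_t.
    static inline uint32_t pack_varidx (uint16_t outer, uint16_t inner)
    { return ((uint32_t) outer << 16) | inner; }

    static inline uint16_t varidx_outer (uint32_t v) { return (uint16_t) (v >> 16); }
    static inline uint16_t varidx_inner (uint32_t v) { return (uint16_t) (v & 0xFFFFu); }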
@ -234,14 +234,14 @@ struct index_map_subset_plan_t
|
|||
{ return (map_count? (DeltaSetIndexMap::min_size + get_width () * map_count): 0); }
|
||||
|
||||
bool is_identity () const { return get_output_map ().length == 0; }
|
||||
hb_array_t<const unsigned int> get_output_map () const { return output_map.as_array (); }
|
||||
hb_array_t<const uint32_t> get_output_map () const { return output_map.as_array (); }
|
||||
|
||||
protected:
|
||||
unsigned int map_count;
|
||||
hb_vector_t<unsigned int> max_inners;
|
||||
unsigned int outer_bit_count;
|
||||
unsigned int inner_bit_count;
|
||||
hb_vector_t<unsigned int> output_map;
|
||||
hb_vector_t<uint32_t> output_map;
|
||||
};
|
||||
|
||||
struct hvarvvar_subset_plan_t
|
||||
|
@ -408,7 +408,7 @@ struct HVARVVAR
|
|||
|
||||
float get_advance_var (hb_codepoint_t glyph, hb_font_t *font) const
|
||||
{
|
||||
unsigned int varidx = (this+advMap).map (glyph);
|
||||
uint32_t varidx = (this+advMap).map (glyph);
|
||||
return (this+varStore).get_delta (varidx, font->coords, font->num_coords);
|
||||
}
|
||||
|
||||
|
@ -416,7 +416,7 @@ struct HVARVVAR
|
|||
const int *coords, unsigned int coord_count) const
|
||||
{
|
||||
if (!has_side_bearing_deltas ()) return 0.f;
|
||||
unsigned int varidx = (this+lsbMap).map (glyph);
|
||||
uint32_t varidx = (this+lsbMap).map (glyph);
|
||||
return (this+varStore).get_delta (varidx, coords, coord_count);
|
||||
}
|
||||
|
||||
|
@ -425,13 +425,13 @@ struct HVARVVAR
|
|||
protected:
|
||||
FixedVersion<>version; /* Version of the metrics variation table
|
||||
* initially set to 0x00010000u */
|
||||
LOffsetTo<VariationStore>
|
||||
Offset32To<VariationStore>
|
||||
varStore; /* Offset to item variation store table. */
|
||||
LOffsetTo<DeltaSetIndexMap>
|
||||
Offset32To<DeltaSetIndexMap>
|
||||
advMap; /* Offset to advance var-idx mapping. */
|
||||
LOffsetTo<DeltaSetIndexMap>
|
||||
Offset32To<DeltaSetIndexMap>
|
||||
lsbMap; /* Offset to lsb/tsb var-idx mapping. */
|
||||
LOffsetTo<DeltaSetIndexMap>
|
||||
Offset32To<DeltaSetIndexMap>
|
||||
rsbMap; /* Offset to rsb/bsb var-idx mapping. */
|
||||
|
||||
public:
|
||||
|
@ -475,7 +475,7 @@ struct VVAR : HVARVVAR {
|
|||
bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset<VVAR> (c); }
|
||||
|
||||
protected:
|
||||
LOffsetTo<DeltaSetIndexMap>
|
||||
Offset32To<DeltaSetIndexMap>
|
||||
vorgMap; /* Offset to vertical-origin var-idx mapping. */
|
||||
|
||||
public:
|
||||
|
|
|
@ -103,7 +103,7 @@ protected:
|
|||
HBUINT16 valueRecordSize;/* The size in bytes of each value record —
|
||||
* must be greater than zero. */
|
||||
HBUINT16 valueRecordCount;/* The number of value records — may be zero. */
|
||||
OffsetTo<VariationStore>
|
||||
Offset16To<VariationStore>
|
||||
varStore; /* Offset to item variation store table. */
|
||||
UnsizedArrayOf<HBUINT8>
|
||||
valuesZ; /* Array of value records. The records must be
|
||||
|
|
|
@ -125,7 +125,7 @@ struct VORG
|
|||
FixedVersion<>version; /* Version of VORG table. Set to 0x00010000u. */
|
||||
FWORD defaultVertOriginY;
|
||||
/* The default vertical origin. */
|
||||
SortedArrayOf<VertOriginMetric>
|
||||
SortedArray16Of<VertOriginMetric>
|
||||
vertYOrigins; /* The array of vertical origins. */
|
||||
|
||||
public:
|
||||
|
|
|
@ -0,0 +1,151 @@
|
|||
/*
|
||||
* Copyright © 2020 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Garret Rieger
|
||||
*/
|
||||
|
||||
#ifndef HB_PRIORITY_QUEUE_HH
|
||||
#define HB_PRIORITY_QUEUE_HH
|
||||
|
||||
#include "hb.hh"
|
||||
#include "hb-vector.hh"
|
||||
|
||||
/*
|
||||
* hb_priority_queue_t
|
||||
*
|
||||
* Priority queue implemented as a binary heap. Supports extract minimum
|
||||
* and insert operations.
|
||||
*/
|
||||
struct hb_priority_queue_t
|
||||
{
|
||||
HB_DELETE_COPY_ASSIGN (hb_priority_queue_t);
|
||||
hb_priority_queue_t () { init (); }
|
||||
~hb_priority_queue_t () { fini (); }
|
||||
|
||||
private:
|
||||
typedef hb_pair_t<int64_t, unsigned> item_t;
|
||||
hb_vector_t<item_t> heap;
|
||||
|
||||
public:
|
||||
void init () { heap.init (); }
|
||||
|
||||
void fini () { heap.fini (); }
|
||||
|
||||
void reset () { heap.resize (0); }
|
||||
|
||||
bool in_error () const { return heap.in_error (); }
|
||||
|
||||
void insert (int64_t priority, unsigned value)
|
||||
{
|
||||
heap.push (item_t (priority, value));
|
||||
bubble_up (heap.length - 1);
|
||||
}
|
||||
|
||||
item_t pop_minimum ()
|
||||
{
|
||||
item_t result = heap[0];
|
||||
|
||||
heap[0] = heap[heap.length - 1];
|
||||
heap.shrink (heap.length - 1);
|
||||
bubble_down (0);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
const item_t& minimum ()
|
||||
{
|
||||
return heap[0];
|
||||
}
|
||||
|
||||
bool is_empty () const { return heap.length == 0; }
|
||||
explicit operator bool () const { return !is_empty (); }
|
||||
unsigned int get_population () const { return heap.length; }
|
||||
|
||||
/* Sink interface. */
|
||||
hb_priority_queue_t& operator << (item_t item)
|
||||
{ insert (item.first, item.second); return *this; }
|
||||
|
||||
private:
|
||||
|
||||
static constexpr unsigned parent (unsigned index)
|
||||
{
|
||||
return (index - 1) / 2;
|
||||
}
|
||||
|
||||
static constexpr unsigned left_child (unsigned index)
|
||||
{
|
||||
return 2 * index + 1;
|
||||
}
|
||||
|
||||
static constexpr unsigned right_child (unsigned index)
|
||||
{
|
||||
return 2 * index + 2;
|
||||
}
|
||||
|
||||
void bubble_down (unsigned index)
|
||||
{
|
||||
unsigned left = left_child (index);
|
||||
unsigned right = right_child (index);
|
||||
|
||||
bool has_left = left < heap.length;
|
||||
if (!has_left)
|
||||
// If there's no left, then there's also no right.
|
||||
return;
|
||||
|
||||
bool has_right = right < heap.length;
|
||||
if (heap[index].first <= heap[left].first
|
||||
&& (!has_right || heap[index].first <= heap[right].first))
|
||||
return;
|
||||
|
||||
if (!has_right || heap[left].first < heap[right].first)
|
||||
{
|
||||
swap (index, left);
|
||||
bubble_down (left);
|
||||
return;
|
||||
}
|
||||
|
||||
swap (index, right);
|
||||
bubble_down (right);
|
||||
}
|
||||
|
||||
void bubble_up (unsigned index)
|
||||
{
|
||||
if (index == 0) return;
|
||||
|
||||
unsigned parent_index = parent (index);
|
||||
if (heap[parent_index].first <= heap[index].first)
|
||||
return;
|
||||
|
||||
swap (index, parent_index);
|
||||
bubble_up (parent_index);
|
||||
}
|
||||
|
||||
void swap (unsigned a, unsigned b)
|
||||
{
|
||||
item_t temp = heap[a];
|
||||
heap[a] = heap[b];
|
||||
heap[b] = temp;
|
||||
}
|
||||
};
|
||||
|
||||
#endif /* HB_PRIORITY_QUEUE_HH */
|
|
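hb_priority_queue_t above is a hand-rolled binary min-heap over (int64_t priority, unsigned value) pairs, with insert and pop_minimum as its two operations. The same extract-minimum behaviour, shown with the standard library for comparison (a sketch, not how HarfBuzz itself uses the type):

    #include <cstdint>
    #include <cstdio>
    #include <queue>
    #include <utility>
    #include <vector>

    int main ()
    {
      using item_t = std::pair<int64_t, unsigned>;   // (priority, value)
      // std::priority_queue is a max-heap by default; std::greater turns it
      // into a min-heap so top() is the minimum, like pop_minimum() above.
      std::priority_queue<item_t, std::vector<item_t>, std::greater<item_t>> q;

      q.emplace (30, 2u);
      q.emplace (10, 0u);
      q.emplace (20, 1u);

      while (!q.empty ())
      {
        item_t min = q.top (); q.pop ();
        printf ("priority=%lld value=%u\n", (long long) min.first, min.second);
      }
      return 0;   // prints priorities 10, 20, 30 in order
    }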
@ -0,0 +1,758 @@
|
|||
/*
|
||||
* Copyright © 2020 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Garret Rieger
|
||||
*/
|
||||
|
||||
#ifndef HB_REPACKER_HH
|
||||
#define HB_REPACKER_HH
|
||||
|
||||
#include "hb-open-type.hh"
|
||||
#include "hb-map.hh"
|
||||
#include "hb-priority-queue.hh"
|
||||
#include "hb-serialize.hh"
|
||||
#include "hb-vector.hh"
|
||||
|
||||
|
||||
struct graph_t
|
||||
{
|
||||
struct vertex_t
|
||||
{
|
||||
vertex_t () :
|
||||
distance (0),
|
||||
incoming_edges (0),
|
||||
start (0),
|
||||
end (0),
|
||||
priority(0) {}
|
||||
|
||||
void fini () { obj.fini (); }
|
||||
|
||||
hb_serialize_context_t::object_t obj;
|
||||
int64_t distance;
|
||||
unsigned incoming_edges;
|
||||
unsigned start;
|
||||
unsigned end;
|
||||
unsigned priority;
|
||||
|
||||
bool is_shared () const
|
||||
{
|
||||
return incoming_edges > 1;
|
||||
}
|
||||
|
||||
bool is_leaf () const
|
||||
{
|
||||
return !obj.links.length;
|
||||
}
|
||||
|
||||
void raise_priority ()
|
||||
{
|
||||
priority++;
|
||||
}
|
||||
|
||||
int64_t modified_distance (unsigned order) const
|
||||
{
|
||||
// TODO(garretrieger): once priority is high enough, should try
|
||||
// setting distance = 0 which will force to sort immediately after
|
||||
// it's parent where possible.
|
||||
|
||||
int64_t modified_distance =
|
||||
hb_min (hb_max(distance + distance_modifier (), 0), 0x7FFFFFFFFF);
|
||||
return (modified_distance << 24) | (0x00FFFFFF & order);
|
||||
}
|
||||
|
||||
int64_t distance_modifier () const
|
||||
{
|
||||
if (!priority) return 0;
|
||||
int64_t table_size = obj.tail - obj.head;
|
||||
return -(table_size - table_size / (1 << hb_min(priority, 16u)));
|
||||
}
|
||||
};
|
||||
|
||||
struct overflow_record_t
|
||||
{
|
||||
unsigned parent;
|
||||
const hb_serialize_context_t::object_t::link_t* link;
|
||||
};
|
||||
|
||||
struct clone_buffer_t
|
||||
{
|
||||
clone_buffer_t () : head (nullptr), tail (nullptr) {}
|
||||
|
||||
bool copy (const hb_serialize_context_t::object_t& object)
|
||||
{
|
||||
fini ();
|
||||
unsigned size = object.tail - object.head;
|
||||
head = (char*) malloc (size);
|
||||
if (!head) return false;
|
||||
|
||||
memcpy (head, object.head, size);
|
||||
tail = head + size;
|
||||
return true;
|
||||
}
|
||||
|
||||
char* head;
|
||||
char* tail;
|
||||
|
||||
void fini ()
|
||||
{
|
||||
if (!head) return;
|
||||
free (head);
|
||||
head = nullptr;
|
||||
}
|
||||
};
|
||||
|
||||
/*
* A topological sorting of an object graph. Ordered
* in reverse serialization order (first object in the
* serialization is at the end of the list). This matches
* the 'packed' object stack used internally in the
* serializer.
*/
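// For example: the root table, which comes first in the serialized output,
// is the last entry of vertices_, which is why root_idx () below simply
// returns vertices_.length - 1.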
|
||||
graph_t (const hb_vector_t<hb_serialize_context_t::object_t *>& objects)
|
||||
: edge_count_invalid (true),
|
||||
distance_invalid (true),
|
||||
positions_invalid (true),
|
||||
successful (true)
|
||||
{
|
||||
bool removed_nil = false;
|
||||
for (unsigned i = 0; i < objects.length; i++)
|
||||
{
|
||||
// TODO(grieger): check all links point to valid objects.
|
||||
|
||||
// If this graph came from a serialization buffer, object 0 is the
// nil object. We don't need it for our purposes here, so drop it.
|
||||
if (i == 0 && !objects[i])
|
||||
{
|
||||
removed_nil = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
vertex_t* v = vertices_.push ();
|
||||
if (check_success (!vertices_.in_error ()))
|
||||
v->obj = *objects[i];
|
||||
if (!removed_nil) continue;
|
||||
for (unsigned i = 0; i < v->obj.links.length; i++)
|
||||
// Fix indices to account for removed nil object.
|
||||
v->obj.links[i].objidx--;
|
||||
}
|
||||
}
|
||||
|
||||
~graph_t ()
|
||||
{
|
||||
vertices_.fini_deep ();
|
||||
clone_buffers_.fini_deep ();
|
||||
}
|
||||
|
||||
bool in_error () const
|
||||
{
|
||||
return !successful || vertices_.in_error () || clone_buffers_.in_error ();
|
||||
}
|
||||
|
||||
const vertex_t& root () const
|
||||
{
|
||||
return vertices_[root_idx ()];
|
||||
}
|
||||
|
||||
unsigned root_idx () const
|
||||
{
|
||||
// Object graphs are in reverse order; the first object is at the end
// of the vector. Since the graph is topologically sorted it's safe to
// assume the first object has no incoming edges.
|
||||
return vertices_.length - 1;
|
||||
}
|
||||
|
||||
const hb_serialize_context_t::object_t& object(unsigned i) const
|
||||
{
|
||||
return vertices_[i].obj;
|
||||
}
|
||||
|
||||
/*
|
||||
* serialize graph into the provided serialization buffer.
|
||||
*/
|
||||
void serialize (hb_serialize_context_t* c) const
|
||||
{
|
||||
c->start_serialize<void> ();
|
||||
for (unsigned i = 0; i < vertices_.length; i++) {
|
||||
c->push ();
|
||||
|
||||
size_t size = vertices_[i].obj.tail - vertices_[i].obj.head;
|
||||
char* start = c->allocate_size <char> (size);
|
||||
if (!start) return;
|
||||
|
||||
memcpy (start, vertices_[i].obj.head, size);
|
||||
|
||||
for (const auto& link : vertices_[i].obj.links)
|
||||
serialize_link (link, start, c);
|
||||
|
||||
// All duplications are already encoded in the graph, so don't
|
||||
// enable sharing during packing.
|
||||
c->pop_pack (false);
|
||||
}
|
||||
c->end_serialize ();
|
||||
}
|
||||
|
||||
/*
|
||||
* Generates a new topological sorting of graph using Kahn's
|
||||
* algorithm: https://en.wikipedia.org/wiki/Topological_sorting#Algorithms
|
||||
*/
|
||||
void sort_kahn ()
|
||||
{
|
||||
positions_invalid = true;
|
||||
|
||||
if (vertices_.length <= 1) {
|
||||
// Graph of 1 or less doesn't need sorting.
|
||||
return;
|
||||
}
|
||||
|
||||
hb_vector_t<unsigned> queue;
|
||||
hb_vector_t<vertex_t> sorted_graph;
|
||||
hb_vector_t<unsigned> id_map;
|
||||
if (unlikely (!check_success (id_map.resize (vertices_.length)))) return;
|
||||
|
||||
hb_vector_t<unsigned> removed_edges;
|
||||
if (unlikely (!check_success (removed_edges.resize (vertices_.length)))) return;
|
||||
update_incoming_edge_count ();
|
||||
|
||||
queue.push (root_idx ());
|
||||
int new_id = vertices_.length - 1;
|
||||
|
||||
while (!queue.in_error () && queue.length)
|
||||
{
|
||||
unsigned next_id = queue[0];
|
||||
queue.remove (0);
|
||||
|
||||
vertex_t& next = vertices_[next_id];
|
||||
sorted_graph.push (next);
|
||||
id_map[next_id] = new_id--;
|
||||
|
||||
for (const auto& link : next.obj.links) {
|
||||
removed_edges[link.objidx]++;
|
||||
if (!(vertices_[link.objidx].incoming_edges - removed_edges[link.objidx]))
|
||||
queue.push (link.objidx);
|
||||
}
|
||||
}
|
||||
|
||||
check_success (!queue.in_error ());
|
||||
check_success (!sorted_graph.in_error ());
|
||||
if (!check_success (new_id == -1))
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Graph is not fully connected.");
|
||||
|
||||
remap_obj_indices (id_map, &sorted_graph);
|
||||
|
||||
sorted_graph.as_array ().reverse ();
|
||||
|
||||
vertices_.fini_deep ();
|
||||
vertices_ = sorted_graph;
|
||||
sorted_graph.fini_deep ();
|
||||
}
|
||||
|
||||
/*
|
||||
* Generates a new topological sorting of graph ordered by the shortest
|
||||
* distance to each node.
|
||||
*/
|
||||
void sort_shortest_distance ()
|
||||
{
|
||||
positions_invalid = true;
|
||||
|
||||
if (vertices_.length <= 1) {
|
||||
// Graph of 1 or less doesn't need sorting.
|
||||
return;
|
||||
}
|
||||
|
||||
update_distances ();
|
||||
|
||||
hb_priority_queue_t queue;
|
||||
hb_vector_t<vertex_t> sorted_graph;
|
||||
hb_vector_t<unsigned> id_map;
|
||||
if (unlikely (!check_success (id_map.resize (vertices_.length)))) return;
|
||||
|
||||
hb_vector_t<unsigned> removed_edges;
|
||||
if (unlikely (!check_success (removed_edges.resize (vertices_.length)))) return;
|
||||
update_incoming_edge_count ();
|
||||
|
||||
queue.insert (root ().modified_distance (0), root_idx ());
|
||||
int new_id = root_idx ();
|
||||
unsigned order = 1;
|
||||
while (!queue.in_error () && !queue.is_empty ())
|
||||
{
|
||||
unsigned next_id = queue.pop_minimum().second;
|
||||
|
||||
vertex_t& next = vertices_[next_id];
|
||||
sorted_graph.push (next);
|
||||
id_map[next_id] = new_id--;
|
||||
|
||||
for (const auto& link : next.obj.links) {
|
||||
removed_edges[link.objidx]++;
|
||||
if (!(vertices_[link.objidx].incoming_edges - removed_edges[link.objidx]))
|
||||
// Add the order that the links were encountered to the priority.
// This ensures that ties between objects' priorities are broken in a
// consistent way. More specifically, this is set up so that if a set of
// objects have the same distance they'll be added to the topological
// order in the order that they are referenced from the parent object.
|
||||
queue.insert (vertices_[link.objidx].modified_distance (order++),
|
||||
link.objidx);
|
||||
}
|
||||
}
|
||||
|
||||
check_success (!queue.in_error ());
|
||||
check_success (!sorted_graph.in_error ());
|
||||
if (!check_success (new_id == -1))
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Graph is not fully connected.");
|
||||
|
||||
remap_obj_indices (id_map, &sorted_graph);
|
||||
|
||||
sorted_graph.as_array ().reverse ();
|
||||
|
||||
vertices_.fini_deep ();
|
||||
vertices_ = sorted_graph;
|
||||
sorted_graph.fini_deep ();
|
||||
}
|
||||
|
||||
/*
* Creates a copy of child and re-assigns the link from
* parent to the clone. The copy is a shallow copy; objects
* linked from child are not duplicated.
*/
|
||||
void duplicate (unsigned parent_idx, unsigned child_idx)
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, " Duplicating %d => %d",
|
||||
parent_idx, child_idx);
|
||||
|
||||
positions_invalid = true;
|
||||
|
||||
auto* clone = vertices_.push ();
|
||||
auto& child = vertices_[child_idx];
|
||||
clone_buffer_t* buffer = clone_buffers_.push ();
|
||||
if (vertices_.in_error ()
|
||||
|| clone_buffers_.in_error ()
|
||||
|| !check_success (buffer->copy (child.obj))) {
|
||||
return;
|
||||
}
|
||||
|
||||
clone->obj.head = buffer->head;
|
||||
clone->obj.tail = buffer->tail;
|
||||
clone->distance = child.distance;
|
||||
|
||||
for (const auto& l : child.obj.links)
|
||||
clone->obj.links.push (l);
|
||||
|
||||
check_success (!clone->obj.links.in_error ());
|
||||
|
||||
auto& parent = vertices_[parent_idx];
|
||||
unsigned clone_idx = vertices_.length - 2;
|
||||
for (unsigned i = 0; i < parent.obj.links.length; i++)
|
||||
{
|
||||
auto& l = parent.obj.links[i];
|
||||
if (l.objidx == child_idx)
|
||||
{
|
||||
l.objidx = clone_idx;
|
||||
clone->incoming_edges++;
|
||||
child.incoming_edges--;
|
||||
}
|
||||
}
|
||||
|
||||
// The last object is the root of the graph, so swap the root back to the end.
// The root's obj idx does change, but since it's the root nothing else refers to it;
// all other obj idx's will be unaffected.
|
||||
vertex_t root = vertices_[vertices_.length - 2];
|
||||
vertices_[vertices_.length - 2] = *clone;
|
||||
vertices_[vertices_.length - 1] = root;
|
||||
}
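// Illustrative scenario for duplicate (): if parents P1 and P2 both link to a
// shared child C and only the P1 -> C offset overflows, duplicating C for P1
// re-targets just P1's link at the fresh copy while P2 keeps linking to the
// original, so only the overflowing offset has to change.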
|
||||
|
||||
/*
|
||||
* Raises the sorting priority of all children.
|
||||
*/
|
||||
void raise_childrens_priority (unsigned parent_idx)
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, " Raising priority of all children of %d",
|
||||
parent_idx);
|
||||
// This operation doesn't change ordering until a sort is run, so no need
|
||||
// to invalidate positions. It does not change graph structure so no need
|
||||
// to update distances or edge counts.
|
||||
auto& parent = vertices_[parent_idx].obj;
|
||||
for (unsigned i = 0; i < parent.links.length; i++)
|
||||
vertices_[parent.links[i].objidx].raise_priority ();
|
||||
}
|
||||
|
||||
/*
|
||||
* Will any offsets overflow on graph when it's serialized?
|
||||
*/
|
||||
bool will_overflow (hb_vector_t<overflow_record_t>* overflows = nullptr)
|
||||
{
|
||||
if (overflows) overflows->resize (0);
|
||||
update_positions ();
|
||||
|
||||
for (int parent_idx = vertices_.length - 1; parent_idx >= 0; parent_idx--)
|
||||
{
|
||||
for (const auto& link : vertices_[parent_idx].obj.links)
|
||||
{
|
||||
int64_t offset = compute_offset (parent_idx, link);
|
||||
if (is_valid_offset (offset, link))
|
||||
continue;
|
||||
|
||||
if (!overflows) return true;
|
||||
|
||||
overflow_record_t r;
|
||||
r.parent = parent_idx;
|
||||
r.link = &link;
|
||||
overflows->push (r);
|
||||
}
|
||||
}
|
||||
|
||||
if (!overflows) return false;
|
||||
return overflows->length;
|
||||
}
|
||||
|
||||
void print_overflows (const hb_vector_t<overflow_record_t>& overflows)
|
||||
{
|
||||
if (!DEBUG_ENABLED(SUBSET_REPACK)) return;
|
||||
|
||||
update_incoming_edge_count ();
|
||||
for (const auto& o : overflows)
|
||||
{
|
||||
const auto& child = vertices_[o.link->objidx];
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, " overflow from %d => %d (%d incoming , %d outgoing)",
|
||||
o.parent,
|
||||
o.link->objidx,
|
||||
child.incoming_edges,
|
||||
child.obj.links.length);
|
||||
}
|
||||
}
|
||||
|
||||
void err_other_error () { this->successful = false; }
|
||||
|
||||
private:
|
||||
|
||||
bool check_success (bool success)
|
||||
{ return this->successful && (success || (err_other_error (), false)); }
|
||||
|
||||
/*
|
||||
* Creates a map from objid to # of incoming edges.
|
||||
*/
|
||||
void update_incoming_edge_count ()
|
||||
{
|
||||
if (!edge_count_invalid) return;
|
||||
|
||||
for (unsigned i = 0; i < vertices_.length; i++)
|
||||
vertices_[i].incoming_edges = 0;
|
||||
|
||||
for (const vertex_t& v : vertices_)
|
||||
{
|
||||
for (auto& l : v.obj.links)
|
||||
{
|
||||
vertices_[l.objidx].incoming_edges++;
|
||||
}
|
||||
}
|
||||
|
||||
edge_count_invalid = false;
|
||||
}
|
||||
|
||||
/*
|
||||
* compute the serialized start and end positions for each vertex.
|
||||
*/
|
||||
void update_positions ()
|
||||
{
|
||||
if (!positions_invalid) return;
|
||||
|
||||
unsigned current_pos = 0;
|
||||
for (int i = root_idx (); i >= 0; i--)
|
||||
{
|
||||
auto& v = vertices_[i];
|
||||
v.start = current_pos;
|
||||
current_pos += v.obj.tail - v.obj.head;
|
||||
v.end = current_pos;
|
||||
}
|
||||
|
||||
positions_invalid = false;
|
||||
}
|
||||
|
||||
/*
|
||||
* Finds the distance to each object in the graph
|
||||
* from the initial node.
|
||||
*/
|
||||
void update_distances ()
|
||||
{
|
||||
if (!distance_invalid) return;
|
||||
|
||||
// Uses Dijkstra's algorithm to find all of the shortest distances.
// https://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
//
// Implementation Note:
// Since our priority queue doesn't support fast priority decreases
// we instead just add new entries into the queue when a priority changes.
// Redundant ones are filtered out later on by the visited set.
// According to https://www3.cs.stonybrook.edu/~rezaul/papers/TR-07-54.pdf
// for practical performance this is faster than using a more advanced queue
// (such as a Fibonacci queue) with a fast decrease-priority operation.
|
||||
for (unsigned i = 0; i < vertices_.length; i++)
|
||||
{
|
||||
if (i == vertices_.length - 1)
|
||||
vertices_[i].distance = 0;
|
||||
else
|
||||
vertices_[i].distance = hb_int_max (int64_t);
|
||||
}
|
||||
|
||||
hb_priority_queue_t queue;
|
||||
queue.insert (0, vertices_.length - 1);
|
||||
|
||||
hb_set_t visited;
|
||||
|
||||
while (!queue.in_error () && !queue.is_empty ())
|
||||
{
|
||||
unsigned next_idx = queue.pop_minimum ().second;
|
||||
if (visited.has (next_idx)) continue;
|
||||
const auto& next = vertices_[next_idx];
|
||||
int64_t next_distance = vertices_[next_idx].distance;
|
||||
visited.add (next_idx);
|
||||
|
||||
for (const auto& link : next.obj.links)
|
||||
{
|
||||
if (visited.has (link.objidx)) continue;
|
||||
|
||||
const auto& child = vertices_[link.objidx].obj;
|
||||
int64_t child_weight = child.tail - child.head +
|
||||
(!link.is_wide ? (1 << 16) : ((int64_t) 1 << 32));
|
||||
int64_t child_distance = next_distance + child_weight;
|
||||
|
||||
if (child_distance < vertices_[link.objidx].distance)
|
||||
{
|
||||
vertices_[link.objidx].distance = child_distance;
|
||||
queue.insert (child_distance, link.objidx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
check_success (!queue.in_error ());
|
||||
if (!check_success (queue.is_empty ()))
|
||||
{
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "Graph is not fully connected.");
|
||||
return;
|
||||
}
|
||||
|
||||
distance_invalid = false;
|
||||
}
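// Distance accounting example: a child reached through a 16-bit offset
// contributes (its size + 2^16) to the path length, while the same child
// behind a 32-bit offset contributes (its size + 2^32), so vertices reachable
// only through wide links end up with much larger distances and are placed
// later in the shortest-distance ordering.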
|
||||
|
||||
int64_t compute_offset (
|
||||
unsigned parent_idx,
|
||||
const hb_serialize_context_t::object_t::link_t& link) const
|
||||
{
|
||||
const auto& parent = vertices_[parent_idx];
|
||||
const auto& child = vertices_[link.objidx];
|
||||
int64_t offset = 0;
|
||||
switch ((hb_serialize_context_t::whence_t) link.whence) {
|
||||
case hb_serialize_context_t::whence_t::Head:
|
||||
offset = child.start - parent.start; break;
|
||||
case hb_serialize_context_t::whence_t::Tail:
|
||||
offset = child.start - parent.end; break;
|
||||
case hb_serialize_context_t::whence_t::Absolute:
|
||||
offset = child.start; break;
|
||||
}
|
||||
|
||||
assert (offset >= link.bias);
|
||||
offset -= link.bias;
|
||||
return offset;
|
||||
}
|
||||
|
||||
bool is_valid_offset (int64_t offset,
|
||||
const hb_serialize_context_t::object_t::link_t& link) const
|
||||
{
|
||||
if (link.is_signed)
|
||||
{
|
||||
if (link.is_wide)
|
||||
return offset >= -((int64_t) 1 << 31) && offset < ((int64_t) 1 << 31);
|
||||
else
|
||||
return offset >= -(1 << 15) && offset < (1 << 15);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (link.is_wide)
|
||||
return offset >= 0 && offset < ((int64_t) 1 << 32);
|
||||
else
|
||||
return offset >= 0 && offset < (1 << 16);
|
||||
}
|
||||
}
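// Equivalently: a signed 16-bit link accepts offsets in [-32768, 32767], an
// unsigned 16-bit link accepts [0, 65535], and the wide variants accept the
// corresponding 32-bit ranges; anything outside these is what will_overflow ()
// above reports.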
|
||||
|
||||
/*
|
||||
* Updates all objidx's in all links using the provided mapping.
|
||||
*/
|
||||
void remap_obj_indices (const hb_vector_t<unsigned>& id_map,
|
||||
hb_vector_t<vertex_t>* sorted_graph) const
|
||||
{
|
||||
for (unsigned i = 0; i < sorted_graph->length; i++)
|
||||
{
|
||||
for (unsigned j = 0; j < (*sorted_graph)[i].obj.links.length; j++)
|
||||
{
|
||||
auto& link = (*sorted_graph)[i].obj.links[j];
|
||||
link.objidx = id_map[link.objidx];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
template <typename O> void
|
||||
serialize_link_of_type (const hb_serialize_context_t::object_t::link_t& link,
|
||||
char* head,
|
||||
hb_serialize_context_t* c) const
|
||||
{
|
||||
OT::Offset<O>* offset = reinterpret_cast<OT::Offset<O>*> (head + link.position);
|
||||
*offset = 0;
|
||||
c->add_link (*offset,
|
||||
// serializer has an extra nil object at the start of the
|
||||
// object array. So all id's are +1 of what our id's are.
|
||||
link.objidx + 1,
|
||||
(hb_serialize_context_t::whence_t) link.whence,
|
||||
link.bias);
|
||||
}
|
||||
|
||||
void serialize_link (const hb_serialize_context_t::object_t::link_t& link,
|
||||
char* head,
|
||||
hb_serialize_context_t* c) const
|
||||
{
|
||||
if (link.is_wide)
|
||||
{
|
||||
if (link.is_signed)
|
||||
{
|
||||
serialize_link_of_type<OT::HBINT32> (link, head, c);
|
||||
} else {
|
||||
serialize_link_of_type<OT::HBUINT32> (link, head, c);
|
||||
}
|
||||
} else {
|
||||
if (link.is_signed)
|
||||
{
|
||||
serialize_link_of_type<OT::HBINT16> (link, head, c);
|
||||
} else {
|
||||
serialize_link_of_type<OT::HBUINT16> (link, head, c);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public:
|
||||
// TODO(garretrieger): make private, will need to move most of offset overflow code into graph.
|
||||
hb_vector_t<vertex_t> vertices_;
|
||||
private:
|
||||
hb_vector_t<clone_buffer_t> clone_buffers_;
|
||||
bool edge_count_invalid;
|
||||
bool distance_invalid;
|
||||
bool positions_invalid;
|
||||
bool successful;
|
||||
};
|
||||
|
||||
|
||||
/*
* Attempts to modify the topological sorting of the provided object graph to
* eliminate offset overflows in the links between objects of the graph. If a
* non-overflowing ordering is found, the updated graph is serialized into the
* provided serialization context.
*
* If necessary the structure of the graph may be modified in ways that do not
* affect the functionality of the graph. For example, shared objects may be
* duplicated.
*/
|
||||
inline void
|
||||
hb_resolve_overflows (const hb_vector_t<hb_serialize_context_t::object_t *>& packed,
|
||||
hb_serialize_context_t* c) {
|
||||
// Kahn sort is ~twice as fast as shortest distance sort and works for many fonts
|
||||
// so try it first to save time.
|
||||
graph_t sorted_graph (packed);
|
||||
sorted_graph.sort_kahn ();
|
||||
if (!sorted_graph.will_overflow ())
|
||||
{
|
||||
sorted_graph.serialize (c);
|
||||
return;
|
||||
}
|
||||
|
||||
sorted_graph.sort_shortest_distance ();
|
||||
|
||||
unsigned round = 0;
|
||||
hb_vector_t<graph_t::overflow_record_t> overflows;
|
||||
// TODO(garretrieger): select a good limit for max rounds.
|
||||
while (!sorted_graph.in_error ()
|
||||
&& sorted_graph.will_overflow (&overflows)
|
||||
&& round++ < 10) {
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "=== Over flow resolution round %d ===", round);
|
||||
sorted_graph.print_overflows (overflows);
|
||||
|
||||
bool resolution_attempted = false;
|
||||
hb_set_t priority_bumped_parents;
|
||||
// Try resolving the furthest overflows first.
|
||||
for (int i = overflows.length - 1; i >= 0; i--)
|
||||
{
|
||||
const graph_t::overflow_record_t& r = overflows[i];
|
||||
const auto& child = sorted_graph.vertices_[r.link->objidx];
|
||||
if (child.is_shared ())
|
||||
{
|
||||
// The child object is shared, we may be able to eliminate the overflow
|
||||
// by duplicating it.
|
||||
sorted_graph.duplicate (r.parent, r.link->objidx);
|
||||
resolution_attempted = true;
|
||||
|
||||
// Stop processing overflows for this round so that object order can be
|
||||
// updated to account for the newly added object.
|
||||
break;
|
||||
}
|
||||
|
||||
if (child.is_leaf () && !priority_bumped_parents.has (r.parent))
|
||||
{
|
||||
// This object is too far from its parent, attempt to move it closer.
//
// TODO(garretrieger): initially limiting this to leafs since they can be
// moved closer with fewer consequences. However, this can
// likely be used for non-leafs as well.
// TODO(garretrieger): add a maximum priority, don't try to raise past this.
// TODO(garretrieger): also try lowering priority of the parent. Make it
// get placed further up in the ordering, closer to its children.
// This is probably preferable if the total size of the parent object
// is less than the total size of the children (and the parent can be moved),
// since in that case moving the parent will cause a smaller increase in
// the length of other offsets.
|
||||
sorted_graph.raise_childrens_priority (r.parent);
|
||||
priority_bumped_parents.add (r.parent);
|
||||
resolution_attempted = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO(garretrieger): add additional offset resolution strategies
|
||||
// - Promotion to extension lookups.
|
||||
// - Table splitting.
|
||||
}
|
||||
|
||||
if (resolution_attempted)
|
||||
{
|
||||
sorted_graph.sort_shortest_distance ();
|
||||
continue;
|
||||
}
|
||||
|
||||
DEBUG_MSG (SUBSET_REPACK, nullptr, "No resolution available :(");
|
||||
c->err (HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
|
||||
return;
|
||||
}
|
||||
|
||||
if (sorted_graph.in_error ())
|
||||
{
|
||||
c->err (HB_SERIALIZE_ERROR_OTHER);
|
||||
return;
|
||||
}
|
||||
sorted_graph.serialize (c);
|
||||
}
|
||||
|
||||
|
||||
#endif /* HB_REPACKER_HH */
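
As a rough usage sketch (it simply mirrors the _repack helper added to
hb-subset.cc further down in this patch), a caller that already holds an
hb_serialize_context_t c whose offsets overflowed can re-serialize the packed
object graph into a fresh buffer:

  hb_vector_t<char> buf;
  int buf_size = c.end - c.start;          // size of the original buffer
  if (unlikely (!buf.alloc (buf_size)))
    return nullptr;

  hb_serialize_context_t repacked ((void *) buf, buf_size);
  hb_resolve_overflows (c.object_graph (), &repacked);

  if (unlikely (repacked.in_error ()))     // overflows could not be resolved
    return nullptr;

  return repacked.copy_blob ();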
|
|
@ -105,7 +105,7 @@
|
|||
#define HB_SANITIZE_MAX_EDITS 32
|
||||
#endif
|
||||
#ifndef HB_SANITIZE_MAX_OPS_FACTOR
|
||||
#define HB_SANITIZE_MAX_OPS_FACTOR 8
|
||||
#define HB_SANITIZE_MAX_OPS_FACTOR 64
|
||||
#endif
|
||||
#ifndef HB_SANITIZE_MAX_OPS_MIN
|
||||
#define HB_SANITIZE_MAX_OPS_MIN 16384
|
||||
|
@ -233,7 +233,7 @@ struct hb_sanitize_context_t :
|
|||
(this->start <= p &&
|
||||
p <= this->end &&
|
||||
(unsigned int) (this->end - p) >= len &&
|
||||
this->max_ops-- > 0);
|
||||
(this->max_ops -= len) > 0);
|
||||
|
||||
DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
|
||||
"check_range [%p..%p]"
|
||||
|
|
|
@ -41,6 +41,16 @@
|
|||
* Serialize
|
||||
*/
|
||||
|
||||
enum hb_serialize_error_t {
|
||||
HB_SERIALIZE_ERROR_NONE = 0x00000000u,
|
||||
HB_SERIALIZE_ERROR_OTHER = 0x00000001u,
|
||||
HB_SERIALIZE_ERROR_OFFSET_OVERFLOW = 0x00000002u,
|
||||
HB_SERIALIZE_ERROR_OUT_OF_ROOM = 0x00000004u,
|
||||
HB_SERIALIZE_ERROR_INT_OVERFLOW = 0x00000008u,
|
||||
HB_SERIALIZE_ERROR_ARRAY_OVERFLOW = 0x00000010u
|
||||
};
|
||||
HB_MARK_AS_FLAG_T (hb_serialize_error_t);
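// The error values are single bits, so several failures can be recorded at
// once and in_error () below is simply bool (errors). For example
// (illustrative), a caller can test whether a repack might still succeed with:
//   if (c.in_error () && c.only_offset_overflow ()) { /* try repacking */ }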
|
||||
|
||||
struct hb_serialize_context_t
|
||||
{
|
||||
typedef unsigned objidx_t;
|
||||
|
@ -51,6 +61,8 @@ struct hb_serialize_context_t
|
|||
Absolute /* Absolute: from the start of the serialize buffer. */
|
||||
};
|
||||
|
||||
|
||||
|
||||
struct object_t
|
||||
{
|
||||
void fini () { links.fini (); }
|
||||
|
@ -117,30 +129,54 @@ struct hb_serialize_context_t
|
|||
object_pool.fini ();
|
||||
}
|
||||
|
||||
bool in_error () const { return !this->successful; }
|
||||
bool in_error () const { return bool (errors); }
|
||||
|
||||
bool successful () const { return !bool (errors); }
|
||||
|
||||
HB_NODISCARD bool ran_out_of_room () const { return errors & HB_SERIALIZE_ERROR_OUT_OF_ROOM; }
|
||||
HB_NODISCARD bool offset_overflow () const { return errors & HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
|
||||
HB_NODISCARD bool only_offset_overflow () const { return errors == HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
|
||||
|
||||
void reset (void *start_, unsigned int size)
|
||||
{
|
||||
start = (char*) start_;
|
||||
end = start + size;
|
||||
reset ();
|
||||
current = nullptr;
|
||||
}
|
||||
|
||||
void reset ()
|
||||
{
|
||||
this->successful = true;
|
||||
this->ran_out_of_room = false;
|
||||
this->errors = HB_SERIALIZE_ERROR_NONE;
|
||||
this->head = this->start;
|
||||
this->tail = this->end;
|
||||
this->debug_depth = 0;
|
||||
|
||||
fini ();
|
||||
this->packed.push (nullptr);
|
||||
this->packed_map.init ();
|
||||
}
|
||||
|
||||
bool check_success (bool success)
|
||||
{ return this->successful && (success || (err_other_error (), false)); }
|
||||
bool check_success (bool success,
|
||||
hb_serialize_error_t err_type = HB_SERIALIZE_ERROR_OTHER)
|
||||
{
|
||||
return successful ()
|
||||
&& (success || err (err_type));
|
||||
}
|
||||
|
||||
template <typename T1, typename T2>
|
||||
bool check_equal (T1 &&v1, T2 &&v2)
|
||||
{ return check_success ((long long) v1 == (long long) v2); }
|
||||
bool check_equal (T1 &&v1, T2 &&v2, hb_serialize_error_t err_type)
|
||||
{
|
||||
if ((long long) v1 != (long long) v2)
|
||||
{
|
||||
return err (err_type);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
template <typename T1, typename T2>
|
||||
bool check_assign (T1 &v1, T2 &&v2)
|
||||
{ return check_equal (v1 = v2, v2); }
|
||||
bool check_assign (T1 &v1, T2 &&v2, hb_serialize_error_t err_type)
|
||||
{ return check_equal (v1 = v2, v2, err_type); }
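// Usage example from later in this patch: assign_offset () reports a
// too-large offset as a repackable failure via
//   check_assign (off, offset, HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);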
|
||||
|
||||
template <typename T> bool propagate_error (T &&obj)
|
||||
{ return check_success (!hb_deref (obj).in_error ()); }
|
||||
|
@ -167,12 +203,18 @@ struct hb_serialize_context_t
|
|||
"end [%p..%p] serialized %u bytes; %s",
|
||||
this->start, this->end,
|
||||
(unsigned) (this->head - this->start),
|
||||
this->successful ? "successful" : "UNSUCCESSFUL");
|
||||
successful () ? "successful" : "UNSUCCESSFUL");
|
||||
|
||||
propagate_error (packed, packed_map);
|
||||
|
||||
if (unlikely (!current)) return;
|
||||
if (unlikely (in_error())) return;
|
||||
if (unlikely (in_error()))
|
||||
{
|
||||
// Offset overflows that occur before link resolution cannot be handled
|
||||
// by repacking, so set a more general error.
|
||||
if (offset_overflow ()) err (HB_SERIALIZE_ERROR_OTHER);
|
||||
return;
|
||||
}
|
||||
|
||||
assert (!current->next);
|
||||
|
||||
|
@ -351,7 +393,7 @@ struct hb_serialize_context_t
|
|||
for (const object_t::link_t &link : parent->links)
|
||||
{
|
||||
const object_t* child = packed[link.objidx];
|
||||
if (unlikely (!child)) { err_other_error(); return; }
|
||||
if (unlikely (!child)) { err (HB_SERIALIZE_ERROR_OTHER); return; }
|
||||
unsigned offset = 0;
|
||||
switch ((whence_t) link.whence) {
|
||||
case Head: offset = child->head - parent->head; break;
|
||||
|
@ -398,19 +440,19 @@ struct hb_serialize_context_t
|
|||
Type *start_embed (const Type &obj) const
|
||||
{ return start_embed (hb_addressof (obj)); }
|
||||
|
||||
/* Following two functions exist to allow setting breakpoint on. */
|
||||
void err_ran_out_of_room () { this->ran_out_of_room = true; }
|
||||
void err_other_error () { this->successful = false; }
|
||||
bool err (hb_serialize_error_t err_type)
|
||||
{
|
||||
return !bool ((errors = (errors | err_type)));
|
||||
}
|
||||
|
||||
template <typename Type>
|
||||
Type *allocate_size (unsigned int size)
|
||||
{
|
||||
if (unlikely (!this->successful)) return nullptr;
|
||||
if (unlikely (in_error ())) return nullptr;
|
||||
|
||||
if (this->tail - this->head < ptrdiff_t (size))
|
||||
{
|
||||
err_ran_out_of_room ();
|
||||
this->successful = false;
|
||||
err (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
|
||||
return nullptr;
|
||||
}
|
||||
memset (this->head, 0, size);
|
||||
|
@ -497,7 +539,7 @@ struct hb_serialize_context_t
|
|||
/* Output routines. */
|
||||
hb_bytes_t copy_bytes () const
|
||||
{
|
||||
assert (this->successful);
|
||||
assert (successful ());
|
||||
/* Copy both items from head side and tail side... */
|
||||
unsigned int len = (this->head - this->start)
|
||||
+ (this->end - this->tail);
|
||||
|
@ -520,20 +562,22 @@ struct hb_serialize_context_t
|
|||
(char *) b.arrayZ, free);
|
||||
}
|
||||
|
||||
const hb_vector_t<object_t *>& object_graph() const
|
||||
{ return packed; }
|
||||
|
||||
private:
|
||||
template <typename T>
|
||||
void assign_offset (const object_t* parent, const object_t::link_t &link, unsigned offset)
|
||||
{
|
||||
auto &off = * ((BEInt<T> *) (parent->head + link.position));
|
||||
assert (0 == off);
|
||||
check_assign (off, offset);
|
||||
check_assign (off, offset, HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
|
||||
}
|
||||
|
||||
public: /* TODO Make private. */
|
||||
char *start, *head, *tail, *end;
|
||||
unsigned int debug_depth;
|
||||
bool successful;
|
||||
bool ran_out_of_room;
|
||||
hb_serialize_error_t errors;
|
||||
|
||||
private:
|
||||
|
||||
|
@ -550,5 +594,4 @@ struct hb_serialize_context_t
|
|||
hb_hashmap_t<const object_t *, objidx_t, nullptr, 0> packed_map;
|
||||
};
|
||||
|
||||
|
||||
#endif /* HB_SERIALIZE_HH */
|
||||
|
|
|
@ -87,6 +87,8 @@ struct hb_set_digest_lowest_bits_t
|
|||
}
|
||||
}
|
||||
template <typename T>
|
||||
void add_array (const hb_array_t<const T>& arr) { add_array (&arr, arr.len ()); }
|
||||
template <typename T>
|
||||
bool add_sorted_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))
|
||||
{
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
|
@ -96,6 +98,8 @@ struct hb_set_digest_lowest_bits_t
|
|||
}
|
||||
return true;
|
||||
}
|
||||
template <typename T>
|
||||
bool add_sorted_array (const hb_sorted_array_t<const T>& arr) { return add_sorted_array (&arr, arr.len ()); }
|
||||
|
||||
bool may_have (hb_codepoint_t g) const
|
||||
{ return !!(mask & mask_for (g)); }
|
||||
|
@ -135,12 +139,16 @@ struct hb_set_digest_combiner_t
|
|||
tail.add_array (array, count, stride);
|
||||
}
|
||||
template <typename T>
|
||||
void add_array (const hb_array_t<const T>& arr) { add_array (&arr, arr.len ()); }
|
||||
template <typename T>
|
||||
bool add_sorted_array (const T *array, unsigned int count, unsigned int stride=sizeof(T))
|
||||
{
|
||||
head.add_sorted_array (array, count, stride);
|
||||
tail.add_sorted_array (array, count, stride);
|
||||
return true;
|
||||
}
|
||||
template <typename T>
|
||||
bool add_sorted_array (const hb_sorted_array_t<const T>& arr) { return add_sorted_array (&arr, arr.len ()); }
|
||||
|
||||
bool may_have (hb_codepoint_t g) const
|
||||
{
|
||||
|
|
|
@ -337,6 +337,8 @@ struct hb_set_t
|
|||
while (count && (g = *array, start <= g && g < end));
|
||||
}
|
||||
}
|
||||
template <typename T>
|
||||
void add_array (const hb_array_t<const T>& arr) { add_array (&arr, arr.len ()); }
|
||||
|
||||
/* Might return false if array looks unsorted.
|
||||
* Used for faster rejection of corrupt data. */
|
||||
|
@ -368,6 +370,8 @@ struct hb_set_t
|
|||
}
|
||||
return true;
|
||||
}
|
||||
template <typename T>
|
||||
bool add_sorted_array (const hb_sorted_array_t<const T>& arr) { return add_sorted_array (&arr, arr.len ()); }
|
||||
|
||||
void del (hb_codepoint_t g)
|
||||
{
|
||||
|
@ -828,7 +832,7 @@ struct hb_set_t
|
|||
hb_codepoint_t get_max () const
|
||||
{
|
||||
unsigned int count = pages.length;
|
||||
for (int i = count - 1; i >= 0; i++)
|
||||
for (int i = count - 1; i >= 0; i--)
|
||||
if (!page_at (i).is_empty ())
|
||||
return page_map[(unsigned) i].major * page_t::PAGE_BITS + page_at (i).get_max ();
|
||||
return INVALID;
|
||||
|
|
|
@ -43,6 +43,7 @@ uint64_t const _hb_NullPool[(HB_NULL_POOL_SIZE + sizeof (uint64_t) - 1) / sizeof
|
|||
/*thread_local*/ uint64_t _hb_CrapPool[(HB_NULL_POOL_SIZE + sizeof (uint64_t) - 1) / sizeof (uint64_t)] = {};
|
||||
|
||||
DEFINE_NULL_NAMESPACE_BYTES (OT, Index) = {0xFF,0xFF};
|
||||
DEFINE_NULL_NAMESPACE_BYTES (OT, VarIdx) = {0xFF,0xFF,0xFF,0xFF};
|
||||
DEFINE_NULL_NAMESPACE_BYTES (OT, LangSys) = {0x00,0x00, 0xFF,0xFF, 0x00,0x00};
|
||||
DEFINE_NULL_NAMESPACE_BYTES (OT, RangeRecord) = {0x00,0x01, 0x00,0x00, 0x00, 0x00};
|
||||
DEFINE_NULL_NAMESPACE_BYTES (OT, CmapSubtableLongGroup) = {0x00,0x00,0x00,0x01, 0x00,0x00,0x00,0x00, 0x00,0x00,0x00,0x00};
|
||||
|
|
|
@ -56,9 +56,6 @@ hb_subset_input_create_or_fail ()
|
|||
|
||||
hb_tag_t default_drop_tables[] = {
|
||||
// Layout disabled by default
|
||||
HB_TAG ('G', 'S', 'U', 'B'),
|
||||
HB_TAG ('G', 'P', 'O', 'S'),
|
||||
HB_TAG ('G', 'D', 'E', 'F'),
|
||||
HB_TAG ('m', 'o', 'r', 'x'),
|
||||
HB_TAG ('m', 'o', 'r', 't'),
|
||||
HB_TAG ('k', 'e', 'r', 'x'),
|
||||
|
|
|
@ -39,6 +39,7 @@
|
|||
#include "hb-ot-stat-table.hh"
|
||||
|
||||
|
||||
typedef hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> script_langsys_map;
|
||||
#ifndef HB_NO_SUBSET_CFF
|
||||
static inline void
|
||||
_add_cff_seac_components (const OT::cff1::accelerator_t &cff,
|
||||
|
@ -70,7 +71,8 @@ static inline void
|
|||
_gsub_closure_glyphs_lookups_features (hb_face_t *face,
|
||||
hb_set_t *gids_to_retain,
|
||||
hb_map_t *gsub_lookups,
|
||||
hb_map_t *gsub_features)
|
||||
hb_map_t *gsub_features,
|
||||
script_langsys_map *gsub_langsys)
|
||||
{
|
||||
hb_set_t lookup_indices;
|
||||
hb_ot_layout_collect_lookups (face,
|
||||
|
@ -96,7 +98,13 @@ _gsub_closure_glyphs_lookups_features (hb_face_t *face,
|
|||
nullptr,
|
||||
nullptr,
|
||||
&feature_indices);
|
||||
|
||||
gsub->prune_features (gsub_lookups, &feature_indices);
|
||||
hb_map_t duplicate_feature_map;
|
||||
gsub->find_duplicate_features (gsub_lookups, &feature_indices, &duplicate_feature_map);
|
||||
|
||||
feature_indices.clear ();
|
||||
gsub->prune_langsys (&duplicate_feature_map, gsub_langsys, &feature_indices);
|
||||
_remap_indexes (&feature_indices, gsub_features);
|
||||
|
||||
gsub.destroy ();
|
||||
|
@ -106,7 +114,8 @@ static inline void
|
|||
_gpos_closure_lookups_features (hb_face_t *face,
|
||||
const hb_set_t *gids_to_retain,
|
||||
hb_map_t *gpos_lookups,
|
||||
hb_map_t *gpos_features)
|
||||
hb_map_t *gpos_features,
|
||||
script_langsys_map *gpos_langsys)
|
||||
{
|
||||
hb_set_t lookup_indices;
|
||||
hb_ot_layout_collect_lookups (face,
|
||||
|
@ -129,8 +138,15 @@ _gpos_closure_lookups_features (hb_face_t *face,
|
|||
nullptr,
|
||||
nullptr,
|
||||
&feature_indices);
|
||||
|
||||
gpos->prune_features (gpos_lookups, &feature_indices);
|
||||
hb_map_t duplicate_feature_map;
|
||||
gpos->find_duplicate_features (gpos_lookups, &feature_indices, &duplicate_feature_map);
|
||||
|
||||
feature_indices.clear ();
|
||||
gpos->prune_langsys (&duplicate_feature_map, gpos_langsys, &feature_indices);
|
||||
_remap_indexes (&feature_indices, gpos_features);
|
||||
|
||||
gpos.destroy ();
|
||||
}
|
||||
#endif
|
||||
|
@ -231,10 +247,10 @@ _populate_gids_to_retain (hb_subset_plan_t* plan,
|
|||
#ifndef HB_NO_SUBSET_LAYOUT
|
||||
if (close_over_gsub)
|
||||
// closure all glyphs/lookups/features needed for GSUB substitutions.
|
||||
_gsub_closure_glyphs_lookups_features (plan->source, plan->_glyphset_gsub, plan->gsub_lookups, plan->gsub_features);
|
||||
_gsub_closure_glyphs_lookups_features (plan->source, plan->_glyphset_gsub, plan->gsub_lookups, plan->gsub_features, plan->gsub_langsys);
|
||||
|
||||
if (close_over_gpos)
|
||||
_gpos_closure_lookups_features (plan->source, plan->_glyphset_gsub, plan->gpos_lookups, plan->gpos_features);
|
||||
_gpos_closure_lookups_features (plan->source, plan->_glyphset_gsub, plan->gpos_lookups, plan->gpos_features, plan->gpos_langsys);
|
||||
#endif
|
||||
_remove_invalid_gids (plan->_glyphset_gsub, plan->source->get_num_glyphs ());
|
||||
|
||||
|
@ -356,11 +372,21 @@ hb_subset_plan_create (hb_face_t *face,
|
|||
plan->reverse_glyph_map = hb_map_create ();
|
||||
plan->gsub_lookups = hb_map_create ();
|
||||
plan->gpos_lookups = hb_map_create ();
|
||||
|
||||
if (plan->check_success (plan->gsub_langsys = hb_object_create<script_langsys_map> ()))
|
||||
plan->gsub_langsys->init_shallow ();
|
||||
if (plan->check_success (plan->gpos_langsys = hb_object_create<script_langsys_map> ()))
|
||||
plan->gpos_langsys->init_shallow ();
|
||||
|
||||
plan->gsub_features = hb_map_create ();
|
||||
plan->gpos_features = hb_map_create ();
|
||||
plan->layout_variation_indices = hb_set_create ();
|
||||
plan->layout_variation_idx_map = hb_map_create ();
|
||||
|
||||
if (plan->in_error ()) {
|
||||
return plan;
|
||||
}
|
||||
|
||||
_populate_gids_to_retain (plan,
|
||||
input->unicodes,
|
||||
input->glyphs,
|
||||
|
@ -407,6 +433,25 @@ hb_subset_plan_destroy (hb_subset_plan_t *plan)
|
|||
hb_set_destroy (plan->layout_variation_indices);
|
||||
hb_map_destroy (plan->layout_variation_idx_map);
|
||||
|
||||
if (plan->gsub_langsys)
|
||||
{
|
||||
for (auto _ : plan->gsub_langsys->iter ())
|
||||
hb_set_destroy (_.second);
|
||||
|
||||
hb_object_destroy (plan->gsub_langsys);
|
||||
plan->gsub_langsys->fini_shallow ();
|
||||
free (plan->gsub_langsys);
|
||||
}
|
||||
|
||||
if (plan->gpos_langsys)
|
||||
{
|
||||
for (auto _ : plan->gpos_langsys->iter ())
|
||||
hb_set_destroy (_.second);
|
||||
|
||||
hb_object_destroy (plan->gpos_langsys);
|
||||
plan->gpos_langsys->fini_shallow ();
|
||||
free (plan->gpos_langsys);
|
||||
}
|
||||
|
||||
free (plan);
|
||||
}
|
||||
|
|
|
@ -79,7 +79,11 @@ struct hb_subset_plan_t
|
|||
hb_map_t *gsub_lookups;
|
||||
hb_map_t *gpos_lookups;
|
||||
|
||||
//active features we'd like to retain
|
||||
//active langsys we'd like to retain
|
||||
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *gsub_langsys;
|
||||
hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *gpos_langsys;
|
||||
|
||||
//active features after removing redundant langsys and prune_features
|
||||
hb_map_t *gsub_features;
|
||||
hb_map_t *gpos_features;
|
||||
|
||||
|
|
|
@ -50,6 +50,7 @@
|
|||
#include "hb-ot-layout-gpos-table.hh"
|
||||
#include "hb-ot-var-gvar-table.hh"
|
||||
#include "hb-ot-var-hvar-table.hh"
|
||||
#include "hb-repacker.hh"
|
||||
|
||||
|
||||
static unsigned
|
||||
|
@ -64,69 +65,132 @@ _plan_estimate_subset_table_size (hb_subset_plan_t *plan, unsigned table_len)
|
|||
return 512 + (unsigned) (table_len * sqrt ((double) dst_glyphs / src_glyphs));
|
||||
}
|
||||
|
||||
/*
|
||||
* Repack the serialization buffer if any offset overflows exist.
|
||||
*/
|
||||
static hb_blob_t*
|
||||
_repack (hb_tag_t tag, const hb_serialize_context_t& c)
|
||||
{
|
||||
if (tag != HB_OT_TAG_GPOS
|
||||
&& tag != HB_OT_TAG_GSUB)
|
||||
{
|
||||
// Check for overflow in a non-handled table.
|
||||
return c.successful () ? c.copy_blob () : nullptr;
|
||||
}
|
||||
|
||||
if (!c.offset_overflow ())
|
||||
return c.copy_blob ();
|
||||
|
||||
hb_vector_t<char> buf;
|
||||
int buf_size = c.end - c.start;
|
||||
if (unlikely (!buf.alloc (buf_size)))
|
||||
return nullptr;
|
||||
|
||||
hb_serialize_context_t repacked ((void *) buf, buf_size);
|
||||
hb_resolve_overflows (c.object_graph (), &repacked);
|
||||
|
||||
if (unlikely (repacked.in_error ()))
|
||||
// TODO(garretrieger): refactor so we can share the resize/retry logic with the subset
|
||||
// portion.
|
||||
return nullptr;
|
||||
|
||||
return repacked.copy_blob ();
|
||||
}
|
||||
|
||||
template<typename TableType>
|
||||
static
|
||||
bool
|
||||
_try_subset (const TableType *table,
|
||||
hb_vector_t<char>* buf,
|
||||
unsigned buf_size,
|
||||
hb_subset_context_t* c /* OUT */)
|
||||
{
|
||||
c->serializer->start_serialize<TableType> ();
|
||||
|
||||
bool needed = table->subset (c);
|
||||
if (!c->serializer->ran_out_of_room ())
|
||||
{
|
||||
c->serializer->end_serialize ();
|
||||
return needed;
|
||||
}
|
||||
|
||||
buf_size += (buf_size >> 1) + 32;
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c ran out of room; reallocating to %u bytes.",
|
||||
HB_UNTAG (c->table_tag), buf_size);
|
||||
|
||||
if (unlikely (!buf->alloc (buf_size)))
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c failed to reallocate %u bytes.",
|
||||
HB_UNTAG (c->table_tag), buf_size);
|
||||
return needed;
|
||||
}
|
||||
|
||||
c->serializer->reset (buf->arrayZ, buf_size);
|
||||
return _try_subset (table, buf, buf_size, c);
|
||||
}
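// Growth example for the retry above (illustrative numbers): an initial
// 1000-byte estimate that runs out of room is retried with
// 1000 + 500 + 32 = 1532 bytes, then 1532 + 766 + 32 = 2330, and so on until
// the subsetted table fits.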
|
||||
|
||||
template<typename TableType>
|
||||
static bool
|
||||
_subset (hb_subset_plan_t *plan)
|
||||
{
|
||||
bool result = false;
|
||||
hb_blob_t *source_blob = hb_sanitize_context_t ().reference_table<TableType> (plan->source);
|
||||
const TableType *table = source_blob->as<TableType> ();
|
||||
|
||||
hb_tag_t tag = TableType::tableTag;
|
||||
if (source_blob->data)
|
||||
if (!source_blob->data)
|
||||
{
|
||||
hb_vector_t<char> buf;
|
||||
/* TODO Not all tables are glyph-related. 'name' table size for example should not be
|
||||
* affected by number of glyphs. Accommodate that. */
|
||||
unsigned buf_size = _plan_estimate_subset_table_size (plan, source_blob->length);
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c initial estimated table size: %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
if (unlikely (!buf.alloc (buf_size)))
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c failed to allocate %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
hb_blob_destroy (source_blob);
|
||||
return false;
|
||||
}
|
||||
retry:
|
||||
hb_serialize_context_t serializer ((void *) buf, buf_size);
|
||||
serializer.start_serialize<TableType> ();
|
||||
hb_subset_context_t c (source_blob, plan, &serializer, tag);
|
||||
bool needed = table->subset (&c);
|
||||
if (serializer.ran_out_of_room)
|
||||
{
|
||||
buf_size += (buf_size >> 1) + 32;
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c ran out of room; reallocating to %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
if (unlikely (!buf.alloc (buf_size)))
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c failed to reallocate %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
hb_blob_destroy (source_blob);
|
||||
return false;
|
||||
}
|
||||
goto retry;
|
||||
}
|
||||
serializer.end_serialize ();
|
||||
|
||||
result = !serializer.in_error ();
|
||||
|
||||
if (result)
|
||||
{
|
||||
if (needed)
|
||||
{
|
||||
hb_blob_t *dest_blob = serializer.copy_blob ();
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c final subset table size: %u bytes.", HB_UNTAG (tag), dest_blob->length);
|
||||
result = c.plan->add_table (tag, dest_blob);
|
||||
hb_blob_destroy (dest_blob);
|
||||
}
|
||||
else
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset table subsetted to empty.", HB_UNTAG (tag));
|
||||
}
|
||||
}
|
||||
DEBUG_MSG (SUBSET, nullptr,
|
||||
"OT::%c%c%c%c::subset sanitize failed on source table.", HB_UNTAG (tag));
|
||||
hb_blob_destroy (source_blob);
|
||||
return false;
|
||||
}
|
||||
else
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset sanitize failed on source table.", HB_UNTAG (tag));
|
||||
|
||||
hb_vector_t<char> buf;
|
||||
/* TODO Not all tables are glyph-related. 'name' table size for example should not be
|
||||
* affected by number of glyphs. Accommodate that. */
|
||||
unsigned buf_size = _plan_estimate_subset_table_size (plan, source_blob->length);
|
||||
DEBUG_MSG (SUBSET, nullptr,
|
||||
"OT::%c%c%c%c initial estimated table size: %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
if (unlikely (!buf.alloc (buf_size)))
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c failed to allocate %u bytes.", HB_UNTAG (tag), buf_size);
|
||||
hb_blob_destroy (source_blob);
|
||||
return false;
|
||||
}
|
||||
|
||||
bool needed = false;
|
||||
hb_serialize_context_t serializer (buf.arrayZ, buf_size);
|
||||
{
|
||||
hb_subset_context_t c (source_blob, plan, &serializer, tag);
|
||||
needed = _try_subset (table, &buf, buf_size, &c);
|
||||
}
|
||||
hb_blob_destroy (source_blob);
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset %s", HB_UNTAG (tag), result ? "success" : "FAILED!");
|
||||
|
||||
if (serializer.in_error () && !serializer.only_offset_overflow ())
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset FAILED!", HB_UNTAG (tag));
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!needed)
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset table subsetted to empty.", HB_UNTAG (tag));
|
||||
return true;
|
||||
}
|
||||
|
||||
bool result = false;
|
||||
hb_blob_t *dest_blob = _repack (tag, serializer);
|
||||
if (dest_blob)
|
||||
{
|
||||
DEBUG_MSG (SUBSET, nullptr,
|
||||
"OT::%c%c%c%c final subset table size: %u bytes.",
|
||||
HB_UNTAG (tag), dest_blob->length);
|
||||
result = plan->add_table (tag, dest_blob);
|
||||
hb_blob_destroy (dest_blob);
|
||||
}
|
||||
|
||||
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset %s",
|
||||
HB_UNTAG (tag), result ? "success" : "FAILED!");
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -241,8 +305,10 @@ hb_subset (hb_face_t *source, hb_subset_input_t *input)
|
|||
if (unlikely (!input || !source)) return hb_face_get_empty ();
|
||||
|
||||
hb_subset_plan_t *plan = hb_subset_plan_create (source, input);
|
||||
if (unlikely (plan->in_error ()))
|
||||
if (unlikely (plan->in_error ())) {
|
||||
hb_subset_plan_destroy (plan);
|
||||
return hb_face_get_empty ();
|
||||
}
|
||||
|
||||
hb_set_t tags_set;
|
||||
bool success = true;
|
||||
|
|
|
@ -289,8 +289,8 @@ DECLARE_NULL_INSTANCE (hb_unicode_funcs_t);
|
|||
#define HB_MODIFIED_COMBINING_CLASS_CCC15 18 /* tsere */
#define HB_MODIFIED_COMBINING_CLASS_CCC16 19 /* segol */
#define HB_MODIFIED_COMBINING_CLASS_CCC17 20 /* patah */
#define HB_MODIFIED_COMBINING_CLASS_CCC18 21 /* qamats */
#define HB_MODIFIED_COMBINING_CLASS_CCC19 14 /* holam */
#define HB_MODIFIED_COMBINING_CLASS_CCC18 21 /* qamats & qamats qatan */
#define HB_MODIFIED_COMBINING_CLASS_CCC19 14 /* holam & holam haser for vav */
#define HB_MODIFIED_COMBINING_CLASS_CCC20 24 /* qubuts */
#define HB_MODIFIED_COMBINING_CLASS_CCC21 12 /* dagesh */
#define HB_MODIFIED_COMBINING_CLASS_CCC22 25 /* meteg */
|
||||
|
|
|
@ -177,6 +177,11 @@ struct hb_vector_t
|
|||
Type *push (T&& v)
{
Type *p = push ();
if (p == &Crap (Type))
// If push failed to allocate then don't copy v, since this may cause
// the created copy to leak memory because we won't have stored a
// reference to it.
return p;
*p = hb_forward<T> (v);
return p;
}
|
||||
|
|
|
@ -53,14 +53,14 @@ HB_BEGIN_DECLS
|
|||
*
* The micro component of the library version available at compile-time.
*/
#define HB_VERSION_MICRO 0
#define HB_VERSION_MICRO 1

/**
* HB_VERSION_STRING:
*
* A string literal containing the library version available at compile-time.
*/
#define HB_VERSION_STRING "2.8.0"
#define HB_VERSION_STRING "2.8.1"

/**
* HB_VERSION_ATLEAST:
|
||||
|
|
|
@ -117,6 +117,9 @@
|
|||
#pragma GCC diagnostic ignored "-Wshadow" // TODO fix
|
||||
#pragma GCC diagnostic ignored "-Wunsafe-loop-optimizations" // TODO fix
|
||||
#pragma GCC diagnostic ignored "-Wunused-parameter" // TODO fix
|
||||
#if defined(__GNUC__) && !defined(__clang__)
|
||||
#pragma GCC diagnostic ignored "-Wunused-result" // TODO fix
|
||||
#endif
|
||||
#endif
|
||||
|
||||
/* Ignored intentionally. */
|
||||
|
@ -335,7 +338,6 @@ extern "C" void hb_free_impl(void *ptr);
|
|||
#else
|
||||
# define HB_NODISCARD
|
||||
#endif
|
||||
#define hb_success_t HB_NODISCARD bool
|
||||
|
||||
/* https://github.com/harfbuzz/harfbuzz/issues/1852 */
|
||||
#if defined(__clang__) && !(defined(_AIX) && (defined(__IBMCPP__) || defined(__ibmxl__)))
|
||||
|
|
|
@ -477,6 +477,8 @@ if get_option('tests').enabled()
|
|||
compiled_tests = {
|
||||
'test-algs': ['test-algs.cc', 'hb-static.cc'],
|
||||
'test-array': ['test-array.cc'],
|
||||
'test-repacker': ['test-repacker.cc', 'hb-static.cc'],
|
||||
'test-priority-queue': ['test-priority-queue.cc', 'hb-static.cc'],
|
||||
'test-iter': ['test-iter.cc', 'hb-static.cc'],
|
||||
'test-meta': ['test-meta.cc', 'hb-static.cc'],
|
||||
'test-number': ['test-number.cc', 'hb-number.cc'],
|
||||
|
|
|
@ -0,0 +1,89 @@
|
|||
/*
|
||||
* Copyright © 2020 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Garret Rieger
|
||||
*/
|
||||
|
||||
#include "hb.hh"
|
||||
#include "hb-priority-queue.hh"
|
||||
|
||||
static void
|
||||
test_insert ()
|
||||
{
|
||||
hb_priority_queue_t queue;
|
||||
assert (queue.is_empty ());
|
||||
|
||||
queue.insert (10, 0);
|
||||
assert (!queue.is_empty ());
|
||||
assert (queue.minimum () == hb_pair (10, 0));
|
||||
|
||||
queue.insert (20, 1);
|
||||
assert (queue.minimum () == hb_pair (10, 0));
|
||||
|
||||
queue.insert (5, 2);
|
||||
assert (queue.minimum () == hb_pair (5, 2));
|
||||
|
||||
queue.insert (15, 3);
|
||||
assert (queue.minimum () == hb_pair (5, 2));
|
||||
|
||||
queue.insert (1, 4);
|
||||
assert (queue.minimum () == hb_pair (1, 4));
|
||||
}
|
||||
|
||||
static void
|
||||
test_extract ()
|
||||
{
|
||||
hb_priority_queue_t queue;
|
||||
queue.insert (0, 0);
|
||||
queue.insert (60, 6);
|
||||
queue.insert (30, 3);
|
||||
queue.insert (40, 4);
|
||||
queue.insert (20, 2);
|
||||
queue.insert (50, 5);
|
||||
queue.insert (70, 7);
|
||||
queue.insert (10, 1);
|
||||
|
||||
for (int i = 0; i < 8; i++)
|
||||
{
|
||||
assert (!queue.is_empty ());
|
||||
assert (queue.minimum () == hb_pair (i * 10, i));
|
||||
assert (queue.pop_minimum () == hb_pair (i * 10, i));
|
||||
}
|
||||
|
||||
assert (queue.is_empty ());
|
||||
}
|
||||
|
||||
static void
|
||||
test_extract_empty ()
|
||||
{
|
||||
hb_priority_queue_t queue;
|
||||
assert (queue.pop_minimum () == hb_pair (0, 0));
|
||||
}
|
||||
|
||||
int
|
||||
main (int argc, char **argv)
|
||||
{
|
||||
test_insert ();
|
||||
test_extract ();
|
||||
test_extract_empty ();
|
||||
}
|
|
@ -0,0 +1,485 @@
|
|||
/*
|
||||
* Copyright © 2020 Google, Inc.
|
||||
*
|
||||
* This is part of HarfBuzz, a text shaping library.
|
||||
*
|
||||
* Permission is hereby granted, without written agreement and without
|
||||
* license or royalty fees, to use, copy, modify, and distribute this
|
||||
* software and its documentation for any purpose, provided that the
|
||||
* above copyright notice and the following two paragraphs appear in
|
||||
* all copies of this software.
|
||||
*
|
||||
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
|
||||
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
|
||||
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
|
||||
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
* DAMAGE.
|
||||
*
|
||||
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
|
||||
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
|
||||
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
|
||||
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
|
||||
*
|
||||
* Google Author(s): Garret Rieger
|
||||
*/
|
||||
|
||||
#include <string>
|
||||
|
||||
#include "hb-repacker.hh"
|
||||
#include "hb-open-type.hh"
|
||||
|
||||
static void start_object(const char* tag,
|
||||
unsigned len,
|
||||
hb_serialize_context_t* c)
|
||||
{
|
||||
c->push ();
|
||||
char* obj = c->allocate_size<char> (len);
|
||||
strncpy (obj, tag, len);
|
||||
}
|
||||
|
||||
|
||||
static unsigned add_object(const char* tag,
|
||||
unsigned len,
|
||||
hb_serialize_context_t* c)
|
||||
{
|
||||
start_object (tag, len, c);
|
||||
return c->pop_pack (false);
|
||||
}
|
||||
|
||||
|
||||
static void add_offset (unsigned id,
|
||||
hb_serialize_context_t* c)
|
||||
{
|
||||
OT::Offset16* offset = c->start_embed<OT::Offset16> ();
|
||||
c->extend_min (offset);
|
||||
c->add_link (*offset, id);
|
||||
}
|
||||
|
||||
static void
|
||||
populate_serializer_simple (hb_serialize_context_t* c)
|
||||
{
|
||||
c->start_serialize<char> ();
|
||||
|
||||
unsigned obj_1 = add_object ("ghi", 3, c);
|
||||
unsigned obj_2 = add_object ("def", 3, c);
|
||||
|
||||
start_object ("abc", 3, c);
|
||||
add_offset (obj_2, c);
|
||||
add_offset (obj_1, c);
|
||||
c->pop_pack ();
|
||||
|
||||
c->end_serialize();
|
||||
}
|
||||
|
||||
static void
|
||||
populate_serializer_with_overflow (hb_serialize_context_t* c)
|
||||
{
|
||||
std::string large_string(50000, 'a');
|
||||
c->start_serialize<char> ();
|
||||
|
||||
unsigned obj_1 = add_object (large_string.c_str(), 10000, c);
|
||||
unsigned obj_2 = add_object (large_string.c_str(), 20000, c);
|
||||
unsigned obj_3 = add_object (large_string.c_str(), 50000, c);
|
||||
|
||||
start_object ("abc", 3, c);
|
||||
add_offset (obj_3, c);
|
||||
add_offset (obj_2, c);
|
||||
add_offset (obj_1, c);
|
||||
c->pop_pack ();
|
||||
|
||||
c->end_serialize();
|
||||
}
|
||||
|
||||
static void
|
||||
populate_serializer_with_dedup_overflow (hb_serialize_context_t* c)
|
||||
{
|
||||
  std::string large_string(70000, 'a');
  c->start_serialize<char> ();

  unsigned obj_1 = add_object ("def", 3, c);

  start_object (large_string.c_str(), 60000, c);
  add_offset (obj_1, c);
  unsigned obj_2 = c->pop_pack (false);

  start_object (large_string.c_str(), 10000, c);
  add_offset (obj_2, c);
  add_offset (obj_1, c);
  c->pop_pack (false);

  c->end_serialize();
}

static void
populate_serializer_complex_1 (hb_serialize_context_t* c)
{
  c->start_serialize<char> ();

  unsigned obj_4 = add_object ("jkl", 3, c);
  unsigned obj_3 = add_object ("ghi", 3, c);

  start_object ("def", 3, c);
  add_offset (obj_3, c);
  unsigned obj_2 = c->pop_pack (false);

  start_object ("abc", 3, c);
  add_offset (obj_2, c);
  add_offset (obj_4, c);
  c->pop_pack ();

  c->end_serialize();
}

static void
populate_serializer_complex_2 (hb_serialize_context_t* c)
{
  c->start_serialize<char> ();

  unsigned obj_5 = add_object ("mn", 2, c);

  unsigned obj_4 = add_object ("jkl", 3, c);

  start_object ("ghi", 3, c);
  add_offset (obj_4, c);
  unsigned obj_3 = c->pop_pack (false);

  start_object ("def", 3, c);
  add_offset (obj_3, c);
  unsigned obj_2 = c->pop_pack (false);

  start_object ("abc", 3, c);
  add_offset (obj_2, c);
  add_offset (obj_4, c);
  add_offset (obj_5, c);
  c->pop_pack ();

  c->end_serialize();
}

static void
populate_serializer_complex_3 (hb_serialize_context_t* c)
{
  c->start_serialize<char> ();

  unsigned obj_6 = add_object ("opqrst", 6, c);

  unsigned obj_5 = add_object ("mn", 2, c);

  start_object ("jkl", 3, c);
  add_offset (obj_6, c);
  unsigned obj_4 = c->pop_pack (false);

  start_object ("ghi", 3, c);
  add_offset (obj_4, c);
  unsigned obj_3 = c->pop_pack (false);

  start_object ("def", 3, c);
  add_offset (obj_3, c);
  unsigned obj_2 = c->pop_pack (false);

  start_object ("abc", 3, c);
  add_offset (obj_2, c);
  add_offset (obj_4, c);
  add_offset (obj_5, c);
  c->pop_pack ();

  c->end_serialize();
}

static void test_sort_kahn_1 ()
{
  size_t buffer_size = 100;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_complex_1 (&c);

  graph_t graph (c.object_graph ());
  graph.sort_kahn ();

  assert(strncmp (graph.object (3).head, "abc", 3) == 0);
  assert(graph.object (3).links.length == 2);
  assert(graph.object (3).links[0].objidx == 2);
  assert(graph.object (3).links[1].objidx == 1);

  assert(strncmp (graph.object (2).head, "def", 3) == 0);
  assert(graph.object (2).links.length == 1);
  assert(graph.object (2).links[0].objidx == 0);

  assert(strncmp (graph.object (1).head, "jkl", 3) == 0);
  assert(graph.object (1).links.length == 0);

  assert(strncmp (graph.object (0).head, "ghi", 3) == 0);
  assert(graph.object (0).links.length == 0);

  free (buffer);
}

static void test_sort_kahn_2 ()
{
  size_t buffer_size = 100;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_complex_2 (&c);

  graph_t graph (c.object_graph ());
  graph.sort_kahn ();


  assert(strncmp (graph.object (4).head, "abc", 3) == 0);
  assert(graph.object (4).links.length == 3);
  assert(graph.object (4).links[0].objidx == 3);
  assert(graph.object (4).links[1].objidx == 0);
  assert(graph.object (4).links[2].objidx == 2);

  assert(strncmp (graph.object (3).head, "def", 3) == 0);
  assert(graph.object (3).links.length == 1);
  assert(graph.object (3).links[0].objidx == 1);

  assert(strncmp (graph.object (2).head, "mn", 2) == 0);
  assert(graph.object (2).links.length == 0);

  assert(strncmp (graph.object (1).head, "ghi", 3) == 0);
  assert(graph.object (1).links.length == 1);
  assert(graph.object (1).links[0].objidx == 0);

  assert(strncmp (graph.object (0).head, "jkl", 3) == 0);
  assert(graph.object (0).links.length == 0);

  free (buffer);
}

static void test_sort_shortest ()
{
  size_t buffer_size = 100;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_complex_2 (&c);

  graph_t graph (c.object_graph ());
  graph.sort_shortest_distance ();

  assert(strncmp (graph.object (4).head, "abc", 3) == 0);
  assert(graph.object (4).links.length == 3);
  assert(graph.object (4).links[0].objidx == 2);
  assert(graph.object (4).links[1].objidx == 0);
  assert(graph.object (4).links[2].objidx == 3);

  assert(strncmp (graph.object (3).head, "mn", 2) == 0);
  assert(graph.object (3).links.length == 0);

  assert(strncmp (graph.object (2).head, "def", 3) == 0);
  assert(graph.object (2).links.length == 1);
  assert(graph.object (2).links[0].objidx == 1);

  assert(strncmp (graph.object (1).head, "ghi", 3) == 0);
  assert(graph.object (1).links.length == 1);
  assert(graph.object (1).links[0].objidx == 0);

  assert(strncmp (graph.object (0).head, "jkl", 3) == 0);
  assert(graph.object (0).links.length == 0);

  free (buffer);
}

static void test_duplicate_leaf ()
{
  size_t buffer_size = 100;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_complex_2 (&c);

  graph_t graph (c.object_graph ());
  graph.duplicate (4, 1);

  assert(strncmp (graph.object (5).head, "abc", 3) == 0);
  assert(graph.object (5).links.length == 3);
  assert(graph.object (5).links[0].objidx == 3);
  assert(graph.object (5).links[1].objidx == 4);
  assert(graph.object (5).links[2].objidx == 0);

  assert(strncmp (graph.object (4).head, "jkl", 3) == 0);
  assert(graph.object (4).links.length == 0);

  assert(strncmp (graph.object (3).head, "def", 3) == 0);
  assert(graph.object (3).links.length == 1);
  assert(graph.object (3).links[0].objidx == 2);

  assert(strncmp (graph.object (2).head, "ghi", 3) == 0);
  assert(graph.object (2).links.length == 1);
  assert(graph.object (2).links[0].objidx == 1);

  assert(strncmp (graph.object (1).head, "jkl", 3) == 0);
  assert(graph.object (1).links.length == 0);

  assert(strncmp (graph.object (0).head, "mn", 2) == 0);
  assert(graph.object (0).links.length == 0);

  free (buffer);
}

static void test_duplicate_interior ()
{
  size_t buffer_size = 100;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_complex_3 (&c);

  graph_t graph (c.object_graph ());
  graph.duplicate (3, 2);

  assert(strncmp (graph.object (6).head, "abc", 3) == 0);
  assert(graph.object (6).links.length == 3);
  assert(graph.object (6).links[0].objidx == 4);
  assert(graph.object (6).links[1].objidx == 2);
  assert(graph.object (6).links[2].objidx == 1);

  assert(strncmp (graph.object (5).head, "jkl", 3) == 0);
  assert(graph.object (5).links.length == 1);
  assert(graph.object (5).links[0].objidx == 0);

  assert(strncmp (graph.object (4).head, "def", 3) == 0);
  assert(graph.object (4).links.length == 1);
  assert(graph.object (4).links[0].objidx == 3);

  assert(strncmp (graph.object (3).head, "ghi", 3) == 0);
  assert(graph.object (3).links.length == 1);
  assert(graph.object (3).links[0].objidx == 5);

  assert(strncmp (graph.object (2).head, "jkl", 3) == 0);
  assert(graph.object (2).links.length == 1);
  assert(graph.object (2).links[0].objidx == 0);

  assert(strncmp (graph.object (1).head, "mn", 2) == 0);
  assert(graph.object (1).links.length == 0);

  assert(strncmp (graph.object (0).head, "opqrst", 6) == 0);
  assert(graph.object (0).links.length == 0);

  free (buffer);
}

static void
test_serialize ()
{
  size_t buffer_size = 100;
  void* buffer_1 = malloc (buffer_size);
  hb_serialize_context_t c1 (buffer_1, buffer_size);
  populate_serializer_simple (&c1);
  hb_bytes_t expected = c1.copy_bytes ();

  void* buffer_2 = malloc (buffer_size);
  hb_serialize_context_t c2 (buffer_2, buffer_size);

  graph_t graph (c1.object_graph ());
  graph.serialize (&c2);
  hb_bytes_t actual = c2.copy_bytes ();

  assert (actual == expected);

  actual.free ();
  expected.free ();
  free (buffer_1);
  free (buffer_2);
}

static void test_will_overflow_1 ()
{
  size_t buffer_size = 100;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_complex_2 (&c);
  graph_t graph (c.object_graph ());

  assert (!graph.will_overflow (nullptr));

  free (buffer);
}

static void test_will_overflow_2 ()
{
  size_t buffer_size = 160000;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_with_overflow (&c);
  graph_t graph (c.object_graph ());

  assert (graph.will_overflow (nullptr));

  free (buffer);
}

static void test_will_overflow_3 ()
{
  size_t buffer_size = 160000;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_with_dedup_overflow (&c);
  graph_t graph (c.object_graph ());

  assert (graph.will_overflow (nullptr));

  free (buffer);
}

static void test_resolve_overflows_via_sort ()
{
  size_t buffer_size = 160000;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_with_overflow (&c);
  graph_t graph (c.object_graph ());

  void* out_buffer = malloc (buffer_size);
  hb_serialize_context_t out (out_buffer, buffer_size);

  hb_resolve_overflows (c.object_graph (), &out);
  assert (!out.offset_overflow ());
  hb_bytes_t result = out.copy_bytes ();
  assert (result.length == (80000 + 3 + 3 * 2));

  result.free ();
  free (buffer);
  free (out_buffer);
}

static void test_resolve_overflows_via_duplication ()
{
  size_t buffer_size = 160000;
  void* buffer = malloc (buffer_size);
  hb_serialize_context_t c (buffer, buffer_size);
  populate_serializer_with_dedup_overflow (&c);
  graph_t graph (c.object_graph ());

  void* out_buffer = malloc (buffer_size);
  hb_serialize_context_t out (out_buffer, buffer_size);

  hb_resolve_overflows (c.object_graph (), &out);
  assert (!out.offset_overflow ());
  hb_bytes_t result = out.copy_bytes ();
  assert (result.length == (10000 + 2 * 2 + 60000 + 2 + 3 * 2));

  result.free ();
  free (buffer);
  free (out_buffer);
}

// TODO(garretrieger): update will_overflow tests to check the overflows array.
// TODO(garretrieger): add a test(s) using a real font.
// TODO(garretrieger): add tests for priority raising.

int
main (int argc, char **argv)
{
  test_serialize ();
  test_sort_kahn_1 ();
  test_sort_kahn_2 ();
  test_sort_shortest ();
  test_will_overflow_1 ();
  test_will_overflow_2 ();
  test_will_overflow_3 ();
  test_resolve_overflows_via_sort ();
  test_resolve_overflows_via_duplication ();
  test_duplicate_leaf ();
  test_duplicate_interior ();
}
@ -5,7 +5,7 @@

MY_TEMP_DIR=`mktemp -d -t harfbuzz_update.XXXXXX` || exit 1

VERSION=2.8.0
VERSION=2.8.1

git clone https://github.com/harfbuzz/harfbuzz ${MY_TEMP_DIR}/harfbuzz
git -C ${MY_TEMP_DIR}/harfbuzz checkout ${VERSION}