Merge branch 'master' into cff-subset

pull/1113/head
Michiharu Ariza 6 years ago
commit 26c291aaa0
  1. .circleci/config.yml (1 changed line)
  2. src/Makefile.sources (2 changed lines)
  3. src/hb-aat-layout-common.hh (184 changed lines)
  4. src/hb-aat-layout-just-table.hh (416 changed lines)
  5. src/hb-aat-layout-kerx-table.hh (508 changed lines)
  6. src/hb-aat-layout-morx-table.hh (71 changed lines)
  7. src/hb-aat-layout.cc (31 changed lines)
  8. src/hb-aat-layout.hh (12 changed lines)
  9. src/hb-kern.hh (139 changed lines)
  10. src/hb-open-type.hh (11 changed lines)
  11. src/hb-ot-kern-table.hh (654 changed lines)
  12. src/hb-ot-layout-gpos-table.hh (60 changed lines)
  13. src/hb-ot-layout.cc (70 changed lines)
  14. src/hb-ot-layout.hh (24 changed lines)
  15. src/hb-ot-shape-fallback.cc (13 changed lines)
  16. src/hb-ot-shape.cc (134 changed lines)
  17. src/hb-ot-shape.hh (2 changed lines)
  18. test/fuzzing/hb-shape-fuzzer.cc (24 changed lines)

@ -225,6 +225,7 @@ jobs:
crosscompile-notest-djgpp:
docker:
# https://gist.github.com/ebraminio/8551fc74f27951e668102baa2f6b1175
- image: quay.io/ebraminio/djgpp
steps:
- checkout

@ -16,6 +16,7 @@ HB_BASE_sources = \
hb-font.hh \
hb-font.cc \
hb-iter.hh \
hb-kern.hh \
hb-map.hh \
hb-map.cc \
hb-machinery.hh \
@ -104,6 +105,7 @@ HB_OT_sources = \
hb-aat-layout-ankr-table.hh \
hb-aat-layout-bsln-table.hh \
hb-aat-layout-feat-table.hh \
hb-aat-layout-just-table.hh \
hb-aat-layout-kerx-table.hh \
hb-aat-layout-morx-table.hh \
hb-aat-layout-trak-table.hh \

@ -58,6 +58,11 @@ struct LookupFormat0
TRACE_SANITIZE (this);
return_trace (arrayZ.sanitize (c, c->get_num_glyphs ()));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (arrayZ.sanitize (c, c->get_num_glyphs (), base));
}
protected:
HBUINT16 format; /* Format identifier--format = 0 */
@ -80,6 +85,11 @@ struct LookupSegmentSingle
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && value.sanitize (c));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && value.sanitize (c, base));
}
GlyphID last; /* Last GlyphID in this segment */
GlyphID first; /* First GlyphID in this segment */
@ -105,6 +115,11 @@ struct LookupFormat2
TRACE_SANITIZE (this);
return_trace (segments.sanitize (c));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (segments.sanitize (c, base));
}
protected:
HBUINT16 format; /* Format identifier--format = 2 */
@ -135,6 +150,14 @@ struct LookupSegmentArray
first <= last &&
valuesZ.sanitize (c, base, last - first + 1));
}
template <typename T2>
inline bool sanitize (hb_sanitize_context_t *c, const void *base, T2 user_data) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
first <= last &&
valuesZ.sanitize (c, base, last - first + 1, user_data));
}
GlyphID last; /* Last GlyphID in this segment */
GlyphID first; /* First GlyphID in this segment */
@ -162,6 +185,11 @@ struct LookupFormat4
TRACE_SANITIZE (this);
return_trace (segments.sanitize (c, this));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (segments.sanitize (c, this, base));
}
protected:
HBUINT16 format; /* Format identifier--format = 4 */
@ -183,6 +211,11 @@ struct LookupSingle
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && value.sanitize (c));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && value.sanitize (c, base));
}
GlyphID glyph; /* Last GlyphID */
T value; /* The lookup value (only one) */
@ -207,6 +240,11 @@ struct LookupFormat6
TRACE_SANITIZE (this);
return_trace (entries.sanitize (c));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (entries.sanitize (c, base));
}
protected:
HBUINT16 format; /* Format identifier--format = 6 */
@ -233,6 +271,11 @@ struct LookupFormat8
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount));
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount, base));
}
protected:
HBUINT16 format; /* Format identifier--format = 8 */
@ -328,6 +371,20 @@ struct Lookup
default:return_trace (true);
}
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 0: return_trace (u.format0.sanitize (c, base));
case 2: return_trace (u.format2.sanitize (c, base));
case 4: return_trace (u.format4.sanitize (c, base));
case 6: return_trace (u.format6.sanitize (c, base));
case 8: return_trace (u.format8.sanitize (c, base));
case 10: return_trace (false); /* No need to support format10 apparently */
default:return_trace (true);
}
}
protected:
union {
@ -365,7 +422,7 @@ namespace AAT {
enum { DELETED_GLYPH = 0xFFFF };
/*
* Extended State Table
* (Extended) State Table
*/
template <typename T>
@ -430,13 +487,13 @@ struct StateTable
CLASS_END_OF_LINE = 3,
};
inline unsigned int new_state (unsigned int newState) const
{ return Types::extended ? newState : (newState - stateArrayTable) / nClasses; }
inline int new_state (unsigned int newState) const
{ return Types::extended ? newState : ((int) newState - (int) stateArrayTable) / nClasses; }
inline unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
{
if (unlikely (glyph_id == DELETED_GLYPH)) return CLASS_DELETED_GLYPH;
return (this+classTable).get_class (glyph_id, num_glyphs);
return (this+classTable).get_class (glyph_id, num_glyphs, 1);
}
inline const Entry<Extra> *get_entries () const
@ -444,7 +501,7 @@ struct StateTable
return (this+entryTable).arrayZ;
}
inline const Entry<Extra> *get_entryZ (unsigned int state, unsigned int klass) const
inline const Entry<Extra> *get_entryZ (int state, unsigned int klass) const
{
if (unlikely (klass >= nClasses)) return nullptr;
@ -452,6 +509,7 @@ struct StateTable
const Entry<Extra> *entries = (this+entryTable).arrayZ;
unsigned int entry = states[state * nClasses + klass];
DEBUG_MSG (APPLY, nullptr, "e%u", entry);
return &entries[entry];
}
@ -467,28 +525,69 @@ struct StateTable
const Entry<Extra> *entries = (this+entryTable).arrayZ;
unsigned int num_classes = nClasses;
if (unlikely (hb_unsigned_mul_overflows (num_classes, states[0].static_size)))
return_trace (false);
unsigned int row_stride = num_classes * states[0].static_size;
unsigned int num_states = 1;
/* Apple 'kern' table has this peculiarity:
*
* "Because the stateTableOffset in the state table header is (strictly
* speaking) redundant, some 'kern' tables use it to record an initial
* state where that should not be StartOfText. To determine if this is
* done, calculate what the stateTableOffset should be. If it's different
* from the actual stateTableOffset, use it as the initial state."
*
* We implement this by calling the initial state zero, but allow *negative*
* states if the start state indeed was not the first state. Since the code
* is shared, this will also apply to 'mort' table. The 'kerx' / 'morx'
* tables are not affected since those address states by index, not offset.
*/
int min_state = 0;
int max_state = 0;
unsigned int num_entries = 0;
unsigned int state = 0;
int state_pos = 0;
int state_neg = 0;
unsigned int entry = 0;
while (state < num_states)
while (min_state < state_neg || state_pos <= max_state)
{
if (unlikely (hb_unsigned_mul_overflows (num_classes, states[0].static_size)))
return_trace (false);
if (min_state < state_neg)
{
/* Negative states. */
if (unlikely (hb_unsigned_mul_overflows (min_state, num_classes)))
return_trace (false);
if (unlikely (!c->check_array (&states[min_state * num_classes], -min_state, row_stride)))
return_trace (false);
if ((c->max_ops -= state_neg - min_state) < 0)
return_trace (false);
{ /* Sweep new states. */
const HBUSHORT *stop = &states[min_state * num_classes];
if (unlikely (stop > states))
return_trace (false);
for (const HBUSHORT *p = states; stop < p; p--)
num_entries = MAX<unsigned int> (num_entries, *(p - 1) + 1);
state_neg = min_state;
}
}
if (unlikely (!c->check_array (states,
num_states,
num_classes * states[0].static_size)))
return_trace (false);
if ((c->max_ops -= num_states - state) < 0)
return_trace (false);
{ /* Sweep new states. */
const HBUSHORT *stop = &states[num_states * num_classes];
for (const HBUSHORT *p = &states[state * num_classes]; p < stop; p++)
num_entries = MAX<unsigned int> (num_entries, *p + 1);
state = num_states;
if (state_pos <= max_state)
{
/* Positive states. */
if (unlikely (!c->check_array (states, max_state + 1, row_stride)))
return_trace (false);
if ((c->max_ops -= max_state - state_pos + 1) < 0)
return_trace (false);
{ /* Sweep new states. */
if (unlikely (hb_unsigned_mul_overflows ((max_state + 1), num_classes)))
return_trace (false);
const HBUSHORT *stop = &states[(max_state + 1) * num_classes];
if (unlikely (stop < states))
return_trace (false);
for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++)
num_entries = MAX<unsigned int> (num_entries, *p + 1);
state_pos = max_state + 1;
}
}
if (unlikely (!c->check_array (entries, num_entries)))
@ -499,8 +598,9 @@ struct StateTable
const Entry<Extra> *stop = &entries[num_entries];
for (const Entry<Extra> *p = &entries[entry]; p < stop; p++)
{
unsigned int newState = new_state (p->newState);
num_states = MAX<unsigned int> (num_states, newState + 1);
int newState = new_state (p->newState);
min_state = MIN (min_state, newState);
max_state = MAX (max_state, newState);
}
entry = num_entries;
}
@ -528,36 +628,36 @@ struct StateTable
struct ClassTable
{
inline unsigned int get_class (hb_codepoint_t glyph_id) const
inline unsigned int get_class (hb_codepoint_t glyph_id, unsigned int outOfRange) const
{
return firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount ? classArrayZ[glyph_id - firstGlyph] : 1;
unsigned int i = glyph_id - firstGlyph;
return i >= classArray.len ? outOfRange : classArray.arrayZ[i];
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && classArrayZ.sanitize (c, glyphCount));
return_trace (c->check_struct (this) && classArray.sanitize (c));
}
protected:
GlyphID firstGlyph; /* First glyph index included in the trimmed array. */
HBUINT16 glyphCount; /* Total number of glyphs (equivalent to the last
* glyph minus the value of firstGlyph plus 1). */
UnsizedArrayOf<HBUINT8>
classArrayZ; /* The class codes (indexed by glyph index minus
* firstGlyph). */
GlyphID firstGlyph; /* First glyph index included in the trimmed array. */
ArrayOf<HBUINT8> classArray; /* The class codes (indexed by glyph index minus
* firstGlyph). */
public:
DEFINE_SIZE_ARRAY (4, classArrayZ);
DEFINE_SIZE_ARRAY (4, classArray);
};
struct MortTypes
struct ObsoleteTypes
{
static const bool extended = false;
typedef HBUINT16 HBUINT;
typedef HBUINT8 HBUSHORT;
struct ClassType : ClassTable
{
inline unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs HB_UNUSED) const
inline unsigned int get_class (hb_codepoint_t glyph_id,
unsigned int num_glyphs HB_UNUSED,
unsigned int outOfRange) const
{
return ClassTable::get_class (glyph_id);
return ClassTable::get_class (glyph_id, outOfRange);
}
};
template <typename T>
@ -575,17 +675,19 @@ struct MortTypes
return offsetToIndex (2 * offset, base, array);
}
};
struct MorxTypes
struct ExtendedTypes
{
static const bool extended = true;
typedef HBUINT32 HBUINT;
typedef HBUINT16 HBUSHORT;
struct ClassType : Lookup<HBUINT16>
{
inline unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
inline unsigned int get_class (hb_codepoint_t glyph_id,
unsigned int num_glyphs,
unsigned int outOfRange) const
{
const HBUINT16 *v = get_value (glyph_id, num_glyphs);
return v ? *v : 1;
return v ? *v : outOfRange;
}
};
template <typename T>
@ -620,13 +722,14 @@ struct StateTableDriver
if (!c->in_place)
buffer->clear_output ();
unsigned int state = StateTable<Types, EntryData>::STATE_START_OF_TEXT;
int state = StateTable<Types, EntryData>::STATE_START_OF_TEXT;
bool last_was_dont_advance = false;
for (buffer->idx = 0; buffer->successful;)
{
unsigned int klass = buffer->idx < buffer->len ?
machine.get_class (buffer->info[buffer->idx].codepoint, num_glyphs) :
(unsigned) StateTable<Types, EntryData>::CLASS_END_OF_TEXT;
DEBUG_MSG (APPLY, nullptr, "c%u at %u", klass, buffer->idx);
const Entry<EntryData> *entry = machine.get_entryZ (state, klass);
if (unlikely (!entry))
break;
@ -659,6 +762,7 @@ struct StateTableDriver
last_was_dont_advance = (entry->flags & context_t::DontAdvance) && buffer->max_ops-- > 0;
state = machine.new_state (entry->newState);
DEBUG_MSG (APPLY, nullptr, "s%d", state);
if (buffer->idx == buffer->len)
break;
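Note on the StateTable changes above: for the obsolete 'kern'/'mort' layout, new-state values are byte offsets rather than row indices, and the sanitizer now tracks a signed [min_state, max_state] range so a table whose initial state sits before the nominal state array still validates. A minimal standalone sketch of that offset-to-index mapping, assuming 1-byte class entries as in ObsoleteTypes; names here are invented for illustration, not the HarfBuzz API:

#include <cassert>

/* Sketch: in the obsolete layout a new-state value is a byte offset from the
 * start of the subtable; subtracting the state-array offset and dividing by
 * the row stride (nClasses 1-byte entries per row) yields a signed row index,
 * which may be negative when the font encodes a non-StartOfText initial state. */
static int offset_to_state (unsigned int new_state_offset,
                            unsigned int state_array_offset,
                            unsigned int n_classes)
{
  return ((int) new_state_offset - (int) state_array_offset) / (int) n_classes;
}

int main ()
{
  /* Hypothetical table: state array at byte offset 64, 8 classes per row. */
  assert (offset_to_state (64, 64, 8) == 0);   /* StartOfText */
  assert (offset_to_state (80, 64, 8) == 2);   /* a later state */
  assert (offset_to_state (48, 64, 8) == -2);  /* a "negative" initial state */
  return 0;
}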

@ -0,0 +1,416 @@
/*
* Copyright © 2018 Ebrahim Byagowi
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*/
#ifndef HB_AAT_LAYOUT_JUST_TABLE_HH
#define HB_AAT_LAYOUT_JUST_TABLE_HH
#include "hb-aat-layout-common.hh"
#include "hb-ot-layout.hh"
#include "hb-open-type.hh"
#include "hb-aat-layout-morx-table.hh"
/*
* just -- Justification
* https://developer.apple.com/fonts/TrueType-Reference-Manual/RM06/Chap6just.html
*/
#define HB_AAT_TAG_just HB_TAG('j','u','s','t')
namespace AAT {
using namespace OT;
struct ActionSubrecordHeader
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
HBUINT16 actionClass; /* The JustClass value associated with this
* ActionSubrecord. */
HBUINT16 actionType; /* The type of postcompensation action. */
HBUINT16 actionLength; /* Length of this ActionSubrecord record, which
* must be a multiple of 4. */
public:
DEFINE_SIZE_STATIC (6);
};
struct DecompositionAction
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
ActionSubrecordHeader
header;
Fixed lowerLimit; /* If the distance factor is less than this value,
* then the ligature is decomposed. */
Fixed upperLimit; /* If the distance factor is greater than this value,
* then the ligature is decomposed. */
HBUINT16 order; /* Numerical order in which this ligature will
* be decomposed; you may want infrequent ligatures
* to decompose before more frequent ones. The ligatures
* on the line of text will decompose in increasing
* value of this field. */
ArrayOf<HBUINT16>
decomposedglyphs;
/* Number of 16-bit glyph indexes that follow;
* the ligature will be decomposed into these glyphs.
*
* Array of decomposed glyphs. */
public:
DEFINE_SIZE_ARRAY (18, decomposedglyphs);
};
struct UnconditionalAddGlyphAction
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
protected:
ActionSubrecordHeader
header;
GlyphID addGlyph; /* Glyph that should be added if the distance factor
* is growing. */
public:
DEFINE_SIZE_STATIC (8);
};
struct ConditionalAddGlyphAction
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
protected:
ActionSubrecordHeader
header;
Fixed substThreshold; /* Distance growth factor (in ems) at which
* this glyph is replaced and the growth factor
* recalculated. */
GlyphID addGlyph; /* Glyph to be added as kashida. If this value is
* 0xFFFF, no extra glyph will be added. Note that
* generally when a glyph is added, justification
* will need to be redone. */
GlyphID substGlyph; /* Glyph to be substituted for this glyph if the
* growth factor equals or exceeds the value of
* substThreshold. */
public:
DEFINE_SIZE_STATIC (14);
};
struct DuctileGlyphAction
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
protected:
ActionSubrecordHeader
header;
HBUINT32 variationAxis; /* The 4-byte tag identifying the ductile axis.
* This would normally be 0x64756374 ('duct'),
* but you may use any axis the font contains. */
Fixed minimumLimit; /* The lowest value for the ductility axis that
* still yields an acceptable appearance. Normally
* this will be 1.0. */
Fixed noStretchValue; /* This is the default value that corresponds to
* no change in appearance. Normally, this will
* be 1.0. */
Fixed maximumLimit; /* The highest value for the ductility axis that
* still yields an acceptable appearance. */
public:
DEFINE_SIZE_STATIC (22);
};
struct RepeatedAddGlyphAction
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
protected:
ActionSubrecordHeader
header;
HBUINT16 flags; /* Currently unused; set to 0. */
GlyphID glyph; /* Glyph that should be added if the distance factor
* is growing. */
public:
DEFINE_SIZE_STATIC (10);
};
struct ActionSubrecord
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
switch (u.header.actionType)
{
case 0: return_trace (u.decompositionAction.sanitize (c));
case 1: return_trace (u.unconditionalAddGlyphAction.sanitize (c));
case 2: return_trace (u.conditionalAddGlyphAction.sanitize (c));
// case 3: return_trace (u.stretchGlyphAction.sanitize (c));
case 4: return_trace (u.decompositionAction.sanitize (c));
case 5: return_trace (u.decompositionAction.sanitize (c));
default: return_trace (true);
}
}
inline unsigned int get_length () const { return u.header.actionLength; }
protected:
union {
ActionSubrecordHeader header;
DecompositionAction decompositionAction;
UnconditionalAddGlyphAction unconditionalAddGlyphAction;
ConditionalAddGlyphAction conditionalAddGlyphAction;
/* StretchGlyphAction stretchGlyphAction; -- Not supported by CoreText */
DuctileGlyphAction ductileGlyphAction;
RepeatedAddGlyphAction repeatedAddGlyphAction;
} u; /* Data. The format of this data depends on
* the value of the actionType field. */
public:
DEFINE_SIZE_UNION (6, header);
};
struct PostcompensationActionChain
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
unsigned int offset = min_size;
for (unsigned int i = 0; i < count; i++)
{
const ActionSubrecord& subrecord = StructAtOffset<ActionSubrecord> (this, offset);
if (unlikely (!subrecord.sanitize (c))) return_trace (false);
offset += subrecord.get_length ();
}
return_trace (true);
}
protected:
HBUINT32 count;
public:
DEFINE_SIZE_STATIC (4);
};
struct JustWidthDeltaEntry
{
enum Flags
{
Reserved1 =0xE000,/* Reserved. You should set these bits to zero. */
UnlimiteGap =0x1000,/* The glyph can take unlimited gap. When this
* glyph participates in the justification process,
* it and any other glyphs on the line having this
* bit set absorb all the remaining gap. */
Reserved2 =0x0FF0,/* Reserved. You should set these bits to zero. */
Priority =0x000F /* The justification priority of the glyph. */
};
enum Priority
{
Kashida = 0, /* Kashida priority. This is the highest priority
* during justification. */
Whitespace = 1, /* Whitespace priority. Any whitespace glyphs (as
* identified in the glyph properties table) will
* get this priority. */
InterCharacter = 2, /* Inter-character priority. Give this to any
* remaining glyphs. */
NullPriority = 3 /* Null priority. You should set this priority for
* glyphs that only participate in justification
* after the above priorities. Normally all glyphs
* have one of the previous three values. If you
* don't want a glyph to participate in justification,
* and you don't want to set its factors to zero,
* you may instead assign it to the null priority. */
};
protected:
Fixed beforeGrowLimit;/* The ratio by which the advance width of the
* glyph is permitted to grow on the left or top side. */
Fixed beforeShrinkLimit;
/* The ratio by which the advance width of the
* glyph is permitted to shrink on the left or top side. */
Fixed afterGrowLimit; /* The ratio by which the advance width of the glyph
* is permitted to grow on the right or bottom side. */
Fixed afterShrinkLimit;
/* The ratio by which the advance width of the glyph
* is at most permitted to shrink on the right or
* bottom side. */
HBUINT16 growFlags; /* Flags controlling the grow case. */
HBUINT16 shrinkFlags; /* Flags controlling the shrink case. */
public:
DEFINE_SIZE_STATIC (20);
};
struct WidthDeltaPair
{
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
protected:
HBUINT32 justClass; /* The justification category associated
* with the wdRecord field. Only 7 bits of
* this field are used. (The other bits are
* used as padding to guarantee longword
* alignment of the following record). */
JustWidthDeltaEntry
wdRecord; /* The actual width delta record. */
public:
DEFINE_SIZE_STATIC (24);
};
struct WidthDeltaCluster : OT::LArrayOf<WidthDeltaPair> {};
struct JustificationCategory
{
typedef void EntryData;
enum Flags
{
SetMark =0x8000,/* If set, make the current glyph the marked
* glyph. */
DontAdvance =0x4000,/* If set, don't advance to the next glyph before
* going to the new state. */
MarkCategory =0x3F80,/* The justification category for the marked
* glyph if nonzero. */
CurrentCategory =0x007F /* The justification category for the current
* glyph if nonzero. */
};
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
morphHeader.sanitize (c) &&
stHeader.sanitize (c)));
}
protected:
ChainSubtable<ObsoleteTypes>
morphHeader; /* Metamorphosis-style subtable header. */
StateTable<ObsoleteTypes, EntryData>
stHeader; /* The justification insertion state table header */
public:
DEFINE_SIZE_STATIC (30);
};
struct JustificationHeader
{
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
justClassTable.sanitize (c, base, base) &&
wdcTable.sanitize (c, base) &&
pcTable.sanitize (c, base) &&
lookupTable.sanitize (c, base)));
}
protected:
OffsetTo<JustificationCategory>
justClassTable; /* Offset to the justification category state table. */
OffsetTo<WidthDeltaCluster>
wdcTable; /* Offset from start of justification table to start
* of the subtable containing the width delta factors
* for the glyphs in your font.
*
* The width delta clusters table. */
OffsetTo<PostcompensationActionChain>
pcTable; /* Offset from start of justification table to start
* of postcompensation subtable (set to zero if none).
*
* The postcompensation subtable, if present in the font. */
Lookup<OffsetTo<WidthDeltaCluster> >
lookupTable; /* Lookup table associating glyphs with width delta
* clusters. See the description of Width Delta Clusters
* table for details on how to interpret the lookup values. */
public:
DEFINE_SIZE_MIN (8);
};
struct just
{
static const hb_tag_t tableTag = HB_AAT_TAG_just;
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (unlikely (c->check_struct (this) &&
horizData.sanitize (c, this, this) &&
vertData.sanitize (c, this, this)));
}
protected:
FixedVersion<>version; /* Version of the justification table
* (0x00010000u for version 1.0). */
HBUINT16 format; /* Format of the justification table (set to 0). */
OffsetTo<JustificationHeader>
horizData; /* Byte offset from the start of the justification table
* to the header for tables that contain justification
* information for horizontal text.
* If you are not including this information,
* store 0. */
OffsetTo<JustificationHeader>
vertData; /* ditto, vertical */
public:
DEFINE_SIZE_STATIC (10);
};
} /* namespace AAT */
#endif /* HB_AAT_LAYOUT_JUST_TABLE_HH */
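Note on PostcompensationActionChain above: it validates a run of variable-length ActionSubrecords by advancing a byte offset by each record's actionLength. A standalone sketch of that walk over a raw byte buffer; the record layout and names below are made up for illustration and are not the HarfBuzz sanitizer:

#include <cstdint>
#include <cstddef>

/* Sketch: walk `count` variable-length records, each starting with a 6-byte
 * header whose third big-endian 16-bit field is the record's total length.
 * Returns false if any record would run past the end of the buffer. */
static bool walk_action_subrecords (const uint8_t *data, size_t size,
                                    unsigned int count)
{
  size_t offset = 4; /* skip the 32-bit record count, as in the chain header */
  for (unsigned int i = 0; i < count; i++)
  {
    if (offset + 6 > size) return false;                 /* header must fit */
    unsigned int length = (data[offset + 4] << 8) | data[offset + 5];
    if (length < 6 || offset + length > size) return false;
    offset += length;                                     /* next subrecord */
  }
  return true;
}

int main ()
{
  /* One record: class=0, type=1, length=8, plus 2 bytes of payload. */
  const uint8_t buf[] = { 0,0,0,1,  0,0, 0,1, 0,8, 0,42 };
  return walk_action_subrecords (buf, sizeof (buf), 1) ? 0 : 1;
}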

@ -28,10 +28,8 @@
#ifndef HB_AAT_LAYOUT_KERX_TABLE_HH
#define HB_AAT_LAYOUT_KERX_TABLE_HH
#include "hb-open-type.hh"
#include "hb-aat-layout-common.hh"
#include "hb-ot-layout-gpos-table.hh"
#include "hb-ot-kern-table.hh"
#include "hb-kern.hh"
#include "hb-aat-layout-ankr-table.hh"
/*
* kerx -- Extended Kerning
@ -55,29 +53,54 @@ kerxTupleKern (int value,
unsigned int offset = value;
const FWORD *pv = &StructAtOffset<FWORD> (base, offset);
if (unlikely (!pv->sanitize (&c->sanitizer))) return 0;
if (unlikely (!c->sanitizer.check_array (pv, tupleCount))) return 0;
return *pv;
}
struct KerxSubTableHeader
struct hb_glyph_pair_t
{
hb_codepoint_t left;
hb_codepoint_t right;
};
struct KernPair
{
inline int get_kerning (void) const
{ return value; }
inline int cmp (const hb_glyph_pair_t &o) const
{
int ret = left.cmp (o.left);
if (ret) return ret;
return right.cmp (o.right);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
return_trace (c->check_struct (this));
}
protected:
GlyphID left;
GlyphID right;
FWORD value;
public:
HBUINT32 length;
HBUINT32 coverage;
HBUINT32 tupleCount;
public:
DEFINE_SIZE_STATIC (12);
DEFINE_SIZE_STATIC (6);
};
template <typename KernSubTableHeader>
struct KerxSubTableFormat0
{
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{
hb_glyph_pair_t pair = {left, right};
int i = pairs.bsearch (pair);
if (i == -1) return 0;
return pairs[i].get_kerning ();
}
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right,
hb_aat_apply_context_t *c) const
{
@ -85,7 +108,7 @@ struct KerxSubTableFormat0
int i = pairs.bsearch (pair);
if (i == -1) return 0;
int v = pairs[i].get_kerning ();
return kerxTupleKern (v, header.tupleCount, this, c);
return kerxTupleKern (v, header.tuple_count (), this, c);
}
inline bool apply (hb_aat_apply_context_t *c) const
@ -95,8 +118,11 @@ struct KerxSubTableFormat0
if (!c->plan->requested_kerning)
return false;
if (header.coverage & header.Backwards)
return false;
accelerator_t accel (*this, c);
hb_kern_machine_t<accelerator_t> machine (accel);
hb_kern_machine_t<accelerator_t> machine (accel, header.coverage & header.CrossStream);
machine.kern (c->font, c->buffer, c->plan->kern_mask);
return_trace (true);
@ -119,20 +145,33 @@ struct KerxSubTableFormat0
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
pairs.sanitize (c)));
return_trace (likely (pairs.sanitize (c)));
}
protected:
KerxSubTableHeader header;
BinSearchArrayOf<KernPair, HBUINT32>
KernSubTableHeader header;
BinSearchArrayOf<KernPair, typename KernSubTableHeader::Types::HBUINT>
pairs; /* Sorted kern records. */
public:
DEFINE_SIZE_ARRAY (28, pairs);
DEFINE_SIZE_ARRAY (KernSubTableHeader::static_size + 16, pairs);
};
struct KerxSubTableFormat1
template <bool extended>
struct Format1Entry;
template <>
struct Format1Entry<true>
{
enum Flags
{
Push = 0x8000, /* If set, push this glyph on the kerning stack. */
DontAdvance = 0x4000, /* If set, don't advance to the next glyph
* before going to the new state. */
Reset = 0x2000, /* If set, reset the kerning data (clear the stack) */
Reserved = 0x1FFF, /* Not used; set to 0. */
};
struct EntryData
{
HBUINT16 kernActionIndex;/* Index into the kerning value array. If
@ -142,44 +181,78 @@ struct KerxSubTableFormat1
DEFINE_SIZE_STATIC (2);
};
static inline bool performAction (const Entry<EntryData> *entry)
{ return entry->data.kernActionIndex != 0xFFFF; }
static inline unsigned int kernActionIndex (const Entry<EntryData> *entry)
{ return entry->data.kernActionIndex; }
};
template <>
struct Format1Entry<false>
{
enum Flags
{
Push = 0x8000, /* If set, push this glyph on the kerning stack. */
DontAdvance = 0x4000, /* If set, don't advance to the next glyph
* before going to the new state. */
Offset = 0x3FFF, /* Byte offset from beginning of subtable to the
* value table for the glyphs on the kerning stack. */
Reset = 0x0000, /* Not supported? */
};
typedef void EntryData;
static inline bool performAction (const Entry<EntryData> *entry)
{ return entry->flags & Offset; }
static inline unsigned int kernActionIndex (const Entry<EntryData> *entry)
{ return entry->flags & Offset; }
};
template <typename KernSubTableHeader>
struct KerxSubTableFormat1
{
typedef typename KernSubTableHeader::Types Types;
typedef typename Types::HBUINT HBUINT;
typedef Format1Entry<Types::extended> Format1EntryT;
typedef typename Format1EntryT::EntryData EntryData;
struct driver_context_t
{
static const bool in_place = true;
enum Flags
enum
{
Push = 0x8000, /* If set, push this glyph on the kerning stack. */
DontAdvance = 0x4000, /* If set, don't advance to the next glyph
* before going to the new state. */
Reset = 0x2000, /* If set, reset the kerning data (clear the stack) */
Reserved = 0x1FFF, /* Not used; set to 0. */
DontAdvance = Format1EntryT::DontAdvance,
};
inline driver_context_t (const KerxSubTableFormat1 *table,
inline driver_context_t (const KerxSubTableFormat1 *table_,
hb_aat_apply_context_t *c_) :
c (c_),
table (table_),
/* Apparently the offset kernAction is from the beginning of the state-machine,
* similar to offsets in morx table, NOT from beginning of this table, like
* other subtables in kerx. Discovered via testing. */
kernAction (&table->machine + table->kernAction),
depth (0) {}
depth (0),
crossStream (table->header.coverage & table->header.CrossStream) {}
inline bool is_actionable (StateTableDriver<MorxTypes, EntryData> *driver HB_UNUSED,
inline bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
const Entry<EntryData> *entry)
{
return entry->data.kernActionIndex != 0xFFFF;
return Format1EntryT::performAction (entry);
}
inline bool transition (StateTableDriver<MorxTypes, EntryData> *driver,
inline bool transition (StateTableDriver<Types, EntryData> *driver,
const Entry<EntryData> *entry)
{
hb_buffer_t *buffer = driver->buffer;
unsigned int flags = entry->flags;
if (flags & Reset)
{
if (flags & Format1EntryT::Reset)
depth = 0;
}
if (flags & Push)
if (flags & Format1EntryT::Push)
{
if (likely (depth < ARRAY_LENGTH (stack)))
stack[depth++] = buffer->idx;
@ -187,9 +260,11 @@ struct KerxSubTableFormat1
depth = 0; /* Probably not what CoreText does, but better? */
}
if (entry->data.kernActionIndex != 0xFFFF)
if (Format1EntryT::performAction (entry))
{
const FWORD *actions = &kernAction[entry->data.kernActionIndex];
unsigned int kern_idx = Format1EntryT::kernActionIndex (entry);
kern_idx = Types::offsetToIndex (kern_idx, &table->machine, kernAction.arrayZ);
const FWORD *actions = &kernAction[kern_idx];
if (!c->sanitizer.check_array (actions, depth))
{
depth = 0;
@ -197,31 +272,78 @@ struct KerxSubTableFormat1
}
hb_mask_t kern_mask = c->plan->kern_mask;
for (unsigned int i = 0; i < depth; i++)
/* From Apple 'kern' spec:
* "Each pops one glyph from the kerning stack and applies the kerning value to it.
* The end of the list is marked by an odd value... */
unsigned int tuple_count = table->header.tuple_count ();
tuple_count = tuple_count ? tuple_count : 1;
bool last = false;
while (!last && depth--)
{
/* Apparently, when spec says "Each pops one glyph from the kerning stack
* and applies the kerning value to it.", it doesn't mean it in that order.
* The deepest item in the stack corresponds to the first item in the action
* list. Discovered by testing. */
unsigned int idx = stack[i];
int v = *actions++;
if (idx < buffer->len && buffer->info[idx].mask & kern_mask)
unsigned int idx = stack[depth];
int v = *actions;
actions += tuple_count;
if (idx >= buffer->len) continue;
/* "The end of the list is marked by an odd value..." */
last = v & 1;
v &= ~1;
hb_glyph_position_t &o = buffer->pos[idx];
/* Testing shows that CoreText only applies kern (cross-stream or not)
* if none has been applied by previous subtables. That is, it does
* NOT seem to accumulate as otherwise implied by specs. */
/* The following flag is undocumented in the spec, but described
* in the 'kern' table example. */
if (v == -0x8000)
{
o.attach_type() = ATTACH_TYPE_NONE;
o.attach_chain() = 0;
o.x_offset = o.y_offset = 0;
}
else if (HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction))
{
if (HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction))
if (crossStream)
{
buffer->pos[idx].x_advance += c->font->em_scale_x (v);
if (HB_DIRECTION_IS_BACKWARD (buffer->props.direction))
if (buffer->pos[idx].attach_type() && !buffer->pos[idx].y_offset)
{
o.y_offset = c->font->em_scale_y (v);
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
}
}
else if (buffer->info[idx].mask & kern_mask)
{
if (!buffer->pos[idx].x_offset)
{
buffer->pos[idx].x_advance += c->font->em_scale_x (v);
buffer->pos[idx].x_offset += c->font->em_scale_x (v);
}
}
}
else
{
if (crossStream)
{
/* CoreText doesn't do crossStream kerning in vertical. We do. */
if (buffer->pos[idx].attach_type() && !buffer->pos[idx].x_offset)
{
o.x_offset = c->font->em_scale_x (v);
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
}
}
else
else if (buffer->info[idx].mask & kern_mask)
{
buffer->pos[idx].y_advance += c->font->em_scale_y (v);
if (HB_DIRECTION_IS_BACKWARD (buffer->props.direction))
if (!buffer->pos[idx].y_offset)
{
buffer->pos[idx].y_advance += c->font->em_scale_y (v);
buffer->pos[idx].y_offset += c->font->em_scale_y (v);
}
}
}
}
depth = 0;
}
return true;
@ -229,24 +351,24 @@ struct KerxSubTableFormat1
private:
hb_aat_apply_context_t *c;
const KerxSubTableFormat1 *table;
const UnsizedArrayOf<FWORD> &kernAction;
unsigned int stack[8];
unsigned int depth;
bool crossStream;
};
inline bool apply (hb_aat_apply_context_t *c) const
{
TRACE_APPLY (this);
if (!c->plan->requested_kerning)
if (!c->plan->requested_kerning &&
!(header.coverage & header.CrossStream))
return false;
if (header.tupleCount)
return_trace (false); /* TODO kerxTupleKern */
driver_context_t dc (this, c);
StateTableDriver<MorxTypes, EntryData> driver (machine, c->buffer, c->font->face);
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->font->face);
driver.drive (&dc);
return_trace (true);
@ -261,25 +383,29 @@ struct KerxSubTableFormat1
}
protected:
KerxSubTableHeader header;
StateTable<MorxTypes, EntryData> machine;
LOffsetTo<UnsizedArrayOf<FWORD>, false> kernAction;
KernSubTableHeader header;
StateTable<Types, EntryData> machine;
OffsetTo<UnsizedArrayOf<FWORD>, HBUINT, false>kernAction;
public:
DEFINE_SIZE_STATIC (32);
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 5 * sizeof (HBUINT));
};
template <typename KernSubTableHeader>
struct KerxSubTableFormat2
{
typedef typename KernSubTableHeader::Types Types;
typedef typename Types::HBUINT HBUINT;
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right,
hb_aat_apply_context_t *c) const
{
unsigned int num_glyphs = c->sanitizer.get_num_glyphs ();
unsigned int l = (this+leftClassTable).get_value_or_null (left, num_glyphs);
unsigned int r = (this+rightClassTable).get_value_or_null (right, num_glyphs);
unsigned int l = (this+leftClassTable).get_class (left, num_glyphs, 0);
unsigned int r = (this+rightClassTable).get_class (right, num_glyphs, 0);
unsigned int offset = l + r;
const FWORD *v = &StructAtOffset<FWORD> (&(this+array), offset);
if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
return kerxTupleKern (*v, header.tupleCount, this, c);
return kerxTupleKern (*v, header.tuple_count (), this, c);
}
inline bool apply (hb_aat_apply_context_t *c) const
@ -289,8 +415,11 @@ struct KerxSubTableFormat2
if (!c->plan->requested_kerning)
return false;
if (header.coverage & header.Backwards)
return false;
accelerator_t accel (*this, c);
hb_kern_machine_t<accelerator_t> machine (accel);
hb_kern_machine_t<accelerator_t> machine (accel, header.coverage & header.CrossStream);
machine.kern (c->font, c->buffer, c->plan->kern_mask);
return_trace (true);
@ -318,24 +447,33 @@ struct KerxSubTableFormat2
c->check_range (this, array)));
}
/* Note:
* OT kern table specifies ClassTable as having 16-bit entries, whereas
* AAT kern table specifies them as having 8-bit entries.
* I've not seen any fonts with this format in the kern table.
* We follow AAT. */
protected:
KerxSubTableHeader header;
HBUINT32 rowWidth; /* The width, in bytes, of a row in the table. */
LOffsetTo<Lookup<HBUINT16>, false>
KernSubTableHeader header;
HBUINT rowWidth; /* The width, in bytes, of a row in the table. */
OffsetTo<typename Types::ClassType, HBUINT, false>
leftClassTable; /* Offset from beginning of this subtable to
* left-hand class table. */
LOffsetTo<Lookup<HBUINT16>, false>
OffsetTo<typename Types::ClassType, HBUINT, false>
rightClassTable;/* Offset from beginning of this subtable to
* right-hand class table. */
LOffsetTo<UnsizedArrayOf<FWORD>, false>
OffsetTo<UnsizedArrayOf<FWORD>, HBUINT, false>
array; /* Offset from beginning of this subtable to
* the start of the kerning array. */
public:
DEFINE_SIZE_STATIC (28);
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 4 * sizeof (HBUINT));
};
template <typename KernSubTableHeader>
struct KerxSubTableFormat4
{
typedef ExtendedTypes Types;
struct EntryData
{
HBUINT16 ankrActionIndex;/* Either 0xFFFF (for no action) or the index of
@ -372,16 +510,15 @@ struct KerxSubTableFormat4
mark_set (false),
mark (0) {}
inline bool is_actionable (StateTableDriver<MorxTypes, EntryData> *driver HB_UNUSED,
inline bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
const Entry<EntryData> *entry)
{
return entry->data.ankrActionIndex != 0xFFFF;
}
inline bool transition (StateTableDriver<MorxTypes, EntryData> *driver,
inline bool transition (StateTableDriver<Types, EntryData> *driver,
const Entry<EntryData> *entry)
{
hb_buffer_t *buffer = driver->buffer;
unsigned int flags = entry->flags;
if (mark_set && entry->data.ankrActionIndex != 0xFFFF && buffer->idx < buffer->len)
{
@ -457,7 +594,7 @@ struct KerxSubTableFormat4
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
}
if (flags & Mark)
if (entry->flags & Mark)
{
mark_set = true;
mark = buffer->idx;
@ -480,7 +617,7 @@ struct KerxSubTableFormat4
driver_context_t dc (this, c);
StateTableDriver<MorxTypes, EntryData> driver (machine, c->buffer, c->font->face);
StateTableDriver<Types, EntryData> driver (machine, c->buffer, c->font->face);
driver.drive (&dc);
return_trace (true);
@ -495,14 +632,14 @@ struct KerxSubTableFormat4
}
protected:
KerxSubTableHeader header;
StateTable<MorxTypes, EntryData>
machine;
HBUINT32 flags;
KernSubTableHeader header;
StateTable<Types, EntryData> machine;
HBUINT32 flags;
public:
DEFINE_SIZE_STATIC (32);
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 20);
};
template <typename KernSubTableHeader>
struct KerxSubTableFormat6
{
enum Flags
@ -518,7 +655,7 @@ struct KerxSubTableFormat6
unsigned int num_glyphs = c->sanitizer.get_num_glyphs ();
if (is_long ())
{
const U::Long &t = u.l;
const typename U::Long &t = u.l;
unsigned int l = (this+t.rowIndexTable).get_value_or_null (left, num_glyphs);
unsigned int r = (this+t.columnIndexTable).get_value_or_null (right, num_glyphs);
unsigned int offset = l + r;
@ -526,17 +663,17 @@ struct KerxSubTableFormat6
if (unlikely (hb_unsigned_mul_overflows (offset, sizeof (FWORD32)))) return 0;
const FWORD32 *v = &StructAtOffset<FWORD32> (&(this+t.array), offset * sizeof (FWORD32));
if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
return kerxTupleKern (*v, header.tupleCount, &(this+vector), c);
return kerxTupleKern (*v, header.tuple_count (), &(this+vector), c);
}
else
{
const U::Short &t = u.s;
const typename U::Short &t = u.s;
unsigned int l = (this+t.rowIndexTable).get_value_or_null (left, num_glyphs);
unsigned int r = (this+t.columnIndexTable).get_value_or_null (right, num_glyphs);
unsigned int offset = l + r;
const FWORD *v = &StructAtOffset<FWORD> (&(this+t.array), offset * sizeof (FWORD));
if (unlikely (!v->sanitize (&c->sanitizer))) return 0;
return kerxTupleKern (*v, header.tupleCount, &(this+vector), c);
return kerxTupleKern (*v, header.tuple_count (), &(this+vector), c);
}
}
@ -547,8 +684,11 @@ struct KerxSubTableFormat6
if (!c->plan->requested_kerning)
return false;
if (header.coverage & header.Backwards)
return false;
accelerator_t accel (*this, c);
hb_kern_machine_t<accelerator_t> machine (accel);
hb_kern_machine_t<accelerator_t> machine (accel, header.coverage & header.CrossStream);
machine.kern (c->font, c->buffer, c->plan->kern_mask);
return_trace (true);
@ -568,7 +708,7 @@ struct KerxSubTableFormat6
u.s.columnIndexTable.sanitize (c, this) &&
c->check_range (this, u.s.array)
)) &&
(header.tupleCount == 0 ||
(header.tuple_count () == 0 ||
c->check_range (this, vector))));
}
@ -586,7 +726,7 @@ struct KerxSubTableFormat6
};
protected:
KerxSubTableHeader header;
KernSubTableHeader header;
HBUINT32 flags;
HBUINT16 rowCount;
HBUINT16 columnCount;
@ -607,30 +747,52 @@ struct KerxSubTableFormat6
} u;
LOffsetTo<UnsizedArrayOf<FWORD>, false> vector;
public:
DEFINE_SIZE_STATIC (36);
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 24);
};
struct KerxTable
struct KerxSubTableHeader
{
friend struct kerx;
typedef ExtendedTypes Types;
inline unsigned int get_size (void) const { return u.header.length; }
inline unsigned int get_type (void) const { return u.header.coverage & SubtableType; }
inline unsigned int tuple_count (void) const { return tupleCount; }
inline bool is_horizontal (void) const { return !(coverage & Vertical); }
enum Coverage
{
Vertical = 0x80000000, /* Set if table has vertical kerning values. */
CrossStream = 0x40000000, /* Set if table has cross-stream kerning values. */
Variation = 0x20000000, /* Set if table has variation kerning values. */
Backwards = 0x10000000, /* If clear, process the glyphs forwards, that
* is, from first to last in the glyph stream.
* If set, process them from last to first.
* This flag only applies to state-table based
* 'kerx' subtables (types 1 and 4). */
Reserved = 0x0FFFFF00, /* Reserved, set to zero. */
SubtableType = 0x000000FF, /* Subtable type. */
Vertical = 0x80000000u, /* Set if table has vertical kerning values. */
CrossStream = 0x40000000u, /* Set if table has cross-stream kerning values. */
Variation = 0x20000000u, /* Set if table has variation kerning values. */
Backwards = 0x10000000u, /* If clear, process the glyphs forwards, that
* is, from first to last in the glyph stream.
* If set, process them from last to first.
* This flag only applies to state-table based
* 'kerx' subtables (types 1 and 4). */
Reserved = 0x0FFFFF00u, /* Reserved, set to zero. */
SubtableType= 0x000000FFu, /* Subtable type. */
};
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)));
}
public:
HBUINT32 length;
HBUINT32 coverage;
HBUINT32 tupleCount;
public:
DEFINE_SIZE_STATIC (12);
};
struct KerxSubTable
{
friend struct kerx;
inline unsigned int get_size (void) const { return u.header.length; }
inline unsigned int get_type (void) const { return u.header.coverage & u.header.SubtableType; }
template <typename context_t>
inline typename context_t::return_t dispatch (context_t *c) const
{
@ -656,16 +818,16 @@ struct KerxTable
return_trace (dispatch (c));
}
protected:
public:
union {
KerxSubTableHeader header;
KerxSubTableFormat0 format0;
KerxSubTableFormat1 format1;
KerxSubTableFormat2 format2;
KerxSubTableFormat4 format4;
KerxSubTableFormat6 format6;
KerxSubTableHeader header;
KerxSubTableFormat0<KerxSubTableHeader> format0;
KerxSubTableFormat1<KerxSubTableHeader> format1;
KerxSubTableFormat2<KerxSubTableHeader> format2;
KerxSubTableFormat4<KerxSubTableHeader> format4;
KerxSubTableFormat6<KerxSubTableHeader> format6;
} u;
public:
public:
DEFINE_SIZE_MIN (12);
};
@ -674,73 +836,142 @@ public:
* The 'kerx' Table
*/
struct kerx
template <typename T>
struct KerxTable
{
static const hb_tag_t tableTag = HB_AAT_TAG_kerx;
/* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */
inline const T* thiz (void) const { return static_cast<const T *> (this); }
inline bool has_data (void) const { return version != 0; }
inline bool has_cross_stream (void) const
{
typedef typename T::SubTable SubTable;
const SubTable *st = &thiz()->firstSubTable;
unsigned int count = thiz()->tableCount;
for (unsigned int i = 0; i < count; i++)
{
if (st->u.header.coverage & st->u.header.CrossStream)
return true;
st = &StructAfter<SubTable> (*st);
}
return false;
}
inline int get_h_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{
typedef typename T::SubTable SubTable;
int v = 0;
const SubTable *st = &thiz()->firstSubTable;
unsigned int count = thiz()->tableCount;
for (unsigned int i = 0; i < count; i++)
{
if ((st->u.header.coverage & (st->u.header.Variation | st->u.header.CrossStream)) ||
!st->u.header.is_horizontal ())
continue;
v += st->get_kerning (left, right);
st = &StructAfter<SubTable> (*st);
}
return v;
}
inline void apply (hb_aat_apply_context_t *c) const
inline bool apply (AAT::hb_aat_apply_context_t *c) const
{
typedef typename T::SubTable SubTable;
bool ret = false;
bool seenCrossStream = false;
c->set_lookup_index (0);
const KerxTable *table = &firstTable;
unsigned int count = tableCount;
const SubTable *st = &thiz()->firstSubTable;
unsigned int count = thiz()->tableCount;
for (unsigned int i = 0; i < count; i++)
{
bool reverse;
if (table->u.header.coverage & (KerxTable::CrossStream))
goto skip; /* We do NOT handle cross-stream. */
if (!T::Types::extended && (st->u.header.coverage & st->u.header.Variation))
goto skip;
if (HB_DIRECTION_IS_VERTICAL (c->buffer->props.direction) !=
bool (table->u.header.coverage & KerxTable::Vertical))
if (HB_DIRECTION_IS_HORIZONTAL (c->buffer->props.direction) != st->u.header.is_horizontal ())
goto skip;
reverse = bool (table->u.header.coverage & KerxTable::Backwards) !=
reverse = bool (st->u.header.coverage & st->u.header.Backwards) !=
HB_DIRECTION_IS_BACKWARD (c->buffer->props.direction);
if (!c->buffer->message (c->font, "start kerx subtable %d", c->lookup_index))
if (!c->buffer->message (c->font, "start %c%c%c%c subtable %d", HB_UNTAG (thiz()->tableTag), c->lookup_index))
goto skip;
if (!seenCrossStream &&
(st->u.header.coverage & st->u.header.CrossStream))
{
/* Attach all glyphs into a chain. */
seenCrossStream = true;
hb_glyph_position_t *pos = c->buffer->pos;
unsigned int count = c->buffer->len;
for (unsigned int i = 0; i < count; i++)
{
pos[i].attach_type() = ATTACH_TYPE_CURSIVE;
pos[i].attach_chain() = HB_DIRECTION_IS_FORWARD (c->buffer->props.direction) ? -1 : +1;
/* We intentionally don't set HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT,
* since there needs to be a non-zero attachment for post-positioning to
* be needed. */
}
}
if (reverse)
c->buffer->reverse ();
c->sanitizer.set_object (*table);
c->sanitizer.set_object (*st);
/* XXX Reverse-kern is not working yet...
* hb_kern_machine_t would need to know that it's reverse-kerning.
* Or better yet, make it work in reverse as well, so we don't have
* to reverse and reverse back? */
table->dispatch (c);
ret |= st->dispatch (c);
if (reverse)
c->buffer->reverse ();
(void) c->buffer->message (c->font, "end kerx subtable %d", c->lookup_index);
(void) c->buffer->message (c->font, "end %c%c%c%c subtable %d", HB_UNTAG (thiz()->tableTag), c->lookup_index);
skip:
table = &StructAfter<KerxTable> (*table);
st = &StructAfter<SubTable> (*st);
c->set_lookup_index (c->lookup_index + 1);
}
return ret;
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!version.sanitize (c) || version < 2 ||
!tableCount.sanitize (c))
if (unlikely (!thiz()->version.sanitize (c) ||
thiz()->version < T::minVersion ||
!thiz()->tableCount.sanitize (c)))
return_trace (false);
const KerxTable *table = &firstTable;
unsigned int count = tableCount;
typedef typename T::SubTable SubTable;
const SubTable *st = &thiz()->firstSubTable;
unsigned int count = thiz()->tableCount;
for (unsigned int i = 0; i < count; i++)
{
if (!table->sanitize (c))
if (unlikely (!st->sanitize (c)))
return_trace (false);
table = &StructAfter<KerxTable> (*table);
st = &StructAfter<SubTable> (*st);
}
return_trace (true);
}
};
struct kerx : KerxTable<kerx>
{
friend struct KerxTable<kerx>;
static const hb_tag_t tableTag = HB_AAT_TAG_kerx;
static const uint16_t minVersion = 2;
typedef KerxSubTableHeader SubTableHeader;
typedef SubTableHeader::Types Types;
typedef KerxSubTable SubTable;
inline bool has_data (void) const { return version; }
protected:
HBUINT16 version; /* The version number of the extended kerning table
@ -748,13 +979,14 @@ struct kerx
HBUINT16 unused; /* Set to 0. */
HBUINT32 tableCount; /* The number of subtables included in the extended kerning
* table. */
KerxTable firstTable; /* Subtables. */
SubTable firstSubTable; /* Subtables. */
/*subtableGlyphCoverageArray*/ /* Only if version >= 3. We don't use. */
public:
DEFINE_SIZE_MIN (8);
};
} /* namespace AAT */
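Note on the new KerxTable&lt;T&gt; base above: it uses the curiously recurring template pattern so 'kerx' and the OT 'kern' wrapper can share subtable iteration (has_cross_stream, get_h_kerning, apply), each concrete table only supplying its own header type and fields. A toy CRTP sketch with invented names, showing only the shape of that sharing and not the real table layout:

#include <cstdio>

/* Sketch: the base template reaches into the derived table via thiz(); the
 * derived struct just provides the data the shared loops iterate over. */
template <typename T>
struct TableBase
{
  const T *thiz () const { return static_cast<const T *> (this); }

  bool has_cross_stream () const
  {
    for (unsigned int i = 0; i < thiz ()->table_count; i++)
      if (thiz ()->subtable_cross_stream[i])
        return true;
    return false;
  }
};

struct ToyKerx : TableBase<ToyKerx>
{
  unsigned int table_count;
  bool subtable_cross_stream[4];
};

int main ()
{
  ToyKerx t;
  t.table_count = 2;
  t.subtable_cross_stream[0] = false;
  t.subtable_cross_stream[1] = true;
  printf ("has cross-stream subtable: %d\n", (int) t.has_cross_stream ());
  return 0;
}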

@ -375,21 +375,19 @@ struct LigatureEntry<true>
Reserved = 0x1FFF, /* These bits are reserved and should be set to 0. */
};
typedef struct
struct EntryData
{
HBUINT16 ligActionIndex; /* Index to the first ligActionTable entry
* for processing this group, if indicated
* by the flags. */
public:
DEFINE_SIZE_STATIC (2);
} EntryData;
};
template <typename Flags>
static inline bool performAction (Flags flags)
{ return flags & PerformAction; }
static inline bool performAction (const Entry<EntryData> *entry)
{ return entry->flags & PerformAction; }
template <typename Entry, typename Flags>
static inline unsigned int ligActionIndex (Entry &entry, Flags flags)
static inline unsigned int ligActionIndex (const Entry<EntryData> *entry)
{ return entry->data.ligActionIndex; }
};
template <>
@ -408,13 +406,11 @@ struct LigatureEntry<false>
typedef void EntryData;
template <typename Flags>
static inline bool performAction (Flags flags)
{ return flags & Offset; }
static inline bool performAction (const Entry<EntryData> *entry)
{ return entry->flags & Offset; }
template <typename Entry, typename Flags>
static inline unsigned int ligActionIndex (Entry &entry, Flags flags)
{ return flags & 0x3FFF; }
static inline unsigned int ligActionIndex (const Entry<EntryData> *entry)
{ return entry->flags & Offset; }
};
@ -428,11 +424,11 @@ struct LigatureSubtable
struct driver_context_t
{
static const bool in_place = false;
enum
{
DontAdvance = LigatureEntryT::DontAdvance,
};
static const bool in_place = false;
enum LigActionFlags
{
LigActionLast = 0x80000000, /* This is the last action in the list. This also
@ -458,16 +454,15 @@ struct LigatureSubtable
inline bool is_actionable (StateTableDriver<Types, EntryData> *driver HB_UNUSED,
const Entry<EntryData> *entry)
{
return LigatureEntryT::performAction (entry->flags);
return LigatureEntryT::performAction (entry);
}
inline bool transition (StateTableDriver<Types, EntryData> *driver,
const Entry<EntryData> *entry)
{
hb_buffer_t *buffer = driver->buffer;
unsigned int flags = entry->flags;
DEBUG_MSG (APPLY, nullptr, "Ligature transition at %d", buffer->idx);
if (flags & LigatureEntryT::SetComponent)
DEBUG_MSG (APPLY, nullptr, "Ligature transition at %u", buffer->idx);
if (entry->flags & LigatureEntryT::SetComponent)
{
if (unlikely (match_length >= ARRAY_LENGTH (match_positions)))
return false;
@ -477,16 +472,13 @@ struct LigatureSubtable
match_length--;
match_positions[match_length++] = buffer->out_len;
DEBUG_MSG (APPLY, nullptr, "Set component at %d", buffer->out_len);
DEBUG_MSG (APPLY, nullptr, "Set component at %u", buffer->out_len);
}
if (LigatureEntryT::performAction (flags))
if (LigatureEntryT::performAction (entry))
{
DEBUG_MSG (APPLY, nullptr, "Perform action with %d", match_length);
DEBUG_MSG (APPLY, nullptr, "Perform action with %u", match_length);
unsigned int end = buffer->out_len;
unsigned int action_idx = LigatureEntryT::ligActionIndex (entry, flags);
unsigned int action;
unsigned int ligature_idx = 0;
if (unlikely (!match_length))
return true;
@ -495,8 +487,13 @@ struct LigatureSubtable
return false; // TODO Work on previous instead?
unsigned int cursor = match_length;
unsigned int action_idx = LigatureEntryT::ligActionIndex (entry);
action_idx = Types::offsetToIndex (action_idx, table, ligAction.arrayZ);
const HBUINT32 *actionData = &ligAction[action_idx];
unsigned int ligature_idx = 0;
unsigned int action;
do
{
if (unlikely (!cursor))
@ -507,7 +504,7 @@ struct LigatureSubtable
break;
}
DEBUG_MSG (APPLY, nullptr, "Moving to stack position %d", cursor - 1);
DEBUG_MSG (APPLY, nullptr, "Moving to stack position %u", cursor - 1);
buffer->move_to (match_positions[--cursor]);
if (unlikely (!actionData->sanitize (&c->sanitizer))) return false;
@ -523,7 +520,7 @@ struct LigatureSubtable
if (unlikely (!componentData.sanitize (&c->sanitizer))) return false;
ligature_idx += componentData;
DEBUG_MSG (APPLY, nullptr, "Action store %d last %d",
DEBUG_MSG (APPLY, nullptr, "Action store %u last %u",
bool (action & LigActionStore),
bool (action & LigActionLast));
if (action & (LigActionStore | LigActionLast))
@ -533,7 +530,7 @@ struct LigatureSubtable
if (unlikely (!ligatureData.sanitize (&c->sanitizer))) return false;
hb_codepoint_t lig = ligatureData;
DEBUG_MSG (APPLY, nullptr, "Produced ligature %d", lig);
DEBUG_MSG (APPLY, nullptr, "Produced ligature %u", lig);
buffer->replace_glyph (lig);
unsigned int lig_end = match_positions[match_length - 1] + 1;
@ -1109,21 +1106,6 @@ struct mortmorx
}
}
inline static void remove_deleted_glyphs (hb_buffer_t *buffer)
{
if (unlikely (!buffer->successful)) return;
buffer->clear_output ();
for (buffer->idx = 0; buffer->idx < buffer->len && buffer->successful;)
{
if (unlikely (buffer->cur().codepoint == DELETED_GLYPH))
buffer->skip_glyph ();
else
buffer->next_glyph ();
}
buffer->swap_buffers ();
}
inline void apply (hb_aat_apply_context_t *c) const
{
if (unlikely (!c->buffer->successful)) return;
@ -1136,7 +1118,6 @@ struct mortmorx
if (unlikely (!c->buffer->successful)) return;
chain = &StructAfter<Chain<Types> > (*chain);
}
remove_deleted_glyphs (c->buffer);
}
inline bool sanitize (hb_sanitize_context_t *c) const
@ -1169,11 +1150,11 @@ struct mortmorx
DEFINE_SIZE_MIN (8);
};
struct morx : mortmorx<MorxTypes>
struct morx : mortmorx<ExtendedTypes>
{
static const hb_tag_t tableTag = HB_AAT_TAG_morx;
};
struct mort : mortmorx<MortTypes>
struct mort : mortmorx<ObsoleteTypes>
{
static const hb_tag_t tableTag = HB_AAT_TAG_mort;
};
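Note on the LigatureEntry change above: performAction and ligActionIndex now take the whole Entry&lt;EntryData&gt; pointer, so the extended and obsolete specializations expose a single call shape to the shared driver. A toy sketch of that specialization-by-bool pattern; all names below are invented for illustration:

#include <cstdio>

/* Sketch: both specializations expose performAction/actionIndex with the same
 * single-parameter signature, so the shared driver code is written only once. */
struct ToyEntryRecord { unsigned int flags; unsigned int action_index; };

template <bool extended> struct ToyEntry;

template <> struct ToyEntry<true>
{
  /* Extended layout: the action index lives in per-entry data. */
  static bool performAction (const ToyEntryRecord *e) { return e->action_index != 0xFFFFu; }
  static unsigned int actionIndex (const ToyEntryRecord *e) { return e->action_index; }
};

template <> struct ToyEntry<false>
{
  /* Obsolete layout: the action offset is packed into the low flag bits. */
  static bool performAction (const ToyEntryRecord *e) { return e->flags & 0x3FFFu; }
  static unsigned int actionIndex (const ToyEntryRecord *e) { return e->flags & 0x3FFFu; }
};

template <bool extended>
static void report (const ToyEntryRecord *e)
{
  typedef ToyEntry<extended> T;
  if (T::performAction (e))
    printf ("act at %u\n", T::actionIndex (e));
  else
    printf ("no action\n");
}

int main ()
{
  ToyEntryRecord ext = { 0x8000u, 12u };           /* extended-style entry */
  ToyEntryRecord obs = { 0x8000u | 0x0042u, 0u };  /* obsolete-style entry */
  report<true> (&ext);
  report<false> (&obs);
  return 0;
}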

@ -31,6 +31,7 @@
#include "hb-aat-layout-ankr-table.hh"
#include "hb-aat-layout-bsln-table.hh" // Just so we compile it; unused otherwise.
#include "hb-aat-layout-feat-table.hh" // Just so we compile it; unused otherwise.
#include "hb-aat-layout-just-table.hh" // Just so we compile it; unused otherwise.
#include "hb-aat-layout-kerx-table.hh"
#include "hb-aat-layout-morx-table.hh"
#include "hb-aat-layout-trak-table.hh"
@ -193,7 +194,7 @@ hb_aat_layout_compile_map (const hb_aat_map_builder_t *mapper,
}
hb_bool_t
bool
hb_aat_layout_has_substitution (hb_face_t *face)
{
return face->table.morx->has_data () ||
@ -224,8 +225,32 @@ hb_aat_layout_substitute (hb_ot_shape_plan_t *plan,
}
}
void
hb_aat_layout_zero_width_deleted_glyphs (hb_buffer_t *buffer)
{
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pos = buffer->pos;
for (unsigned int i = 0; i < count; i++)
if (unlikely (info[i].codepoint == AAT::DELETED_GLYPH))
pos[i].x_advance = pos[i].y_advance = pos[i].x_offset = pos[i].y_offset = 0;
}
static bool
is_deleted_glyph (const hb_glyph_info_t *info)
{
return info->codepoint == AAT::DELETED_GLYPH;
}
void
hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer)
{
hb_ot_layout_delete_glyphs_inplace (buffer, is_deleted_glyph);
}
hb_bool_t
bool
hb_aat_layout_has_positioning (hb_face_t *face)
{
return face->table.kerx->has_data ();
@ -248,7 +273,7 @@ hb_aat_layout_position (hb_ot_shape_plan_t *plan,
}
hb_bool_t
bool
hb_aat_layout_has_tracking (hb_face_t *face)
{
return face->table.trak->has_data ();
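Note on the new deleted-glyph handling above: hb_aat_layout_zero_width_deleted_glyphs zeroes advances and offsets of AAT's 0xFFFF deleted glyph, while actual removal is deferred to hb_ot_layout_delete_glyphs_inplace. A standalone sketch of the zeroing pass over plain arrays, with toy types rather than the hb_buffer_t API:

#include <cstdint>
#include <cstdio>

struct toy_pos_t { int32_t x_advance, y_advance, x_offset, y_offset; };

static const uint32_t TOY_DELETED_GLYPH = 0xFFFFu;

/* Sketch: zero every positioning field of glyphs the AAT machinery marked as
 * deleted; removal itself is left to a later in-place compaction pass. */
static void zero_deleted (const uint32_t *glyphs, toy_pos_t *pos, unsigned int count)
{
  for (unsigned int i = 0; i < count; i++)
    if (glyphs[i] == TOY_DELETED_GLYPH)
      pos[i].x_advance = pos[i].y_advance = pos[i].x_offset = pos[i].y_offset = 0;
}

int main ()
{
  uint32_t glyphs[3] = { 10, TOY_DELETED_GLYPH, 11 };
  toy_pos_t pos[3] = { {500,0,0,0}, {500,0,0,0}, {500,0,0,0} };
  zero_deleted (glyphs, pos, 3);
  printf ("%d %d %d\n", (int) pos[0].x_advance, (int) pos[1].x_advance, (int) pos[2].x_advance); /* 500 0 500 */
  return 0;
}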

@ -56,7 +56,7 @@ HB_INTERNAL void
hb_aat_layout_compile_map (const hb_aat_map_builder_t *mapper,
hb_aat_map_t *map);
HB_INTERNAL hb_bool_t
HB_INTERNAL bool
hb_aat_layout_has_substitution (hb_face_t *face);
HB_INTERNAL void
@ -64,7 +64,13 @@ hb_aat_layout_substitute (hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
HB_INTERNAL hb_bool_t
HB_INTERNAL void
hb_aat_layout_zero_width_deleted_glyphs (hb_buffer_t *buffer);
HB_INTERNAL void
hb_aat_layout_remove_deleted_glyphs (hb_buffer_t *buffer);
HB_INTERNAL bool
hb_aat_layout_has_positioning (hb_face_t *face);
HB_INTERNAL void
@ -72,7 +78,7 @@ hb_aat_layout_position (hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer);
HB_INTERNAL hb_bool_t
HB_INTERNAL bool
hb_aat_layout_has_tracking (hb_face_t *face);
HB_INTERNAL void

@ -0,0 +1,139 @@
/*
* Copyright © 2017 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Behdad Esfahbod
*/
#ifndef HB_KERN_HH
#define HB_KERN_HH
#include "hb-open-type.hh"
#include "hb-aat-layout-common.hh"
#include "hb-ot-layout-gpos-table.hh"
namespace OT {
template <typename Driver>
struct hb_kern_machine_t
{
hb_kern_machine_t (const Driver &driver_,
bool crossStream_ = false) :
driver (driver_),
crossStream (crossStream_) {}
HB_NO_SANITIZE_SIGNED_INTEGER_OVERFLOW
inline void kern (hb_font_t *font,
hb_buffer_t *buffer,
hb_mask_t kern_mask,
bool scale = true) const
{
OT::hb_ot_apply_context_t c (1, font, buffer);
c.set_lookup_mask (kern_mask);
c.set_lookup_props (OT::LookupFlag::IgnoreMarks);
OT::hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c.iter_input;
skippy_iter.init (&c);
bool horizontal = HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction);
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pos = buffer->pos;
for (unsigned int idx = 0; idx < count;)
{
if (!(info[idx].mask & kern_mask))
{
idx++;
continue;
}
skippy_iter.reset (idx, 1);
if (!skippy_iter.next ())
{
idx++;
continue;
}
unsigned int i = idx;
unsigned int j = skippy_iter.idx;
hb_position_t kern = driver.get_kerning (info[i].codepoint,
info[j].codepoint);
if (likely (!kern))
goto skip;
if (horizontal)
{
if (scale)
kern = font->em_scale_x (kern);
if (crossStream)
{
pos[j].y_offset = kern;
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
}
else
{
hb_position_t kern1 = kern >> 1;
hb_position_t kern2 = kern - kern1;
pos[i].x_advance += kern1;
pos[j].x_advance += kern2;
pos[j].x_offset += kern2;
}
}
else
{
if (scale)
kern = font->em_scale_y (kern);
if (crossStream)
{
pos[j].x_offset = kern;
buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
}
else
{
hb_position_t kern1 = kern >> 1;
hb_position_t kern2 = kern - kern1;
pos[i].y_advance += kern1;
pos[j].y_advance += kern2;
pos[j].y_offset += kern2;
}
}
buffer->unsafe_to_break (i, j + 1);
skip:
idx = skippy_iter.idx;
}
}
const Driver &driver;
bool crossStream;
};
} /* namespace OT */
#endif /* HB_KERN_HH */
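The non-cross-stream branches of the kern machine above split each pair kern between the two glyphs (first glyph's advance, second glyph's advance and offset), while the cross-stream branches put the whole value on the second glyph's cross-axis offset. A tiny standalone example of that split arithmetic, plain C++ rather than the HarfBuzz types, including a negative value:

#include <cassert>
#include <cstdio>
#include <initializer_list>

int main ()
{
  /* Mirrors kern1/kern2 in hb_kern_machine_t::kern() above: the halves
   * always sum back to the original pair value. */
  for (int kern : { -57, -1, 0, 31 })
  {
    int kern1 = kern >> 1;      /* Arithmetic shift on two's-complement targets. */
    int kern2 = kern - kern1;
    assert (kern1 + kern2 == kern);
    printf ("kern %+d -> first glyph %+d, second glyph %+d\n", kern, kern1, kern2);
  }
  return 0;
}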

@ -833,6 +833,17 @@ struct VarSizedBinSearchArrayOf
return_trace (false);
return_trace (true);
}
template <typename T>
inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
{
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
unsigned int count = header.nUnits;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!(*this)[i].sanitize (c, base, user_data)))
return_trace (false);
return_trace (true);
}
template <typename T>
inline const Type *bsearch (const T &key) const

@ -27,89 +27,7 @@
#ifndef HB_OT_KERN_TABLE_HH
#define HB_OT_KERN_TABLE_HH
#include "hb-open-type.hh"
#include "hb-ot-shape.hh"
#include "hb-ot-layout-gsubgpos.hh"
#include "hb-aat-layout-common.hh"
template <typename Driver>
struct hb_kern_machine_t
{
hb_kern_machine_t (const Driver &driver_) : driver (driver_) {}
HB_NO_SANITIZE_SIGNED_INTEGER_OVERFLOW
inline void kern (hb_font_t *font,
hb_buffer_t *buffer,
hb_mask_t kern_mask,
bool scale = true) const
{
OT::hb_ot_apply_context_t c (1, font, buffer);
c.set_lookup_mask (kern_mask);
c.set_lookup_props (OT::LookupFlag::IgnoreMarks);
OT::hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c.iter_input;
skippy_iter.init (&c);
bool horizontal = HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction);
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pos = buffer->pos;
for (unsigned int idx = 0; idx < count;)
{
if (!(info[idx].mask & kern_mask))
{
idx++;
continue;
}
skippy_iter.reset (idx, 1);
if (!skippy_iter.next ())
{
idx++;
continue;
}
unsigned int i = idx;
unsigned int j = skippy_iter.idx;
hb_position_t kern = driver.get_kerning (info[i].codepoint,
info[j].codepoint);
if (likely (!kern))
goto skip;
if (horizontal)
{
if (scale)
kern = font->em_scale_x (kern);
hb_position_t kern1 = kern >> 1;
hb_position_t kern2 = kern - kern1;
pos[i].x_advance += kern1;
pos[j].x_advance += kern2;
pos[j].x_offset += kern2;
}
else
{
if (scale)
kern = font->em_scale_y (kern);
hb_position_t kern1 = kern >> 1;
hb_position_t kern2 = kern - kern1;
pos[i].y_advance += kern1;
pos[j].y_advance += kern2;
pos[j].y_offset += kern2;
}
buffer->unsafe_to_break (i, j + 1);
skip:
idx = skippy_iter.idx;
}
}
const Driver &driver;
};
#include "hb-aat-layout-kerx-table.hh"
/*
@ -123,293 +41,6 @@ struct hb_kern_machine_t
namespace OT {
struct hb_glyph_pair_t
{
hb_codepoint_t left;
hb_codepoint_t right;
};
struct KernPair
{
inline int get_kerning (void) const
{ return value; }
inline int cmp (const hb_glyph_pair_t &o) const
{
int ret = left.cmp (o.left);
if (ret) return ret;
return right.cmp (o.right);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
protected:
GlyphID left;
GlyphID right;
FWORD value;
public:
DEFINE_SIZE_STATIC (6);
};
template <typename KernSubTableHeader>
struct KernSubTableFormat0
{
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{
hb_glyph_pair_t pair = {left, right};
int i = pairs.bsearch (pair);
if (i == -1)
return 0;
return pairs[i].get_kerning ();
}
inline bool apply (AAT::hb_aat_apply_context_t *c) const
{
TRACE_APPLY (this);
if (!c->plan->requested_kerning)
return false;
hb_kern_machine_t<KernSubTableFormat0> machine (*this);
machine.kern (c->font, c->buffer, c->plan->kern_mask);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (pairs.sanitize (c));
}
protected:
KernSubTableHeader header;
BinSearchArrayOf<KernPair> pairs; /* Array of kerning pairs. */
public:
DEFINE_SIZE_ARRAY (KernSubTableHeader::static_size + 8, pairs);
};
template <typename KernSubTableHeader>
struct KernSubTableFormat1
{
typedef void EntryData;
struct driver_context_t
{
static const bool in_place = true;
enum Flags
{
Push = 0x8000, /* If set, push this glyph on the kerning stack. */
DontAdvance = 0x4000, /* If set, don't advance to the next glyph
* before going to the new state. */
Offset = 0x3FFF, /* Byte offset from beginning of subtable to the
* value table for the glyphs on the kerning stack. */
};
inline driver_context_t (const KernSubTableFormat1 *table_,
AAT::hb_aat_apply_context_t *c_) :
c (c_),
table (table_),
/* Apparently the offset kernAction is from the beginning of the state-machine,
* similar to offsets in morx table, NOT from beginning of this table, like
* other subtables in kerx. Discovered via testing. */
kernAction (&table->machine + table->kernAction),
depth (0) {}
inline bool is_actionable (AAT::StateTableDriver<AAT::MortTypes, EntryData> *driver HB_UNUSED,
const AAT::Entry<EntryData> *entry)
{
return entry->flags & Offset;
}
inline bool transition (AAT::StateTableDriver<AAT::MortTypes, EntryData> *driver,
const AAT::Entry<EntryData> *entry)
{
hb_buffer_t *buffer = driver->buffer;
unsigned int flags = entry->flags;
if (flags & Push)
{
if (likely (depth < ARRAY_LENGTH (stack)))
stack[depth++] = buffer->idx;
else
depth = 0; /* Probably not what CoreText does, but better? */
}
if (entry->flags & Offset)
{
unsigned int kernIndex = AAT::MortTypes::offsetToIndex (entry->flags & Offset, &table->machine, kernAction.arrayZ);
const FWORD *actions = &kernAction[kernIndex];
if (!c->sanitizer.check_array (actions, depth))
{
depth = 0;
return false;
}
hb_mask_t kern_mask = c->plan->kern_mask;
for (unsigned int i = 0; i < depth; i++)
{
/* Apparently, when spec says "Each pops one glyph from the kerning stack
* and applies the kerning value to it.", it doesn't mean it in that order.
* The deepest item in the stack corresponds to the first item in the action
* list. Discovered by testing. */
unsigned int idx = stack[i];
int v = *actions++;
if (idx < buffer->len && buffer->info[idx].mask & kern_mask)
{
if (HB_DIRECTION_IS_HORIZONTAL (buffer->props.direction))
{
buffer->pos[idx].x_advance += c->font->em_scale_x (v);
if (HB_DIRECTION_IS_BACKWARD (buffer->props.direction))
buffer->pos[idx].x_offset += c->font->em_scale_x (v);
}
else
{
buffer->pos[idx].y_advance += c->font->em_scale_y (v);
if (HB_DIRECTION_IS_BACKWARD (buffer->props.direction))
buffer->pos[idx].y_offset += c->font->em_scale_y (v);
}
}
}
depth = 0;
}
return true;
}
private:
AAT::hb_aat_apply_context_t *c;
const KernSubTableFormat1 *table;
const UnsizedArrayOf<FWORD> &kernAction;
unsigned int stack[8];
unsigned int depth;
};
inline bool apply (AAT::hb_aat_apply_context_t *c) const
{
TRACE_APPLY (this);
if (!c->plan->requested_kerning)
return false;
driver_context_t dc (this, c);
AAT::StateTableDriver<AAT::MortTypes, EntryData> driver (machine, c->buffer, c->font->face);
driver.drive (&dc);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
/* The rest of array sanitizations are done at run-time. */
return_trace (likely (c->check_struct (this) &&
machine.sanitize (c)));
}
protected:
KernSubTableHeader header;
AAT::StateTable<AAT::MortTypes, EntryData> machine;
OffsetTo<UnsizedArrayOf<FWORD>, HBUINT16, false> kernAction;
public:
DEFINE_SIZE_STATIC (KernSubTableHeader::static_size + 10);
};
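A small standalone illustration (hypothetical values, not HarfBuzz code) of the ordering note in the transition() above: the deepest entry on the kerning stack is paired with the first value in the action list, exactly as the stack[i] / *actions++ loop walks them.

#include <cstdio>

int main ()
{
  unsigned int stack[] = { 2, 5, 7 };   /* Buffer indices, pushed in this order; 2 is deepest. */
  int actions[] = { -80, -40, -10 };    /* Values in the order they appear in the action list. */
  unsigned int depth = sizeof (stack) / sizeof (stack[0]);

  const int *action = actions;
  for (unsigned int i = 0; i < depth; i++)
    printf ("glyph at index %u gets kerning %d\n", stack[i], *action++);
  return 0;
}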
struct KernClassTable
{
inline unsigned int get_class (hb_codepoint_t g) const { return classes[g - firstGlyph]; }
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
classes.sanitize (c));
}
protected:
HBUINT16 firstGlyph; /* First glyph in class range. */
ArrayOf<HBUINT16> classes; /* Glyph classes. */
public:
DEFINE_SIZE_ARRAY (4, classes);
};
template <typename KernSubTableHeader>
struct KernSubTableFormat2
{
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right,
AAT::hb_aat_apply_context_t *c) const
{
/* This subtable is disabled. It's not clear to me *exactly* where the offsets are
* based from. I *think* they should be based from beginning of kern subtable wrapper,
* *NOT* "this". Since we know of no fonts that use this subtable, we are disabling
* it. Someday fix it and re-enable. */
return 0;
unsigned int l = (this+leftClassTable).get_class (left);
unsigned int r = (this+rightClassTable).get_class (right);
unsigned int offset = l + r;
const FWORD *v = &StructAtOffset<FWORD> (&(this+array), offset);
#if 0
if (unlikely ((const char *) v < (const char *) &array ||
(const char *) v > (const char *) end - 2))
#endif
return 0;
return *v;
}
inline bool apply (AAT::hb_aat_apply_context_t *c) const
{
TRACE_APPLY (this);
if (!c->plan->requested_kerning)
return false;
accelerator_t accel (*this, c);
hb_kern_machine_t<accelerator_t> machine (accel);
machine.kern (c->font, c->buffer, c->plan->kern_mask);
return_trace (true);
}
struct accelerator_t
{
const KernSubTableFormat2 &table;
AAT::hb_aat_apply_context_t *c;
inline accelerator_t (const KernSubTableFormat2 &table_,
AAT::hb_aat_apply_context_t *c_) :
table (table_), c (c_) {}
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{ return table.get_kerning (left, right, c); }
};
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (true); /* Disabled. See above. */
return_trace (c->check_struct (this) &&
leftClassTable.sanitize (c, this) &&
rightClassTable.sanitize (c, this) &&
array.sanitize (c, this));
}
protected:
KernSubTableHeader header;
HBUINT16 rowWidth; /* The width, in bytes, of a row in the table. */
OffsetTo<KernClassTable> leftClassTable; /* Offset from beginning of this subtable to
* left-hand class table. */
OffsetTo<KernClassTable> rightClassTable;/* Offset from beginning of this subtable to
* right-hand class table. */
OffsetTo<FWORD> array; /* Offset from beginning of this subtable to
* the start of the kerning array. */
public:
DEFINE_SIZE_MIN (KernSubTableHeader::static_size + 8);
};
template <typename KernSubTableHeader>
struct KernSubTableFormat3
{
@ -435,7 +66,10 @@ struct KernSubTableFormat3
if (!c->plan->requested_kerning)
return false;
hb_kern_machine_t<KernSubTableFormat3> machine (*this);
if (header.coverage & header.Backwards)
return false;
hb_kern_machine_t<KernSubTableFormat3> machine (*this, header.coverage & header.CrossStream);
machine.kern (c->font, c->buffer, c->plan->kern_mask);
return_trace (true);
@ -478,15 +112,6 @@ struct KernSubTable
inline unsigned int get_size (void) const { return u.header.length; }
inline unsigned int get_type (void) const { return u.header.format; }
inline bool is_simple (void) const
{ return !(u.header.coverage & (u.header.CrossStream | u.header.Variation)); }
inline bool is_horizontal (void) const
{ return (u.header.coverage & u.header.Direction) == u.header.DirectionHorizontal; }
inline bool is_override (void) const
{ return bool (u.header.coverage & u.header.Override); }
inline int get_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{
switch (get_type ()) {
@ -503,9 +128,9 @@ struct KernSubTable
TRACE_DISPATCH (this, subtable_type);
switch (subtable_type) {
case 0: return_trace (c->dispatch (u.format0));
case 1: return_trace (c->dispatch (u.format1));
case 1: return_trace (u.header.apple ? c->dispatch (u.format1) : c->default_return_value ());
case 2: return_trace (c->dispatch (u.format2));
case 3: return_trace (c->dispatch (u.format3));
case 3: return_trace (u.header.apple ? c->dispatch (u.format3) : c->default_return_value ());
default: return_trace (c->default_return_value ());
}
}
@ -520,12 +145,12 @@ struct KernSubTable
return_trace (dispatch (c));
}
protected:
public:
union {
KernSubTableHeader header;
KernSubTableFormat0<KernSubTableHeader> format0;
KernSubTableFormat1<KernSubTableHeader> format1;
KernSubTableFormat2<KernSubTableHeader> format2;
AAT::KerxSubTableFormat0<KernSubTableHeader> format0;
AAT::KerxSubTableFormat1<KernSubTableHeader> format1;
AAT::KerxSubTableFormat2<KernSubTableHeader> format2;
KernSubTableFormat3<KernSubTableHeader> format3;
} u;
public:
@ -533,203 +158,152 @@ struct KernSubTable
};
template <typename T>
struct KernTable
struct KernOTSubTableHeader
{
/* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */
inline const T* thiz (void) const { return static_cast<const T *> (this); }
static const bool apple = false;
typedef AAT::ObsoleteTypes Types;
inline int get_h_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{
typedef KernSubTable<typename T::SubTableHeader> SubTable;
int v = 0;
const SubTable *st = CastP<SubTable> (&thiz()->dataZ);
unsigned int count = thiz()->nTables;
for (unsigned int i = 0; i < count; i++)
{
if (!st->is_simple () || !st->is_horizontal ())
continue;
if (st->is_override ())
v = 0;
v += st->get_kerning (left, right);
st = &StructAfter<SubTable> (*st);
}
return v;
}
inline unsigned int tuple_count (void) const { return 0; }
inline bool is_horizontal (void) const { return (coverage & Horizontal); }
inline void apply (AAT::hb_aat_apply_context_t *c) const
enum Coverage
{
typedef KernSubTable<typename T::SubTableHeader> SubTable;
c->set_lookup_index (0);
const SubTable *st = CastP<SubTable> (&thiz()->dataZ);
unsigned int count = thiz()->nTables;
/* If there's an override subtable, skip subtables before that. */
unsigned int last_override = 0;
for (unsigned int i = 0; i < count; i++)
{
if (st->is_simple () && st->is_override ())
last_override = i;
st = &StructAfter<SubTable> (*st);
}
st = CastP<SubTable> (&thiz()->dataZ);
for (unsigned int i = 0; i < count; i++)
{
if (!st->is_simple ())
goto skip;
if (HB_DIRECTION_IS_HORIZONTAL (c->buffer->props.direction) != st->is_horizontal ())
goto skip;
if (i < last_override)
goto skip;
if (!c->buffer->message (c->font, "start kern subtable %d", c->lookup_index))
goto skip;
c->sanitizer.set_object (*st);
st->dispatch (c);
(void) c->buffer->message (c->font, "end kern subtable %d", c->lookup_index);
skip:
st = &StructAfter<SubTable> (*st);
}
}
Horizontal = 0x01u,
Minimum = 0x02u,
CrossStream = 0x04u,
Override = 0x08u,
/* Not supported: */
Backwards = 0x00u,
Variation = 0x00u,
};
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (thiz()) ||
thiz()->version != T::VERSION))
return_trace (false);
typedef KernSubTable<typename T::SubTableHeader> SubTable;
const SubTable *st = CastP<SubTable> (&thiz()->dataZ);
unsigned int count = thiz()->nTables;
for (unsigned int i = 0; i < count; i++)
{
if (unlikely (!st->sanitize (c)))
return_trace (false);
st = &StructAfter<SubTable> (*st);
}
return_trace (true);
return_trace (c->check_struct (this));
}
public:
HBUINT16 versionZ; /* Unused. */
HBUINT16 length; /* Length of the subtable (including this header). */
HBUINT8 format; /* Subtable format. */
HBUINT8 coverage; /* Coverage bits. */
public:
DEFINE_SIZE_STATIC (6);
};
struct KernOT : KernTable<KernOT>
struct KernOT : AAT::KerxTable<KernOT>
{
friend struct KernTable<KernOT>;
friend struct AAT::KerxTable<KernOT>;
static const uint16_t VERSION = 0x0000u;
struct SubTableHeader
{
enum Coverage
{
Direction = 0x01u,
Minimum = 0x02u,
CrossStream = 0x04u,
Override = 0x08u,
Variation = 0x00u, /* Not supported. */
DirectionHorizontal= 0x01u
};
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
static const hb_tag_t tableTag = HB_OT_TAG_kern;
static const uint16_t minVersion = 0;
public:
HBUINT16 versionZ; /* Unused. */
HBUINT16 length; /* Length of the subtable (including this header). */
HBUINT8 format; /* Subtable format. */
HBUINT8 coverage; /* Coverage bits. */
public:
DEFINE_SIZE_STATIC (6);
};
typedef KernOTSubTableHeader SubTableHeader;
typedef SubTableHeader::Types Types;
typedef KernSubTable<SubTableHeader> SubTable;
protected:
HBUINT16 version; /* Version--0x0000u */
HBUINT16 nTables; /* Number of subtables in the kerning table. */
UnsizedArrayOf<HBUINT8> dataZ;
HBUINT16 version; /* Version--0x0000u */
HBUINT16 tableCount; /* Number of subtables in the kerning table. */
SubTable firstSubTable; /* Subtables. */
public:
DEFINE_SIZE_ARRAY (4, dataZ);
DEFINE_SIZE_MIN (4);
};
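KernOT above (and KernAAT below) now reuse AAT::KerxTable<Self>, the same curiously recurring template pattern the removed KernTable wrapper documented. A minimal standalone sketch of that pattern with illustrative names, not the HarfBuzz types:

#include <cstdio>

template <typename T>
struct table_driver_t
{
  /* Shared driver code; thiz() recovers the concrete table so the base can
   * read its fields and constants without virtual dispatch. */
  const T* thiz () const { return static_cast<const T *> (this); }
  bool check_version () const { return thiz ()->version == T::minVersion; }
};

struct ot_kern_like_t : table_driver_t<ot_kern_like_t>
{
  static const unsigned int minVersion = 0;
  unsigned int version = 0;
};

int main ()
{
  ot_kern_like_t table;
  printf ("version ok: %d\n", table.check_version ());
  return 0;
}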
struct KernAAT : KernTable<KernAAT>
struct KernAATSubTableHeader
{
friend struct KernTable<KernAAT>;
static const bool apple = true;
typedef AAT::ObsoleteTypes Types;
static const uint32_t VERSION = 0x00010000u;
inline unsigned int tuple_count (void) const { return 0; }
inline bool is_horizontal (void) const { return !(coverage & Vertical); }
struct SubTableHeader
enum Coverage
{
enum Coverage
{
Direction = 0x80u,
CrossStream = 0x40u,
Variation = 0x20u,
Vertical = 0x80u,
CrossStream = 0x40u,
Variation = 0x20u,
/* Not supported: */
Backwards = 0x00u,
};
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
Override = 0x00u, /* Not supported. */
public:
HBUINT32 length; /* Length of the subtable (including this header). */
HBUINT8 coverage; /* Coverage bits. */
HBUINT8 format; /* Subtable format. */
HBUINT16 tupleIndex; /* The tuple index (used for variations fonts).
* This value specifies which tuple this subtable covers.
* Note: We don't implement. */
public:
DEFINE_SIZE_STATIC (8);
};
DirectionHorizontal= 0x00u
};
struct KernAAT : AAT::KerxTable<KernAAT>
{
friend struct AAT::KerxTable<KernAAT>;
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
static const hb_tag_t tableTag = HB_OT_TAG_kern;
static const uint32_t minVersion = 0x00010000u;
public:
HBUINT32 length; /* Length of the subtable (including this header). */
HBUINT8 coverage; /* Coverage bits. */
HBUINT8 format; /* Subtable format. */
HBUINT16 tupleIndex; /* The tuple index (used for variations fonts).
* This value specifies which tuple this subtable covers. */
public:
DEFINE_SIZE_STATIC (8);
};
typedef KernAATSubTableHeader SubTableHeader;
typedef SubTableHeader::Types Types;
typedef KernSubTable<SubTableHeader> SubTable;
protected:
HBUINT32 version; /* Version--0x00010000u */
HBUINT32 nTables; /* Number of subtables in the kerning table. */
UnsizedArrayOf<HBUINT8> dataZ;
HBUINT32 version; /* Version--0x00010000u */
HBUINT32 tableCount; /* Number of subtables in the kerning table. */
SubTable firstSubTable; /* Subtables. */
public:
DEFINE_SIZE_ARRAY (8, dataZ);
DEFINE_SIZE_MIN (8);
};
struct kern
{
static const hb_tag_t tableTag = HB_OT_TAG_kern;
inline bool has_data (void) const
{ return u.version32 != 0; }
inline bool has_data (void) const { return u.version32; }
inline unsigned int get_type (void) const { return u.major; }
inline bool has_cross_stream (void) const
{
switch (get_type ()) {
case 0: return u.ot.has_cross_stream ();
case 1: return u.aat.has_cross_stream ();
default:return false;
}
}
inline int get_h_kerning (hb_codepoint_t left, hb_codepoint_t right) const
{
switch (u.major) {
switch (get_type ()) {
case 0: return u.ot.get_h_kerning (left, right);
case 1: return u.aat.get_h_kerning (left, right);
default:return 0;
}
}
inline void apply (AAT::hb_aat_apply_context_t *c) const
inline bool apply (AAT::hb_aat_apply_context_t *c) const
{ return dispatch (c); }
template <typename context_t>
inline typename context_t::return_t dispatch (context_t *c) const
{
/* TODO Switch to dispatch(). */
switch (u.major) {
case 0: u.ot.apply (c); return;
case 1: u.aat.apply (c); return;
default: return;
unsigned int subtable_type = get_type ();
TRACE_DISPATCH (this, subtable_type);
switch (subtable_type) {
case 0: return_trace (c->dispatch (u.ot));
case 1: return_trace (c->dispatch (u.aat));
default: return_trace (c->default_return_value ());
}
}
@ -737,11 +311,7 @@ struct kern
{
TRACE_SANITIZE (this);
if (!u.version32.sanitize (c)) return_trace (false);
switch (u.major) {
case 0: return_trace (u.ot.sanitize (c));
case 1: return_trace (u.aat.sanitize (c));
default:return_trace (true);
}
return_trace (dispatch (c));
}
protected:

@ -103,56 +103,58 @@ struct ValueFormat : HBUINT16
inline unsigned int get_size (void) const
{ return get_len () * Value::static_size; }
void apply_value (hb_ot_apply_context_t *c,
bool apply_value (hb_ot_apply_context_t *c,
const void *base,
const Value *values,
hb_glyph_position_t &glyph_pos) const
{
bool ret = false;
unsigned int format = *this;
if (!format) return;
if (!format) return ret;
hb_font_t *font = c->font;
hb_bool_t horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);
bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);
if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++));
if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++));
if (format & xPlacement) glyph_pos.x_offset += font->em_scale_x (get_short (values++, &ret));
if (format & yPlacement) glyph_pos.y_offset += font->em_scale_y (get_short (values++, &ret));
if (format & xAdvance) {
if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values));
if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
values++;
}
/* y_advance values grow downward but font-space grows upward, hence negation */
if (format & yAdvance) {
if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values));
if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
values++;
}
if (!has_device ()) return;
if (!has_device ()) return ret;
bool use_x_device = font->x_ppem || font->num_coords;
bool use_y_device = font->y_ppem || font->num_coords;
if (!use_x_device && !use_y_device) return;
if (!use_x_device && !use_y_device) return ret;
const VariationStore &store = c->var_store;
/* pixel -> fractional pixel */
if (format & xPlaDevice) {
if (use_x_device) glyph_pos.x_offset += (base + get_device (values)).get_x_delta (font, store);
if (use_x_device) glyph_pos.x_offset += (base + get_device (values, &ret)).get_x_delta (font, store);
values++;
}
if (format & yPlaDevice) {
if (use_y_device) glyph_pos.y_offset += (base + get_device (values)).get_y_delta (font, store);
if (use_y_device) glyph_pos.y_offset += (base + get_device (values, &ret)).get_y_delta (font, store);
values++;
}
if (format & xAdvDevice) {
if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values)).get_x_delta (font, store);
if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
values++;
}
if (format & yAdvDevice) {
/* y_advance values grow downward but font-space grows upward, hence negation */
if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values)).get_y_delta (font, store);
if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
values++;
}
return ret;
}
private:
@ -175,11 +177,17 @@ struct ValueFormat : HBUINT16
static inline OffsetTo<Device>& get_device (Value* value)
{ return *CastP<OffsetTo<Device> > (value); }
static inline const OffsetTo<Device>& get_device (const Value* value)
{ return *CastP<OffsetTo<Device> > (value); }
static inline const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
{
if (worked) *worked |= *value;
return *CastP<OffsetTo<Device> > (value);
}
static inline const HBINT16& get_short (const Value* value)
{ return *CastP<HBINT16> (value); }
static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
{
if (worked) *worked |= *value;
return *CastP<HBINT16> (value);
}
public:
@ -263,10 +271,10 @@ struct AnchorFormat2
unsigned int x_ppem = font->x_ppem;
unsigned int y_ppem = font->y_ppem;
hb_position_t cx = 0, cy = 0;
hb_bool_t ret;
bool ret;
ret = (x_ppem || y_ppem) &&
font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
*x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
*y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
}
@ -672,9 +680,10 @@ struct PairSet
min = mid + 1;
else
{
buffer->unsafe_to_break (buffer->idx, pos + 1);
valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
/* Note the intentional use of "|" instead of short-circuit "||". */
if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
buffer->unsafe_to_break (buffer->idx, pos + 1);
if (len2)
pos++;
buffer->idx = pos;
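The "|" noted in the hunk above (and in the PairPosFormat2 hunk just below) matters because both apply_value() calls must run for their positioning side effects; a short-circuiting "||" would skip the second call whenever the first reports a change. A standalone sketch with stub names, not the GPOS types:

#include <cstdio>

static bool apply_value_stub (int value, int *offset)
{
  *offset += value;        /* Side effect that must always happen. */
  return value != 0;       /* Report whether anything actually moved. */
}

int main ()
{
  int first = 0, second = 0;
  /* With "||" the second call would never run here, losing its adjustment. */
  if (apply_value_stub (-40, &first) | apply_value_stub (-15, &second))
    printf ("something moved: first %d, second %d -> mark unsafe_to_break\n",
            first, second);
  return 0;
}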
@ -837,10 +846,11 @@ struct PairPosFormat2
unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
valueFormat1.apply_value (c, this, v, buffer->cur_pos());
valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
/* Note the intentional use of "|" instead of short-circuit "||". */
if (valueFormat1.apply_value (c, this, v, buffer->cur_pos()) |
valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]))
buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
buffer->idx = skippy_iter.idx;
if (len2)

@ -57,12 +57,18 @@
* kern
*/
hb_bool_t
bool
hb_ot_layout_has_kerning (hb_face_t *face)
{
return face->table.kern->has_data ();
}
bool
hb_ot_layout_has_cross_kerning (hb_face_t *face)
{
return face->table.kern->has_cross_stream ();
}
void
hb_ot_layout_kern (hb_ot_shape_plan_t *plan,
hb_font_t *font,
@ -417,7 +423,7 @@ hb_ot_layout_table_get_feature_tags (hb_face_t *face,
return g.get_feature_tags (start_offset, feature_count, feature_tags);
}
hb_bool_t
bool
hb_ot_layout_table_find_feature (hb_face_t *face,
hb_tag_t table_tag,
hb_tag_t feature_tag,
@ -927,12 +933,12 @@ hb_ot_layout_lookup_would_substitute (hb_face_t *face,
zero_context);
}
hb_bool_t
bool
hb_ot_layout_lookup_would_substitute_fast (hb_face_t *face,
unsigned int lookup_index,
const hb_codepoint_t *glyphs,
unsigned int glyphs_length,
hb_bool_t zero_context)
bool zero_context)
{
if (unlikely (lookup_index >= face->table.GSUB->lookup_count)) return false;
OT::hb_would_apply_context_t c (face, glyphs, glyphs_length, (bool) zero_context);
@ -949,6 +955,56 @@ hb_ot_layout_substitute_start (hb_font_t *font,
_hb_ot_layout_set_glyph_props (font, buffer);
}
void
hb_ot_layout_delete_glyphs_inplace (hb_buffer_t *buffer,
bool (*filter) (const hb_glyph_info_t *info))
{
/* Merge clusters and delete filtered glyphs.
* NOTE! We can't use out-buffer as we have positioning data. */
unsigned int j = 0;
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pos = buffer->pos;
for (unsigned int i = 0; i < count; i++)
{
if (filter (&info[i]))
{
/* Merge clusters.
* Same logic as buffer->delete_glyph(), but for in-place removal. */
unsigned int cluster = info[i].cluster;
if (i + 1 < count && cluster == info[i + 1].cluster)
continue; /* Cluster survives; do nothing. */
if (j)
{
/* Merge cluster backward. */
if (cluster < info[j - 1].cluster)
{
unsigned int mask = info[i].mask;
unsigned int old_cluster = info[j - 1].cluster;
for (unsigned k = j; k && info[k - 1].cluster == old_cluster; k--)
buffer->set_cluster (info[k - 1], cluster, mask);
}
continue;
}
if (i + 1 < count)
buffer->merge_clusters (i, i + 2); /* Merge cluster forward. */
continue;
}
if (j != i)
{
info[j] = info[i];
pos[j] = pos[i];
}
j++;
}
buffer->len = j;
}
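A simplified standalone model of the compaction loop above (cluster merging omitted, names illustrative): the glyph filter drops entries while the parallel position array stays aligned, which is why the out-buffer, which carries no positions, cannot be used here. The same pattern also serves the AAT DELETED_GLYPH removal earlier in this diff.

#include <cstdio>

struct info_t { unsigned int codepoint; };
struct pos_t  { int x_advance; };

static bool is_filtered (const info_t *info) { return info->codepoint == 0xFFFFu; }

int main ()
{
  info_t info[] = { {72}, {0xFFFFu}, {101}, {0xFFFFu}, {106} };
  pos_t  pos[]  = { {600}, {0}, {550}, {0}, {580} };
  unsigned int len = 5, j = 0;

  for (unsigned int i = 0; i < len; i++)
  {
    if (is_filtered (&info[i]))
      continue;                 /* Drop this glyph. */
    if (j != i)
    {
      info[j] = info[i];        /* Keep info and pos in lock step. */
      pos[j] = pos[i];
    }
    j++;
  }
  len = j;

  for (unsigned int i = 0; i < len; i++)
    printf ("glyph %u advance %d\n", info[i].codepoint, pos[i].x_advance);
  return 0;
}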
/**
* hb_ot_layout_lookup_substitute_closure:
*
@ -1275,10 +1331,8 @@ apply_backward (OT::hb_ot_apply_context_t *c,
if (accel.may_have (buffer->cur().codepoint) &&
(buffer->cur().mask & c->lookup_mask) &&
c->check_glyph_property (&buffer->cur(), c->lookup_props))
{
if (accel.apply (c))
ret = true;
}
ret |= accel.apply (c);
/* The reverse lookup doesn't "advance" cursor (for good reason). */
buffer->idx--;

@ -45,9 +45,12 @@ struct hb_ot_shape_plan_t;
* kern
*/
HB_INTERNAL hb_bool_t
HB_INTERNAL bool
hb_ot_layout_has_kerning (hb_face_t *face);
HB_INTERNAL bool
hb_ot_layout_has_cross_kerning (hb_face_t *face);
HB_INTERNAL void
hb_ot_layout_kern (hb_ot_shape_plan_t *plan,
hb_font_t *font,
@ -56,7 +59,7 @@ hb_ot_layout_kern (hb_ot_shape_plan_t *plan,
/* Private API corresponding to hb-ot-layout.h: */
HB_INTERNAL hb_bool_t
HB_INTERNAL bool
hb_ot_layout_table_find_feature (hb_face_t *face,
hb_tag_t table_tag,
hb_tag_t feature_tag,
@ -90,12 +93,12 @@ HB_MARK_AS_FLAG_T (hb_ot_layout_glyph_props_flags_t);
* GSUB/GPOS
*/
HB_INTERNAL hb_bool_t
HB_INTERNAL bool
hb_ot_layout_lookup_would_substitute_fast (hb_face_t *face,
unsigned int lookup_index,
const hb_codepoint_t *glyphs,
unsigned int glyphs_length,
hb_bool_t zero_context);
bool zero_context);
/* Should be called before all the substitute_lookup's are done. */
@ -103,6 +106,9 @@ HB_INTERNAL void
hb_ot_layout_substitute_start (hb_font_t *font,
hb_buffer_t *buffer);
HB_INTERNAL void
hb_ot_layout_delete_glyphs_inplace (hb_buffer_t *buffer,
bool (*filter) (const hb_glyph_info_t *info));
namespace OT {
struct hb_ot_apply_context_t;
@ -303,13 +309,13 @@ _hb_glyph_info_get_unicode_space_fallback_type (const hb_glyph_info_t *info)
static inline bool _hb_glyph_info_ligated (const hb_glyph_info_t *info);
static inline hb_bool_t
static inline bool
_hb_glyph_info_is_default_ignorable (const hb_glyph_info_t *info)
{
return (info->unicode_props() & UPROPS_MASK_IGNORABLE) &&
!_hb_glyph_info_ligated (info);
}
static inline hb_bool_t
static inline bool
_hb_glyph_info_is_default_ignorable_and_not_hidden (const hb_glyph_info_t *info)
{
return ((info->unicode_props() & (UPROPS_MASK_IGNORABLE|UPROPS_MASK_HIDDEN))
@ -363,17 +369,17 @@ _hb_glyph_info_is_unicode_format (const hb_glyph_info_t *info)
return _hb_glyph_info_get_general_category (info) ==
HB_UNICODE_GENERAL_CATEGORY_FORMAT;
}
static inline hb_bool_t
static inline bool
_hb_glyph_info_is_zwnj (const hb_glyph_info_t *info)
{
return _hb_glyph_info_is_unicode_format (info) && (info->unicode_props() & UPROPS_MASK_Cf_ZWNJ);
}
static inline hb_bool_t
static inline bool
_hb_glyph_info_is_zwj (const hb_glyph_info_t *info)
{
return _hb_glyph_info_is_unicode_format (info) && (info->unicode_props() & UPROPS_MASK_Cf_ZWJ);
}
static inline hb_bool_t
static inline bool
_hb_glyph_info_is_joiner (const hb_glyph_info_t *info)
{
return _hb_glyph_info_is_unicode_format (info) && (info->unicode_props() & (UPROPS_MASK_Cf_ZWNJ|UPROPS_MASK_Cf_ZWJ));

@ -25,7 +25,7 @@
*/
#include "hb-ot-shape-fallback.hh"
#include "hb-ot-kern-table.hh"
#include "hb-kern.hh"
static unsigned int
recategorize_combining_class (hb_codepoint_t u,
@ -464,9 +464,18 @@ _hb_ot_shape_fallback_kern (const hb_ot_shape_plan_t *plan,
!font->has_glyph_h_kerning_func () :
!font->has_glyph_v_kerning_func ())
return;
bool reverse = HB_DIRECTION_IS_BACKWARD (buffer->props.direction);
if (reverse)
buffer->reverse ();
hb_ot_shape_fallback_kern_driver_t driver (font, buffer);
hb_kern_machine_t<hb_ot_shape_fallback_kern_driver_t> machine (driver);
OT::hb_kern_machine_t<hb_ot_shape_fallback_kern_driver_t> machine (driver);
machine.kern (font, buffer, plan->kern_mask, false);
if (reverse)
buffer->reverse ();
}

@ -124,20 +124,19 @@ hb_ot_shape_planner_t::compile (hb_ot_shape_plan_t &plan,
else if (hb_aat_layout_has_positioning (face))
plan.apply_kerx = true;
if (plan.requested_kerning && !plan.apply_kerx && !has_gpos_kern)
if (!plan.apply_kerx && !has_gpos_kern)
{
/* Apparently Apple applies kerx if GPOS kern was not applied. */
if (hb_aat_layout_has_positioning (face))
plan.apply_kerx = true;
if (hb_ot_layout_has_kerning (face))
else if (hb_ot_layout_has_kerning (face))
plan.apply_kern = true;
else
plan.fallback_kerning = true;
}
bool has_kern_mark = plan.apply_kern && hb_ot_layout_has_cross_kerning (face);
plan.zero_marks = !plan.apply_kerx && !has_kern_mark;
plan.has_gpos_mark = !!plan.map.get_1_mask (HB_TAG ('m','a','r','k'));
if (!plan.apply_gpos && !plan.apply_kerx)
plan.fallback_mark_positioning = true;
plan.fallback_mark_positioning = !plan.apply_gpos && !plan.apply_kerx && !has_kern_mark;
/* Currently we always apply trak. */
plan.apply_trak = plan.requested_tracking && hb_aat_layout_has_tracking (face);
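A standalone condensation of the kerning decision above (illustrative names, not the planner API; the real planner may already have chosen kerx earlier): kerx is preferred when GPOS kern is not in play, the legacy kern table comes next, and mark zeroing plus fallback mark positioning now also depend on whether that kern table is cross-stream.

#include <cstdio>

struct plan_flags_t
{
  bool apply_kerx, apply_kern, zero_marks, fallback_mark_positioning;
};

static plan_flags_t decide (bool has_gpos_kern, bool apply_gpos,
                            bool has_kerx, bool has_kern, bool kern_is_cross_stream)
{
  plan_flags_t p = { false, false, false, false };
  if (!has_gpos_kern)
  {
    if (has_kerx)      p.apply_kerx = true;   /* Apple applies kerx when GPOS kern is absent. */
    else if (has_kern) p.apply_kern = true;
  }
  bool has_kern_mark = p.apply_kern && kern_is_cross_stream;
  p.zero_marks = !p.apply_kerx && !has_kern_mark;
  p.fallback_mark_positioning = !apply_gpos && !p.apply_kerx && !has_kern_mark;
  return p;
}

int main ()
{
  plan_flags_t p = decide (false, false, false, true, true);
  printf ("kerx=%d kern=%d zero_marks=%d fallback_marks=%d\n",
          p.apply_kerx, p.apply_kern, p.zero_marks, p.fallback_mark_positioning);
  return 0;
}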
@ -477,7 +476,9 @@ hb_ensure_native_direction (hb_buffer_t *buffer)
}
/* Substitute */
/*
* Substitute
*/
static inline void
hb_ot_mirror_chars (const hb_ot_shape_context_t *c)
@ -583,10 +584,8 @@ hb_ot_shape_setup_masks (const hb_ot_shape_context_t *c)
}
static void
hb_ot_zero_width_default_ignorables (const hb_ot_shape_context_t *c)
hb_ot_zero_width_default_ignorables (const hb_buffer_t *buffer)
{
hb_buffer_t *buffer = c->buffer;
if (!(buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES) ||
(buffer->flags & HB_BUFFER_FLAG_PRESERVE_DEFAULT_IGNORABLES) ||
(buffer->flags & HB_BUFFER_FLAG_REMOVE_DEFAULT_IGNORABLES))
@ -602,21 +601,19 @@ hb_ot_zero_width_default_ignorables (const hb_ot_shape_context_t *c)
}
static void
hb_ot_hide_default_ignorables (const hb_ot_shape_context_t *c)
hb_ot_hide_default_ignorables (hb_buffer_t *buffer,
hb_font_t *font)
{
hb_buffer_t *buffer = c->buffer;
if (!(buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES) ||
(buffer->flags & HB_BUFFER_FLAG_PRESERVE_DEFAULT_IGNORABLES))
return;
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
hb_glyph_position_t *pos = buffer->pos;
hb_codepoint_t invisible = c->buffer->invisible;
hb_codepoint_t invisible = buffer->invisible;
if (!(buffer->flags & HB_BUFFER_FLAG_REMOVE_DEFAULT_IGNORABLES) &&
(invisible || c->font->get_nominal_glyph (' ', &invisible)))
(invisible || font->get_nominal_glyph (' ', &invisible)))
{
/* Replace default-ignorables with a zero-advance invisible glyph. */
for (unsigned int i = 0; i < count; i++)
@ -626,49 +623,7 @@ hb_ot_hide_default_ignorables (const hb_ot_shape_context_t *c)
}
}
else
{
/* Merge clusters and delete default-ignorables.
* NOTE! We can't use out-buffer as we have positioning data. */
unsigned int j = 0;
for (unsigned int i = 0; i < count; i++)
{
if (_hb_glyph_info_is_default_ignorable (&info[i]))
{
/* Merge clusters.
* Same logic as buffer->delete_glyph(), but for in-place removal. */
unsigned int cluster = info[i].cluster;
if (i + 1 < count && cluster == info[i + 1].cluster)
continue; /* Cluster survives; do nothing. */
if (j)
{
/* Merge cluster backward. */
if (cluster < info[j - 1].cluster)
{
unsigned int mask = info[i].mask;
unsigned int old_cluster = info[j - 1].cluster;
for (unsigned k = j; k && info[k - 1].cluster == old_cluster; k--)
buffer->set_cluster (info[k - 1], cluster, mask);
}
continue;
}
if (i + 1 < count)
buffer->merge_clusters (i, i + 2); /* Merge cluster forward. */
continue;
}
if (j != i)
{
info[j] = info[i];
pos[j] = pos[i];
}
j++;
}
buffer->len = j;
}
hb_ot_layout_delete_glyphs_inplace (buffer, _hb_glyph_info_is_default_ignorable);
}
@ -685,10 +640,10 @@ hb_ot_map_glyphs_fast (hb_buffer_t *buffer)
}
static inline void
hb_synthesize_glyph_classes (const hb_ot_shape_context_t *c)
hb_synthesize_glyph_classes (hb_buffer_t *buffer)
{
unsigned int count = c->buffer->len;
hb_glyph_info_t *info = c->buffer->info;
unsigned int count = buffer->len;
hb_glyph_info_t *info = buffer->info;
for (unsigned int i = 0; i < count; i++)
{
hb_ot_layout_glyph_props_flags_t klass;
@ -740,7 +695,7 @@ hb_ot_substitute_complex (const hb_ot_shape_context_t *c)
hb_ot_layout_substitute_start (c->font, buffer);
if (c->plan->fallback_glyph_classes)
hb_synthesize_glyph_classes (c);
hb_synthesize_glyph_classes (c->buffer);
if (unlikely (c->plan->apply_morx))
hb_aat_layout_substitute (c->plan, c->font, c->buffer);
@ -749,7 +704,7 @@ hb_ot_substitute_complex (const hb_ot_shape_context_t *c)
}
static inline void
hb_ot_substitute (const hb_ot_shape_context_t *c)
hb_ot_substitute_pre (const hb_ot_shape_context_t *c)
{
hb_ot_substitute_default (c);
@ -758,7 +713,21 @@ hb_ot_substitute (const hb_ot_shape_context_t *c)
hb_ot_substitute_complex (c);
}
/* Position */
static inline void
hb_ot_substitute_post (const hb_ot_shape_context_t *c)
{
hb_ot_hide_default_ignorables (c->buffer, c->font);
if (c->plan->apply_morx)
hb_aat_layout_remove_deleted_glyphs (c->buffer);
if (c->plan->shaper->postprocess_glyphs)
c->plan->shaper->postprocess_glyphs (c->plan, c->buffer, c->font);
}
/*
* Position
*/
static inline void
adjust_mark_offsets (hb_glyph_position_t *pos)
@ -853,7 +822,7 @@ hb_ot_position_complex (const hb_ot_shape_context_t *c)
hb_ot_layout_position_start (c->font, c->buffer);
if (!c->plan->apply_kerx)
if (c->plan->zero_marks)
switch (c->plan->shaper->zero_width_marks)
{
case HB_OT_SHAPE_ZERO_WIDTH_MARKS_BY_GDEF_EARLY:
@ -870,11 +839,15 @@ hb_ot_position_complex (const hb_ot_shape_context_t *c)
c->plan->position (c->font, c->buffer);
else if (c->plan->apply_kerx)
hb_aat_layout_position (c->plan, c->font, c->buffer);
else if (c->plan->apply_kern)
hb_ot_layout_kern (c->plan, c->font, c->buffer);
else
_hb_ot_shape_fallback_kern (c->plan, c->font, c->buffer);
if (c->plan->apply_trak)
hb_aat_layout_track (c->plan, c->font, c->buffer);
if (!c->plan->apply_kerx)
if (c->plan->zero_marks)
switch (c->plan->shaper->zero_width_marks)
{
case HB_OT_SHAPE_ZERO_WIDTH_MARKS_BY_GDEF_LATE:
@ -887,9 +860,11 @@ hb_ot_position_complex (const hb_ot_shape_context_t *c)
break;
}
/* Finishing off GPOS has to follow a certain order. */
/* Finish off. Has to follow a certain order. */
hb_ot_layout_position_finish_advances (c->font, c->buffer);
hb_ot_zero_width_default_ignorables (c);
hb_ot_zero_width_default_ignorables (c->buffer);
if (c->plan->apply_morx)
hb_aat_layout_zero_width_deleted_glyphs (c->buffer);
hb_ot_layout_position_finish_offsets (c->font, c->buffer);
/* The nil glyph_h_origin() func returns 0, so no need to apply it. */
@ -898,6 +873,9 @@ hb_ot_position_complex (const hb_ot_shape_context_t *c)
c->font->subtract_glyph_h_origin (info[i].codepoint,
&pos[i].x_offset,
&pos[i].y_offset);
if (c->plan->fallback_mark_positioning && c->plan->shaper->fallback_position)
_hb_ot_shape_fallback_mark_position (c->plan, c->font, c->buffer);
}
static inline void
@ -909,19 +887,9 @@ hb_ot_position (const hb_ot_shape_context_t *c)
hb_ot_position_complex (c);
if (c->plan->fallback_mark_positioning && c->plan->shaper->fallback_position)
_hb_ot_shape_fallback_mark_position (c->plan, c->font, c->buffer);
if (HB_DIRECTION_IS_BACKWARD (c->buffer->props.direction))
hb_buffer_reverse (c->buffer);
/* Visual fallback goes here. */
if (c->plan->apply_kern)
hb_ot_layout_kern (c->plan, c->font, c->buffer);
else if (c->plan->fallback_kerning)
_hb_ot_shape_fallback_kern (c->plan, c->font, c->buffer);
_hb_buffer_deallocate_gsubgpos_vars (c->buffer);
}
@ -987,13 +955,9 @@ hb_ot_shape_internal (hb_ot_shape_context_t *c)
if (c->plan->shaper->preprocess_text)
c->plan->shaper->preprocess_text (c->plan, c->buffer, c->font);
hb_ot_substitute (c);
hb_ot_substitute_pre (c);
hb_ot_position (c);
hb_ot_hide_default_ignorables (c);
if (c->plan->shaper->postprocess_glyphs)
c->plan->shaper->postprocess_glyphs (c->plan, c->buffer, c->font);
hb_ot_substitute_post (c);
hb_propagate_flags (c->buffer);

@ -51,8 +51,8 @@ struct hb_ot_shape_plan_t
bool requested_tracking : 1;
bool has_frac : 1;
bool has_gpos_mark : 1;
bool zero_marks : 1;
bool fallback_glyph_classes : 1;
bool fallback_kerning : 1;
bool fallback_mark_positioning : 1;
bool apply_gpos : 1;

@ -25,18 +25,20 @@ extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size)
hb_buffer_destroy (buffer);
}
uint32_t text32[16];
if (size > sizeof (text32)) {
memcpy(text32, data + size - sizeof (text32), sizeof (text32));
hb_buffer_t *buffer = hb_buffer_create ();
hb_buffer_add_utf32 (buffer, text32, sizeof (text32) / sizeof (text32[0]), 0, -1);
hb_buffer_guess_segment_properties (buffer);
hb_shape (font, buffer, NULL, 0);
hb_buffer_destroy (buffer);
uint32_t text32[16] = {0};
unsigned int len = sizeof (text32);
if (size < len)
len = size;
memcpy(text32, data + size - len, len);
/* Misc calls on face. */
test_face (face, text32[15]);
}
hb_buffer_t *buffer = hb_buffer_create ();
hb_buffer_add_utf32 (buffer, text32, sizeof (text32) / sizeof (text32[0]), 0, -1);
hb_buffer_guess_segment_properties (buffer);
hb_shape (font, buffer, NULL, 0);
hb_buffer_destroy (buffer);
/* Misc calls on face. */
test_face (face, text32[15]);
hb_font_destroy (font);
hb_face_destroy (face);
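The fuzzer change above replaces the old "only if the input is long enough" guard with a zero-initialized text buffer and a clamped copy from the tail of the input, so short inputs get shaped too. A standalone sketch of that clamping with illustrative data:

#include <cstdint>
#include <cstdio>
#include <cstring>

int main ()
{
  const uint8_t data[] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };
  size_t size = sizeof (data);

  uint32_t text32[16] = {0};
  unsigned int len = sizeof (text32);
  if (size < len)
    len = (unsigned int) size;          /* Never read before the start of the input. */
  memcpy (text32, data + size - len, len);

  printf ("copied %u of %zu input bytes into a %zu-byte text buffer\n",
          len, size, sizeof (text32));
  return 0;
}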
