Port some VAR arrays to UnsizedArrayOf<>

Fix avar sanitize().
pull/803/merge
Behdad Esfahbod 6 years ago
parent 1bc7a8d6c4
commit bc485a9812
  1. src/hb-ot-layout-gsubgpos.hh (59)
  2. src/hb-ot-var-avar-table.hh (10)
  3. src/hb-ot-var-fvar-table.hh (7)
  4. src/hb-ot-var-hvar-table.hh (9)
  5. src/hb-ot-var-mvar-table.hh (9)
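
The change in all five files follows one pattern: a trailing name[VAR] member becomes an UnsizedArrayOf<> member, and code that previously relied on the C array decaying to a pointer now passes its arrayZ field explicitly. The sketch below is an illustrative stand-in, not HarfBuzz's actual template (which also plugs into the sanitizer); it only shows the shape of the change, and all "Sketch" names are invented for illustration.

    #include <cstdint>

    // Illustrative stand-in for UnsizedArrayOf<>: a length-less trailing
    // array whose real extent comes from a separate count field and is
    // validated elsewhere (sanitize()) against the enclosing blob.
    template <typename Type>
    struct UnsizedArrayOfSketch
    {
      const Type& operator [] (unsigned int i) const { return arrayZ[i]; }

      Type arrayZ[1]; // flexible-array stand-in; indexed only up to the
                      // count the owning table declares
    };

    struct RuleSketch
    {
      uint16_t inputCount;  // includes the first glyph
      uint16_t lookupCount;
      UnsizedArrayOfSketch<uint16_t> inputZ;  // was: HBUINT16 inputZ[VAR]

      // Callers that used to pass the decayed pointer inputZ now pass
      // inputZ.arrayZ explicitly, as the hunks below do.
      const uint16_t *inputs () const { return inputZ.arrayZ; }
    };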

@@ -1222,42 +1222,42 @@ struct Rule
 inline bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
 {
 return context_intersects (glyphs,
-inputCount, inputZ,
+inputCount, inputZ.arrayZ,
 lookup_context);
 }
 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
 {
 TRACE_CLOSURE (this);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
+const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAtOffset<UnsizedArrayOf<LookupRecord> > (inputZ.arrayZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
 context_closure_lookup (c,
-inputCount, inputZ,
-lookupCount, lookupRecord,
+inputCount, inputZ.arrayZ,
+lookupCount, lookupRecord.arrayZ,
 lookup_context);
 }
 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
 {
 TRACE_COLLECT_GLYPHS (this);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
+const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAtOffset<UnsizedArrayOf<LookupRecord> > (inputZ.arrayZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
 context_collect_glyphs_lookup (c,
-inputCount, inputZ,
-lookupCount, lookupRecord,
+inputCount, inputZ.arrayZ,
+lookupCount, lookupRecord.arrayZ,
 lookup_context);
 }
 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
 {
 TRACE_WOULD_APPLY (this);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
-return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
+const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAtOffset<UnsizedArrayOf<LookupRecord> > (inputZ.arrayZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
+return_trace (context_would_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
 }
 inline bool apply (hb_ot_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
 {
 TRACE_APPLY (this);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
-return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
+const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAtOffset<UnsizedArrayOf<LookupRecord> > (inputZ.arrayZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
+return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
 }
 public:
@@ -1266,7 +1266,7 @@ struct Rule
 TRACE_SANITIZE (this);
 return_trace (inputCount.sanitize (c) &&
 lookupCount.sanitize (c) &&
-c->check_range (inputZ,
+c->check_range (inputZ.arrayZ,
 inputZ[0].static_size * (inputCount ? inputCount - 1 : 0) +
 LookupRecord::static_size * lookupCount));
 }
@@ -1276,9 +1276,11 @@ struct Rule
 * glyph sequence--includes the first
 * glyph */
 HBUINT16 lookupCount; /* Number of LookupRecords */
-HBUINT16 inputZ[VAR]; /* Array of match inputs--start with
+UnsizedArrayOf<HBUINT16>
+inputZ; /* Array of match inputs--start with
 * second glyph */
-/*LookupRecord lookupRecordX[VAR];*/ /* Array of LookupRecords--in
+/*UnsizedArrayOf<LookupRecord>
+lookupRecordX;*/ /* Array of LookupRecords--in
 * design order */
 public:
 DEFINE_SIZE_ARRAY (4, inputZ);
@@ -1595,7 +1597,7 @@ struct ContextFormat3
 this
 };
 return context_intersects (glyphs,
-glyphCount, (const HBUINT16 *) (coverageZ + 1),
+glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
 lookup_context);
 }
@@ -1605,13 +1607,13 @@ struct ContextFormat3
 if (!(this+coverageZ[0]).intersects (c->glyphs))
 return;
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
+const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ.arrayZ, coverageZ[0].static_size * glyphCount);
 struct ContextClosureLookupContext lookup_context = {
 {intersects_coverage},
 this
 };
 context_closure_lookup (c,
-glyphCount, (const HBUINT16 *) (coverageZ + 1),
+glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
 lookupCount, lookupRecord,
 lookup_context);
 }
@@ -1621,14 +1623,14 @@ struct ContextFormat3
 TRACE_COLLECT_GLYPHS (this);
 (this+coverageZ[0]).add_coverage (c->input);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
+const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ.arrayZ, coverageZ[0].static_size * glyphCount);
 struct ContextCollectGlyphsLookupContext lookup_context = {
 {collect_coverage},
 this
 };
 context_collect_glyphs_lookup (c,
-glyphCount, (const HBUINT16 *) (coverageZ + 1),
+glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
 lookupCount, lookupRecord,
 lookup_context);
 }
@@ -1637,12 +1639,12 @@ struct ContextFormat3
 {
 TRACE_WOULD_APPLY (this);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
+const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ.arrayZ, coverageZ[0].static_size * glyphCount);
 struct ContextApplyLookupContext lookup_context = {
 {match_coverage},
 this
 };
-return_trace (context_would_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
+return_trace (context_would_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
 }
 inline const Coverage &get_coverage (void) const
@@ -1654,12 +1656,12 @@ struct ContextFormat3
 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
 if (likely (index == NOT_COVERED)) return_trace (false);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
+const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ.arrayZ, coverageZ[0].static_size * glyphCount);
 struct ContextApplyLookupContext lookup_context = {
 {match_coverage},
 this
 };
-return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
+return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
 }
 inline bool subset (hb_subset_context_t *c) const
@@ -1675,10 +1677,10 @@ struct ContextFormat3
 if (!c->check_struct (this)) return_trace (false);
 unsigned int count = glyphCount;
 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
-if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
+if (!c->check_array (coverageZ.arrayZ, coverageZ[0].static_size, count)) return_trace (false);
 for (unsigned int i = 0; i < count; i++)
 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
-const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
+const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ.arrayZ, coverageZ[0].static_size * count);
 return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
 }
@@ -1687,10 +1689,11 @@ struct ContextFormat3
 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
 * sequence */
 HBUINT16 lookupCount; /* Number of LookupRecords */
-OffsetTo<Coverage>
-coverageZ[VAR]; /* Array of offsets to Coverage
+UnsizedArrayOf<OffsetTo<Coverage> >
+coverageZ; /* Array of offsets to Coverage
 * table in glyph sequence order */
-/*LookupRecord lookupRecordX[VAR];*/ /* Array of LookupRecords--in
+/*UnsizedArrayOf<LookupRecord>
+lookupRecordX;*/ /* Array of LookupRecords--in
 * design order */
 public:
 DEFINE_SIZE_ARRAY (6, coverageZ);

@@ -105,10 +105,11 @@ struct avar
 TRACE_SANITIZE (this);
 if (unlikely (!(version.sanitize (c) &&
 version.major == 1 &&
-c->check_struct (this))))
+c->check_struct (this),
+c->check_array(axisSegmentMapsZ.arrayZ, sizeof (axisSegmentMapsZ[0]), axisCount))))
 return_trace (false);
-const SegmentMaps *map = axisSegmentMapsZ;
+const SegmentMaps *map = axisSegmentMapsZ.arrayZ;
 unsigned int count = axisCount;
 for (unsigned int i = 0; i < count; i++)
 {
@@ -124,7 +125,7 @@ struct avar
 {
 unsigned int count = MIN<unsigned int> (coords_length, axisCount);
-const SegmentMaps *map = axisSegmentMapsZ;
+const SegmentMaps *map = axisSegmentMapsZ.arrayZ;
 for (unsigned int i = 0; i < count; i++)
 {
 coords[i] = map->map (coords[i]);
@@ -139,7 +140,8 @@ struct avar
 HBUINT16 axisCount; /* The number of variation axes in the font. This
 * must be the same number as axisCount in the
 * 'fvar' table. */
-SegmentMaps axisSegmentMapsZ[VAR];
+UnsizedArrayOf<SegmentMaps>
+axisSegmentMapsZ;
 public:
 DEFINE_SIZE_MIN (8);
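
The avar hunks above are the "Fix avar sanitize()" part of the commit message: before walking the per-axis SegmentMaps, the sanitizer now also bounds-checks that axisCount map records fit inside the table (the added c->check_array call). A standalone sketch of that extra bound check follows, using illustrative parameter names rather than HarfBuzz's hb_sanitize_context_t API.

    #include <cstddef>
    #include <cstdint>

    // Check that 'axis_count' minimum-size map records, starting at
    // 'maps_offset', lie within 'table_length' bytes of table data.
    // All names here are illustrative, not HarfBuzz's.
    static bool axis_maps_in_bounds (size_t table_length,
                                     size_t maps_offset,
                                     unsigned int axis_count,
                                     size_t map_min_size)
    {
      // Reject a multiplication/addition overflow first...
      if (axis_count && map_min_size > (SIZE_MAX - maps_offset) / axis_count)
        return false;
      // ...then do the actual range check.
      return maps_offset + (size_t) axis_count * map_min_size <= table_length;
    }

Each SegmentMaps is itself variable-sized, so a check like this only guarantees the minimum-size records are addressable; the per-axis loop that follows in sanitize() still has to validate each map's own contents.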

@@ -46,20 +46,21 @@ struct InstanceRecord
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
-c->check_array (coordinates, coordinates[0].static_size, axis_count));
+c->check_array (coordinatesZ.arrayZ, coordinatesZ[0].static_size, axis_count));
 }
 protected:
 NameID subfamilyNameID;/* The name ID for entries in the 'name' table
 * that provide subfamily names for this instance. */
 HBUINT16 reserved; /* Reserved for future use — set to 0. */
-Fixed coordinates[VAR];/* The coordinates array for this instance. */
+UnsizedArrayOf<Fixed>
+coordinatesZ; /* The coordinates array for this instance. */
 //NameID postScriptNameIDX;/*Optional. The name ID for entries in the 'name'
 // * table that provide PostScript names for this
 // * instance. */
 public:
-DEFINE_SIZE_ARRAY (4, coordinates);
+DEFINE_SIZE_ARRAY (4, coordinatesZ);
 };
 struct AxisRecord

@@ -39,7 +39,7 @@ struct DeltaSetIndexMap
 {
 TRACE_SANITIZE (this);
 return_trace (c->check_struct (this) &&
-c->check_array (mapData, get_width (), mapCount));
+c->check_array (mapDataZ.arrayZ, get_width (), mapCount));
 }
 unsigned int map (unsigned int v) const /* Returns 16.16 outer.inner. */
@@ -55,7 +55,7 @@ struct DeltaSetIndexMap
 unsigned int u = 0;
 { /* Fetch it. */
 unsigned int w = get_width ();
-const HBUINT8 *p = mapData + w * v;
+const HBUINT8 *p = mapDataZ.arrayZ + w * v;
 for (; w; w--)
 u = (u << 8) + *p++;
 }
@@ -81,10 +81,11 @@ struct DeltaSetIndexMap
 HBUINT16 format; /* A packed field that describes the compressed
 * representation of delta-set indices. */
 HBUINT16 mapCount; /* The number of mapping entries. */
-HBUINT8 mapData[VAR]; /* The delta-set index mapping data. */
+UnsizedArrayOf<HBUINT8>
+mapDataZ; /* The delta-set index mapping data. */
 public:
-DEFINE_SIZE_ARRAY (4, mapData);
+DEFINE_SIZE_ARRAY (4, mapDataZ);
 };
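
The DeltaSetIndexMap hunks only change how the packed bytes are addressed (mapDataZ.arrayZ instead of the decayed mapData pointer); the decode itself is untouched: entry v occupies get_width() bytes and is accumulated big-endian. A small standalone illustration of that fetch, with hypothetical names, is below.

    #include <cstdint>

    // Read entry 'v' from a packed map with 'width' bytes per entry,
    // accumulating big-endian, exactly like the loop in
    // DeltaSetIndexMap::map () above.
    static uint32_t fetch_packed_entry (const uint8_t *map_data,
                                        unsigned int width,
                                        unsigned int v)
    {
      const uint8_t *p = map_data + width * v;
      uint32_t u = 0;
      for (unsigned int w = width; w; w--)
        u = (u << 8) + *p++;
      return u;
    }

    // With width == 2, entry 3 of the bytes
    //   00 00  00 01  00 02  01 2C
    // decodes to 0x012C, i.e. 300.

map () then splits the fetched value into the outer/inner index pair (the "16.16 outer.inner" return noted in the source comment) according to the format field, which is outside this diff.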

@@ -68,14 +68,14 @@ struct MVAR
 c->check_struct (this) &&
 valueRecordSize >= VariationValueRecord::static_size &&
 varStore.sanitize (c, this) &&
-c->check_array (values, valueRecordSize, valueRecordCount));
+c->check_array (valuesZ.arrayZ, valueRecordSize, valueRecordCount));
 }
 inline float get_var (hb_tag_t tag,
 int *coords, unsigned int coord_count) const
 {
 const VariationValueRecord *record;
-record = (VariationValueRecord *) bsearch (&tag, values,
+record = (VariationValueRecord *) bsearch (&tag, valuesZ.arrayZ,
 valueRecordCount, valueRecordSize,
 tag_compare);
 if (!record)
@@ -101,11 +101,12 @@ protected:
 HBUINT16 valueRecordCount;/* The number of value records — may be zero. */
 OffsetTo<VariationStore>
 varStore; /* Offset to item variation store table. */
-HBUINT8 values[VAR]; /* Array of value records. The records must be
+UnsizedArrayOf<HBUINT8>
+valuesZ; /* Array of value records. The records must be
 * in binary order of their valueTag field. */
 public:
-DEFINE_SIZE_ARRAY (12, values);
+DEFINE_SIZE_ARRAY (12, valuesZ);
 };
 } /* namespace OT */
