[sanitize] More hb_barrier() annotations

pull/4480/head
Behdad Esfahbod 1 year ago
parent 30672c7e75
commit 3a9262cc3d
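
Background on the pattern: hb_barrier () is, roughly, a compiler-only optimization barrier that always returns true, so it can be dropped straight into the && / || chains that sanitize () methods are built from. Placed right after a length, version, or format check, it keeps the compiler from hoisting reads of check-dependent data above the check itself. A minimal self-contained sketch of the idea follows; the barrier definition is an approximation of the one in hb.hh, and sanitize_blob and its fields are hypothetical, not code from this commit.

#include <atomic>
#include <cstdint>
#include <cstddef>

/* Approximation of hb_barrier (): a compiler-only fence that returns true
 * so it composes with boolean chains.  The real HarfBuzz definition may
 * differ in the fence it uses. */
static inline bool
hb_barrier ()
{
  std::atomic_signal_fence (std::memory_order_acq_rel);
  return true;
}

/* Hypothetical example of the pattern used throughout the hunks below:
 * validate the version field first, barrier, then touch data whose size
 * or layout depends on that field. */
static bool
sanitize_blob (const uint8_t *data, size_t len)
{
  if (len < 2) return false;
  uint16_t version = uint16_t ((data[0] << 8) | data[1]);
  return version == 1 &&
         hb_barrier () &&   /* reads below stay after the version check */
         len >= 4 &&
         data[3] != 0;      /* stand-in for reading version-dependent payload */
}

Returning true (rather than void) is what lets the barrier sit inside an existing boolean expression without restructuring it, which is exactly how the hunks in this commit use it.
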
34 changed files (number of changed lines in parentheses):
  1. src/OT/Color/CPAL/CPAL.hh (10)
  2. src/OT/Color/sbix/sbix.hh (1)
  3. src/OT/Color/svg/svg.hh (1)
  4. src/OT/Layout/Common/Coverage.hh (1)
  5. src/OT/Layout/GDEF/GDEF.hh (4)
  6. src/OT/Layout/GPOS/Anchor.hh (1)
  7. src/OT/Layout/GPOS/AnchorMatrix.hh (2)
  8. src/OT/Layout/GPOS/MarkMarkPosFormat1.hh (1)
  9. src/OT/Layout/GPOS/PairPosFormat1.hh (1)
  10. src/OT/Layout/GPOS/PairSet.hh (6)
  11. src/OT/Layout/GPOS/SinglePosFormat1.hh (1)
  12. src/OT/Layout/GPOS/ValueFormat.hh (1)
  13. src/OT/Layout/GSUB/ReverseChainSingleSubstFormat1.hh (2)
  14. src/OT/name/name.hh (5)
  15. src/graph/classdef-graph.hh (3)
  16. src/graph/coverage-graph.hh (3)
  17. src/graph/gsubgpos-graph.hh (4)
  18. src/graph/markbasepos-graph.hh (3)
  19. src/graph/pairpos-graph.hh (3)
  20. src/hb-aat-layout-ankr-table.hh (1)
  21. src/hb-aat-layout-bsln-table.hh (1)
  22. src/hb-aat-layout-common.hh (6)
  23. src/hb-aat-layout-feat-table.hh (2)
  24. src/hb-aat-layout-just-table.hh (3)
  25. src/hb-aat-layout-kerx-table.hh (17)
  26. src/hb-aat-layout-morx-table.hh (26)
  27. src/hb-aat-layout-opbd-table.hh (1)
  28. src/hb-aat-layout-trak-table.hh (2)
  29. src/hb-aat-ltag-table.hh (5)
  30. src/hb-open-file.hh (6)
  31. src/hb-open-type.hh (14)
  32. src/hb-ot-cff-common.hh (19)
  33. src/hb-ot-cff1-table.hh (23)
  34. src/hb-ot-cff2-table.hh (12)

@ -214,13 +214,17 @@ struct CPAL
hb_set_t *nameids_to_retain /* OUT */) const
{
if (version == 1)
{
hb_barrier ();
v1 ().collect_name_ids (this, numPalettes, numColors, color_index_map, nameids_to_retain);
}
}
private:
const CPALV1Tail& v1 () const
{
if (version == 0) return Null (CPALV1Tail);
hb_barrier ();
return StructAfter<CPALV1Tail> (*this);
}
@ -312,7 +316,10 @@ struct CPAL
return_trace (false);
if (version == 1)
{
hb_barrier ();
return_trace (v1 ().serialize (c->serializer, numPalettes, numColors, this, color_index_map));
}
return_trace (true);
}
@ -323,7 +330,8 @@ struct CPAL
return_trace (c->check_struct (this) &&
(this+colorRecordsZ).sanitize (c, numColorRecords) &&
colorRecordIndicesZ.sanitize (c, numPalettes) &&
(version == 0 || v1 ().sanitize (c, this, numPalettes, numColors)));
(version == 0 ||
(hb_barrier () && v1 ().sanitize (c, this, numPalettes, numColors))));
}
protected:

@ -368,6 +368,7 @@ struct sbix
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version >= 1 &&
strikes.sanitize (c, this)));
}

@ -56,6 +56,7 @@ struct SVGDocumentIndexEntry
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
svgDoc.sanitize (c, base, svgDocLength));
}

@ -64,6 +64,7 @@ struct Coverage
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format)
{
case 1: return_trace (u.format1.sanitize (c));

@ -291,6 +291,7 @@ struct CaretValue
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
@ -556,6 +557,7 @@ struct MarkGlyphSets
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
default:return_trace (true);
@ -630,6 +632,7 @@ struct GDEFVersion1_2
attachList.sanitize (c, this) &&
ligCaretList.sanitize (c, this) &&
markAttachClassDef.sanitize (c, this) &&
hb_barrier () &&
(version.to_int () < 0x00010002u || markGlyphSetsDef.sanitize (c, this)) &&
(version.to_int () < 0x00010003u || varStore.sanitize (c, this)));
}
@ -750,6 +753,7 @@ struct GDEF
{
TRACE_SANITIZE (this);
if (unlikely (!u.version.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.version.major) {
case 1: return_trace (u.version1.sanitize (c));
#ifndef HB_NO_BEYOND_64K

@ -25,6 +25,7 @@ struct Anchor
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));

@ -18,6 +18,7 @@ struct AnchorMatrix
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
hb_barrier ();
if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
unsigned int count = rows * cols;
if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
@ -25,6 +26,7 @@ struct AnchorMatrix
if (c->lazy_some_gpos)
return_trace (true);
hb_barrier ();
for (unsigned int i = 0; i < count; i++)
if (!matrixZ[i].sanitize (c, this)) return_trace (false);
return_trace (true);

@ -42,6 +42,7 @@ struct MarkMarkPosFormat1_2
mark1Coverage.sanitize (c, this) &&
mark2Coverage.sanitize (c, this) &&
mark1Array.sanitize (c, this) &&
hb_barrier () &&
mark2Array.sanitize (c, this, (unsigned int) classCount));
}

@ -36,6 +36,7 @@ struct PairPosFormat1_3
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
hb_barrier ();
unsigned int len1 = valueFormat[0].get_len ();
unsigned int len2 = valueFormat[1].get_len ();

@ -45,10 +45,12 @@ struct PairSet : ValueBase
bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this)
&& c->check_range (&firstPairValueRecord,
if (!(c->check_struct (this) &&
hb_barrier () &&
c->check_range (&firstPairValueRecord,
len,
closure->stride))) return_trace (false);
hb_barrier ();
unsigned int count = len;
const PairValueRecord *record = &firstPairValueRecord;

@ -28,6 +28,7 @@ struct SinglePosFormat1 : ValueBase
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
coverage.sanitize (c, this) &&
hb_barrier () &&
/* The coverage table may use a range to represent a set
* of glyphs, which means a small number of bytes can
* generate a large glyph set. Manually modify the

@ -390,6 +390,7 @@ struct ValueFormat : HBUINT16
if (c->lazy_some_gpos)
return_trace (true);
hb_barrier ();
return_trace (sanitize_values_stride_unsafe (c, base, values, count, size));
}

@ -33,9 +33,11 @@ struct ReverseChainSingleSubstFormat1
TRACE_SANITIZE (this);
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
return_trace (false);
hb_barrier ();
const auto &lookahead = StructAfter<decltype (lookaheadX)> (backtrack);
if (!lookahead.sanitize (c, this))
return_trace (false);
hb_barrier ();
const auto &substitute = StructAfter<decltype (substituteX)> (lookahead);
return_trace (substitute.sanitize (c));
}

@ -242,7 +242,9 @@ struct NameRecord
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && offset.sanitize (c, base, length));
return_trace (c->check_struct (this) &&
hb_barrier () &&
offset.sanitize (c, base, length));
}
HBUINT16 platformID; /* Platform ID. */
@ -465,6 +467,7 @@ struct name
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (format == 0 || format == 1) &&
c->check_array (nameRecordZ.arrayZ, count) &&
c->check_range (this, stringOffset) &&

@ -39,6 +39,7 @@ struct ClassDefFormat1 : public OT::ClassDefFormat1_3<SmallTypes>
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::ClassDefFormat1_3<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + classValue.get_size () - classValue.len.get_size ();
}
};
@ -50,6 +51,7 @@ struct ClassDefFormat2 : public OT::ClassDefFormat2_4<SmallTypes>
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::ClassDefFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
}
};
@ -114,6 +116,7 @@ struct ClassDef : public OT::ClassDef
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::ClassDef::min_size) return false;
hb_barrier ();
switch (u.format)
{
case 1: return ((ClassDefFormat1*)this)->sanitize (vertex);

@ -39,6 +39,7 @@ struct CoverageFormat1 : public OT::Layout::Common::CoverageFormat1_3<SmallTypes
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::Layout::Common::CoverageFormat1_3<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + glyphArray.get_size () - glyphArray.len.get_size ();
}
};
@ -50,6 +51,7 @@ struct CoverageFormat2 : public OT::Layout::Common::CoverageFormat2_4<SmallTypes
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
constexpr unsigned min_size = OT::Layout::Common::CoverageFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= min_size + rangeRecord.get_size () - rangeRecord.len.get_size ();
}
};
@ -138,6 +140,7 @@ struct Coverage : public OT::Layout::Common::Coverage
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::Layout::Common::Coverage::min_size) return false;
hb_barrier ();
switch (u.format)
{
case 1: return ((CoverageFormat1*)this)->sanitize (vertex);

@ -76,6 +76,7 @@ struct Lookup : public OT::Lookup
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::Lookup::min_size) return false;
hb_barrier ();
return vertex_len >= this->get_size ();
}
@ -351,6 +352,7 @@ struct LookupList : public OT::LookupList<T>
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < OT::LookupList<T>::min_size) return false;
hb_barrier ();
return vertex_len >= OT::LookupList<T>::item_size * this->len;
}
};
@ -364,6 +366,7 @@ struct GSTAR : public OT::GSUBGPOS
GSTAR* gstar = (GSTAR*) r.obj.head;
if (!gstar || !gstar->sanitize (r))
return nullptr;
hb_barrier ();
return gstar;
}
@ -383,6 +386,7 @@ struct GSTAR : public OT::GSUBGPOS
{
int64_t len = vertex.obj.tail - vertex.obj.head;
if (len < OT::GSUBGPOS::min_size) return false;
hb_barrier ();
return len >= get_size ();
}

@ -40,6 +40,7 @@ struct AnchorMatrix : public OT::Layout::GPOS_impl::AnchorMatrix
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < AnchorMatrix::min_size) return false;
hb_barrier ();
return vertex_len >= AnchorMatrix::min_size +
OT::Offset16::static_size * class_count * this->rows;
@ -128,6 +129,7 @@ struct MarkArray : public OT::Layout::GPOS_impl::MarkArray
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
unsigned min_size = MarkArray::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >= get_size ();
}
@ -495,6 +497,7 @@ struct MarkBasePos : public OT::Layout::GPOS_impl::MarkBasePos
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < u.format.get_size ()) return false;
hb_barrier ();
switch (u.format) {
case 1:

@ -42,6 +42,7 @@ struct PairPosFormat1 : public OT::Layout::GPOS_impl::PairPosFormat1_3<SmallType
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat1_3<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
return vertex_len >=
min_size + pairSet.get_size () - pairSet.len.get_size();
@ -198,6 +199,7 @@ struct PairPosFormat2 : public OT::Layout::GPOS_impl::PairPosFormat2_4<SmallType
size_t vertex_len = vertex.table_size ();
unsigned min_size = OT::Layout::GPOS_impl::PairPosFormat2_4<SmallTypes>::min_size;
if (vertex_len < min_size) return false;
hb_barrier ();
const unsigned class1_count = class1Count;
return vertex_len >=
@ -625,6 +627,7 @@ struct PairPos : public OT::Layout::GPOS_impl::PairPos
{
int64_t vertex_len = vertex.obj.tail - vertex.obj.head;
if (vertex_len < u.format.get_size ()) return false;
hb_barrier ();
switch (u.format) {
case 1:

@ -75,6 +75,7 @@ struct ankr
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version == 0 &&
c->check_range (this, anchorData) &&
lookupTable.sanitize (c, this, &(this+anchorData))));

@ -123,6 +123,7 @@ struct bsln
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this) && defaultBaseline < 32)))
return_trace (false);
hb_barrier ();
switch (format)
{

@ -191,6 +191,7 @@ struct LookupSegmentArray
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
first <= last &&
valuesZ.sanitize (c, base, last - first + 1));
}
@ -199,6 +200,7 @@ struct LookupSegmentArray
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
first <= last &&
valuesZ.sanitize (c, base, last - first + 1, std::forward<Ts> (ds)...));
}
@ -360,6 +362,7 @@ struct LookupFormat10
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
valueSize <= 4 &&
valueArrayZ.sanitize (c, glyphCount * valueSize));
}
@ -415,6 +418,7 @@ struct Lookup
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 0: return_trace (u.format0.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
@ -429,6 +433,7 @@ struct Lookup
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
hb_barrier ();
switch (u.format) {
case 0: return_trace (u.format0.sanitize (c, base));
case 2: return_trace (u.format2.sanitize (c, base));
@ -558,6 +563,7 @@ struct StateTable
{
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this) &&
hb_barrier () &&
nClasses >= 4 /* Ensure pre-defined classes fit. */ &&
classTable.sanitize (c, this)))) return_trace (false);

@ -138,6 +138,7 @@ struct FeatureName
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
(base+settingTableZ).sanitize (c, nSettings)));
}
@ -200,6 +201,7 @@ struct feat
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
namesZ.sanitize (c, featureNameCount, this)));
}

@ -185,6 +185,7 @@ struct ActionSubrecord
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (u.header.actionType)
{
@ -220,6 +221,7 @@ struct PostcompensationActionChain
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
unsigned int offset = min_size;
for (unsigned int i = 0; i < count; i++)
@ -389,6 +391,7 @@ struct just
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
horizData.sanitize (c, this, this) &&
vertData.sanitize (c, this, this)));

@ -432,6 +432,7 @@ struct KerxSubTableFormat2
return_trace (likely (c->check_struct (this) &&
leftClassTable.sanitize (c, this) &&
rightClassTable.sanitize (c, this) &&
hb_barrier () &&
c->check_range (this, array)));
}
@ -682,6 +683,7 @@ struct KerxSubTableFormat6
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
(is_long () ?
(
u.l.rowIndexTable.sanitize (c, this) &&
@ -795,9 +797,10 @@ struct KerxSubTable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.header.sanitize (c) ||
u.header.length <= u.header.static_size ||
!c->check_range (this, u.header.length))
if (!(u.header.sanitize (c) &&
hb_barrier () &&
u.header.length >= u.header.static_size &&
c->check_range (this, u.header.length)))
return_trace (false);
return_trace (dispatch (c));
@ -944,9 +947,10 @@ struct KerxTable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!thiz()->version.sanitize (c) ||
(unsigned) thiz()->version < (unsigned) T::minVersion ||
!thiz()->tableCount.sanitize (c)))
if (unlikely (!(thiz()->version.sanitize (c) &&
hb_barrier () &&
(unsigned) thiz()->version >= (unsigned) T::minVersion &&
thiz()->tableCount.sanitize (c))))
return_trace (false);
typedef typename T::SubTable SubTable;
@ -957,6 +961,7 @@ struct KerxTable
{
if (unlikely (!st->u.header.sanitize (c)))
return_trace (false);
hb_barrier ();
/* OpenType kern table has 2-byte subtable lengths. That's limiting.
* MS implementation also only supports one subtable, of format 0,
anyway. Certain versions of some fonts, like Calibri, contain

@ -261,6 +261,7 @@ struct ContextualSubtable
replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
if (!(replacement->sanitize (&c->sanitizer) && hb_barrier ()) || !*replacement)
replacement = nullptr;
hb_barrier ();
}
if (replacement)
{
@ -289,6 +290,7 @@ struct ContextualSubtable
replacement = &subs_old[Types::wordOffsetToIndex (offset, table, subs_old.arrayZ)];
if (!(replacement->sanitize (&c->sanitizer) && hb_barrier ()) || !*replacement)
replacement = nullptr;
hb_barrier ();
}
if (replacement)
{
@ -336,6 +338,7 @@ struct ContextualSubtable
unsigned int num_entries = 0;
if (unlikely (!machine.sanitize (c, &num_entries))) return_trace (false);
hb_barrier ();
if (!Types::extended)
return_trace (substitutionTables.sanitize (c, this, 0));
@ -590,6 +593,7 @@ struct LigatureSubtable
TRACE_SANITIZE (this);
/* The rest of array sanitizations are done at run-time. */
return_trace (c->check_struct (this) && machine.sanitize (c) &&
hb_barrier () &&
ligAction && component && ligature);
}
@ -854,6 +858,7 @@ struct InsertionSubtable
TRACE_SANITIZE (this);
/* The rest of array sanitizations are done at run-time. */
return_trace (c->check_struct (this) && machine.sanitize (c) &&
hb_barrier () &&
insertionAction);
}
@ -949,9 +954,10 @@ struct ChainSubtable
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!length.sanitize (c) ||
length <= min_size ||
!c->check_range (this, length))
if (!(length.sanitize (c) &&
hb_barrier () &&
length >= min_size &&
c->check_range (this, length)))
return_trace (false);
hb_sanitize_with_object_t with (c, this);
@ -1094,9 +1100,10 @@ struct Chain
bool sanitize (hb_sanitize_context_t *c, unsigned int version HB_UNUSED) const
{
TRACE_SANITIZE (this);
if (!length.sanitize (c) ||
length < min_size ||
!c->check_range (this, length))
if (!(length.sanitize (c) &&
hb_barrier () &&
length >= min_size &&
c->check_range (this, length)))
return_trace (false);
if (!c->check_array (featureZ.arrayZ, featureCount))
@ -1108,6 +1115,7 @@ struct Chain
{
if (!subtable->sanitize (c))
return_trace (false);
hb_barrier ();
subtable = &StructAfter<ChainSubtable<Types>> (*subtable);
}
@ -1178,7 +1186,10 @@ struct mortmorx
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!version.sanitize (c) || !version || !chainCount.sanitize (c))
if (!(version.sanitize (c) &&
hb_barrier () &&
version &&
chainCount.sanitize (c)))
return_trace (false);
const Chain<Types> *chain = &firstChain;
@ -1187,6 +1198,7 @@ struct mortmorx
{
if (!chain->sanitize (c, version))
return_trace (false);
hb_barrier ();
chain = &StructAfter<Chain<Types>> (*chain);
}

@ -144,6 +144,7 @@ struct opbd
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this) || version.major != 1))
return_trace (false);
hb_barrier ();
switch (format)
{

@ -134,6 +134,7 @@ struct TrackData
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
sizeTable.sanitize (c, base, nSizes) &&
trackTable.sanitize (c, nTracks, base, nSizes)));
}
@ -203,6 +204,7 @@ struct trak
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version.major == 1 &&
horizData.sanitize (c, this, this) &&
vertData.sanitize (c, this, this)));

@ -46,7 +46,9 @@ struct FTStringRange
bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) && (base+tag).sanitize (c, length));
return_trace (c->check_struct (this) &&
hb_barrier () &&
(base+tag).sanitize (c, length));
}
protected:
@ -73,6 +75,7 @@ struct ltag
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
version >= 1 &&
tagRanges.sanitize (c, this)));
}

@ -267,6 +267,7 @@ struct TTCHeader
{
TRACE_SANITIZE (this);
if (unlikely (!u.header.version.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.header.version.major) {
case 2: /* version 2 is compatible with version 1 */
case 1: return_trace (u.version1.sanitize (c));
@ -302,6 +303,7 @@ struct ResourceRecord
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
offset.sanitize (c, data_base) &&
hb_barrier () &&
get_face (data_base).sanitize (c));
}
@ -337,6 +339,7 @@ struct ResourceTypeRecord
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
resourcesZ.sanitize (c, type_base,
get_resource_count (),
data_base));
@ -385,6 +388,7 @@ struct ResourceMap
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
typeList.sanitize (c, this,
&(this+typeList),
data_base));
@ -428,6 +432,7 @@ struct ResourceForkHeader
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
data.sanitize (c, this, dataLen) &&
map.sanitize (c, this, &(this+data)));
}
@ -508,6 +513,7 @@ struct OpenTypeFontFile
{
TRACE_SANITIZE (this);
if (unlikely (!u.tag.sanitize (c))) return_trace (false);
hb_barrier ();
switch (u.tag) {
case CFFTag: /* All the non-collection tags */
case TrueTag:

@ -418,6 +418,7 @@ struct OffsetTo : Offset<OffsetType, has_null>
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return_trace (false);
hb_barrier ();
//if (unlikely (this->is_null ())) return_trace (true);
if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
return_trace (true);
@ -431,6 +432,7 @@ struct OffsetTo : Offset<OffsetType, has_null>
{
TRACE_SANITIZE (this);
return_trace (sanitize_shallow (c, base) &&
hb_barrier () &&
(this->is_null () ||
c->dispatch (StructAtOffset<Type> (base, *this), std::forward<Ts> (ds)...) ||
neuter (c)));
@ -536,6 +538,7 @@ struct UnsizedArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
return_trace (false);
@ -725,6 +728,7 @@ struct ArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = len;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
@ -735,7 +739,9 @@ struct ArrayOf
bool sanitize_shallow (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (len.sanitize (c) && c->check_array_sized (arrayZ, len, sizeof (LenType)));
return_trace (len.sanitize (c) &&
hb_barrier () &&
c->check_array_sized (arrayZ, len, sizeof (LenType)));
}
public:
@ -866,6 +872,7 @@ struct HeadlessArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = get_length ();
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
@ -878,6 +885,7 @@ struct HeadlessArrayOf
{
TRACE_SANITIZE (this);
return_trace (lenP1.sanitize (c) &&
hb_barrier () &&
(!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType))));
}
@ -919,6 +927,7 @@ struct ArrayOfM1
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = lenM1 + 1;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
@ -931,6 +940,7 @@ struct ArrayOfM1
{
TRACE_SANITIZE (this);
return_trace (lenM1.sanitize (c) &&
hb_barrier () &&
(c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType))));
}
@ -1104,6 +1114,7 @@ struct VarSizedBinSearchArrayOf
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
hb_barrier ();
unsigned int count = get_length ();
for (unsigned int i = 0; i < count; i++)
if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...)))
@ -1130,6 +1141,7 @@ struct VarSizedBinSearchArrayOf
{
TRACE_SANITIZE (this);
return_trace (header.sanitize (c) &&
hb_barrier () &&
Type::static_size <= header.unitSize &&
c->check_range (bytesZ.arrayZ,
header.nUnits,

@ -274,8 +274,10 @@ struct CFFIndex
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this) &&
hb_barrier () &&
(count == 0 || /* empty INDEX */
(count < count + 1u &&
hb_barrier () &&
c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
c->check_array (offsets, offSize, count + 1u) &&
c->check_array ((const HBUINT8*) data_base (), 1, offset_at (count))))));
@ -412,6 +414,7 @@ struct FDSelect0 {
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this))))
return_trace (false);
hb_barrier ();
if (unlikely (!c->check_array (fds, c->get_num_glyphs ())))
return_trace (false);
@ -438,7 +441,9 @@ struct FDSelect3_4_Range
bool sanitize (hb_sanitize_context_t *c, const void * /*nullptr*/, unsigned int fdcount) const
{
TRACE_SANITIZE (this);
return_trace (first < c->get_num_glyphs () && (fd < fdcount));
return_trace (c->check_struct (this) &&
hb_barrier () &&
first < c->get_num_glyphs () && (fd < fdcount));
}
GID_TYPE first;
@ -456,15 +461,20 @@ struct FDSelect3_4
bool sanitize (hb_sanitize_context_t *c, unsigned int fdcount) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this) || !ranges.sanitize (c, nullptr, fdcount) ||
(nRanges () == 0) || ranges[0].first != 0))
if (unlikely (!(c->check_struct (this) &&
ranges.sanitize (c, nullptr, fdcount) &&
hb_barrier () &&
(nRanges () != 0) &&
ranges[0].first == 0)))
return_trace (false);
for (unsigned int i = 1; i < nRanges (); i++)
if (unlikely (ranges[i - 1].first >= ranges[i].first))
return_trace (false);
if (unlikely (!sentinel().sanitize (c) || (sentinel() != c->get_num_glyphs ())))
if (unlikely (!(sentinel().sanitize (c) &&
hb_barrier () &&
(sentinel() == c->get_num_glyphs ()))))
return_trace (false);
return_trace (true);
@ -559,6 +569,7 @@ struct FDSelect
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (format)
{

@ -275,6 +275,7 @@ struct Encoding
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (table_format ())
{
@ -376,13 +377,13 @@ struct Charset1_2 {
bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs, unsigned *num_charset_entries) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
num_glyphs--;
unsigned i;
for (i = 0; num_glyphs > 0; i++)
{
if (unlikely (!ranges[i].sanitize (c) || (num_glyphs < ranges[i].nLeft + 1)))
if (unlikely (!(ranges[i].sanitize (c) &&
hb_barrier () &&
(num_glyphs >= ranges[i].nLeft + 1))))
return_trace (false);
num_glyphs -= (ranges[i].nLeft + 1);
}
@ -615,6 +616,7 @@ struct Charset
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (format)
{
@ -1055,6 +1057,7 @@ struct cff1
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 1));
}
@ -1085,14 +1088,17 @@ struct cff1
nameIndex = &cff->nameIndex (cff);
if ((nameIndex == &Null (CFF1NameIndex)) || !nameIndex->sanitize (&sc))
goto fail;
hb_barrier ();
topDictIndex = &StructAtOffset<CFF1TopDictIndex> (nameIndex, nameIndex->get_size ());
if ((topDictIndex == &Null (CFF1TopDictIndex)) || !topDictIndex->sanitize (&sc) || (topDictIndex->count == 0))
goto fail;
hb_barrier ();
{ /* parse top dict */
const hb_ubytes_t topDictStr = (*topDictIndex)[0];
if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff1_top_dict_interp_env_t env (topDictStr);
cff1_top_dict_interpreter_t top_interp (env);
if (unlikely (!top_interp.interpret (topDict))) goto fail;
@ -1104,6 +1110,7 @@ struct cff1
{
charset = &StructAtOffsetOrNull<Charset> (cff, topDict.CharsetOffset);
if (unlikely ((charset == &Null (Charset)) || !charset->sanitize (&sc, &num_charset_entries))) goto fail;
hb_barrier ();
}
fdCount = 1;
@ -1114,6 +1121,7 @@ struct cff1
if (unlikely ((fdArray == &Null (CFF1FDArray)) || !fdArray->sanitize (&sc) ||
(fdSelect == &Null (CFF1FDSelect)) || !fdSelect->sanitize (&sc, fdArray->count)))
goto fail;
hb_barrier ();
fdCount = fdArray->count;
}
@ -1134,21 +1142,25 @@ struct cff1
{
encoding = &StructAtOffsetOrNull<Encoding> (cff, topDict.EncodingOffset);
if (unlikely ((encoding == &Null (Encoding)) || !encoding->sanitize (&sc))) goto fail;
hb_barrier ();
}
}
stringIndex = &StructAtOffset<CFF1StringIndex> (topDictIndex, topDictIndex->get_size ());
if ((stringIndex == &Null (CFF1StringIndex)) || !stringIndex->sanitize (&sc))
goto fail;
hb_barrier ();
globalSubrs = &StructAtOffset<CFF1Subrs> (stringIndex, stringIndex->get_size ());
if ((globalSubrs != &Null (CFF1Subrs)) && !globalSubrs->sanitize (&sc))
goto fail;
hb_barrier ();
charStrings = &StructAtOffsetOrNull<CFF1CharStrings> (cff, topDict.charStringsOffset);
if ((charStrings == &Null (CFF1CharStrings)) || unlikely (!charStrings->sanitize (&sc)))
goto fail;
hb_barrier ();
num_glyphs = charStrings->count;
if (num_glyphs != sc.get_num_glyphs ())
@ -1166,6 +1178,7 @@ struct cff1
{
hb_ubytes_t fontDictStr = (*fdArray)[i];
if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff1_font_dict_values_t *font;
cff1_top_dict_interp_env_t env (fontDictStr);
cff1_font_dict_interpreter_t font_interp (env);
@ -1177,6 +1190,7 @@ struct cff1
PRIVDICTVAL *priv = &privateDicts[i];
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
num_interp_env_t env2 (privDictStr);
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env2);
priv->init ();
@ -1186,6 +1200,7 @@ struct cff1
if (priv->localSubrs != &Null (CFF1Subrs) &&
unlikely (!priv->localSubrs->sanitize (&sc)))
goto fail;
hb_barrier ();
}
}
else /* non-CID */
@ -1195,6 +1210,7 @@ struct cff1
const hb_ubytes_t privDictStr = StructAtOffset<UnsizedByteStr> (cff, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
num_interp_env_t env (privDictStr);
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL> priv_interp (env);
priv->init ();
@ -1204,6 +1220,7 @@ struct cff1
if (priv->localSubrs != &Null (CFF1Subrs) &&
unlikely (!priv->localSubrs->sanitize (&sc)))
goto fail;
hb_barrier ();
}
return;

@ -90,6 +90,7 @@ struct CFF2FDSelect
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return_trace (false);
hb_barrier ();
switch (format)
{
@ -115,7 +116,10 @@ struct CFF2VariationStore
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (likely (c->check_struct (this)) && c->check_range (&varStore, size) && varStore.sanitize (c));
return_trace (c->check_struct (this) &&
hb_barrier () &&
c->check_range (&varStore, size) &&
varStore.sanitize (c));
}
bool serialize (hb_serialize_context_t *c, const CFF2VariationStore *varStore)
@ -384,6 +388,7 @@ struct cff2
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
hb_barrier () &&
likely (version.major == 2));
}
@ -414,6 +419,7 @@ struct cff2
{ /* parse top dict */
hb_ubytes_t topDictStr = (cff2 + cff2->topDict).as_ubytes (cff2->topDictSize);
if (unlikely (!topDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
num_interp_env_t env (topDictStr);
cff2_top_dict_interpreter_t top_interp (env);
topDict.init ();
@ -430,6 +436,7 @@ struct cff2
(charStrings == &Null (CFF2CharStrings)) || unlikely (!charStrings->sanitize (&sc)) ||
(globalSubrs == &Null (CFF2Subrs)) || unlikely (!globalSubrs->sanitize (&sc)) ||
(fdArray == &Null (CFF2FDArray)) || unlikely (!fdArray->sanitize (&sc)) ||
!hb_barrier () ||
(((fdSelect != &Null (CFF2FDSelect)) && unlikely (!fdSelect->sanitize (&sc, fdArray->count)))))
goto fail;
@ -446,6 +453,7 @@ struct cff2
{
const hb_ubytes_t fontDictStr = (*fdArray)[i];
if (unlikely (!fontDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff2_font_dict_values_t *font;
num_interp_env_t env (fontDictStr);
cff2_font_dict_interpreter_t font_interp (env);
@ -456,6 +464,7 @@ struct cff2
const hb_ubytes_t privDictStr = StructAtOffsetOrNull<UnsizedByteStr> (cff2, font->privateDictInfo.offset).as_ubytes (font->privateDictInfo.size);
if (unlikely (!privDictStr.sanitize (&sc))) goto fail;
hb_barrier ();
cff2_priv_dict_interp_env_t env2 (privDictStr);
dict_interpreter_t<PRIVOPSET, PRIVDICTVAL, cff2_priv_dict_interp_env_t> priv_interp (env2);
privateDicts[i].init ();
@ -465,6 +474,7 @@ struct cff2
if (privateDicts[i].localSubrs != &Null (CFF2Subrs) &&
unlikely (!privateDicts[i].localSubrs->sanitize (&sc)))
goto fail;
hb_barrier ();
}
return;
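
One further note: in the cff2 accelerator hunk above, the barrier appears in negated form, !hb_barrier () ||, because the surrounding condition is an "any failure bails out" chain of ||. Since hb_barrier () always returns true, the negated call can never trigger the bail-out; it only pins the evaluation order. A small sketch of that inverted shape, reusing the approximate hb_barrier () from the note near the top (header_ok and body_ok are made-up checks, not HarfBuzz code):

/* Hypothetical checks: body_ok () reads data that is only known to be
 * in bounds once header_ok () has passed. */
static bool header_ok (const uint8_t *d, size_t len) { return len >= 4 && d[0] == 1; }
static bool body_ok   (const uint8_t *d)             { return d[3] != 0; }

static bool
sanitize_blob_negated (const uint8_t *d, size_t len)
{
  if (!header_ok (d, len) ||
      !hb_barrier () ||   /* always false; only keeps body_ok's reads after header_ok */
      !body_ok (d))
    return false;
  return true;
}
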
