[subset] further changes to serializer error handling.

- Rename enum type and enum members.
- in_error() now returns true for any error having been set. hb-subset now diverts to the repacker only when offset overflow is the sole error.
- Added INT_OVERFLOW and ARRAY_OVERFLOW enum values.
pull/2857/head
Garret Rieger 4 years ago
parent 73ed59f7a6
commit b14475d2ae
  1. 2
      src/hb-cff-interp-common.hh
  2. 8
      src/hb-open-type.hh
  3. 11
      src/hb-ot-cmap-table.hh
  4. 2
      src/hb-ot-hmtx-table.hh
  5. 8
      src/hb-ot-layout-gpos-table.hh
  6. 2
      src/hb-ot-layout-gsub-table.hh
  7. 4
      src/hb-repacker.hh
  8. 50
      src/hb-serialize.hh
  9. 4
      src/hb-subset.cc

@ -263,7 +263,7 @@ struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
T *ip = c->allocate_size<T> (T::static_size); T *ip = c->allocate_size<T> (T::static_size);
if (unlikely (!ip)) return_trace (false); if (unlikely (!ip)) return_trace (false);
return_trace (c->check_assign (*ip, value, HB_SERIALIZE_ERR_OTHER)); return_trace (c->check_assign (*ip, value, HB_SERIALIZE_ERROR_INT_OVERFLOW));
} }
template <typename V> template <typename V>

@ -211,7 +211,7 @@ struct Offset : Type
void *t = c->start_embed<void> (); void *t = c->start_embed<void> ();
c->check_assign (*this, c->check_assign (*this,
(unsigned) ((char *) t - (char *) base), (unsigned) ((char *) t - (char *) base),
HB_SERIALIZE_ERR_OTHER); HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
return t; return t;
} }
@ -623,7 +623,7 @@ struct ArrayOf
{ {
TRACE_SERIALIZE (this); TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!c->extend_min (*this))) return_trace (false);
c->check_assign (len, items_len, HB_SERIALIZE_ERR_OTHER); c->check_assign (len, items_len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
if (unlikely (!c->extend (*this))) return_trace (false); if (unlikely (!c->extend (*this))) return_trace (false);
return_trace (true); return_trace (true);
} }
@ -658,7 +658,7 @@ struct ArrayOf
TRACE_SERIALIZE (this); TRACE_SERIALIZE (this);
auto *out = c->start_embed (this); auto *out = c->start_embed (this);
if (unlikely (!c->extend_min (out))) return_trace (nullptr); if (unlikely (!c->extend_min (out))) return_trace (nullptr);
c->check_assign (out->len, len, HB_SERIALIZE_ERR_OTHER); c->check_assign (out->len, len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
if (unlikely (!as_array ().copy (c))) return_trace (nullptr); if (unlikely (!as_array ().copy (c))) return_trace (nullptr);
return_trace (out); return_trace (out);
} }
@ -789,7 +789,7 @@ struct HeadlessArrayOf
{ {
TRACE_SERIALIZE (this); TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!c->extend_min (*this))) return_trace (false);
c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERR_OTHER); c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
if (unlikely (!c->extend (*this))) return_trace (false); if (unlikely (!c->extend (*this))) return_trace (false);
return_trace (true); return_trace (true);
} }

@ -278,7 +278,7 @@ struct CmapSubtableFormat4
if (unlikely (!c->check_assign(this->length, if (unlikely (!c->check_assign(this->length,
c->length () - table_initpos, c->length () - table_initpos,
HB_SERIALIZE_ERR_OTHER))) return; HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
this->segCountX2 = segcount * 2; this->segCountX2 = segcount * 2;
this->entrySelector = hb_max (1u, hb_bit_storage (segcount)) - 1; this->entrySelector = hb_max (1u, hb_bit_storage (segcount)) - 1;
this->searchRange = 2 * (1u << this->entrySelector); this->searchRange = 2 * (1u << this->entrySelector);
@ -854,7 +854,7 @@ struct DefaultUVS : SortedArrayOf<UnicodeValueRange, HBUINT32>
{ {
if (unlikely (!c->check_assign (out->len, if (unlikely (!c->check_assign (out->len,
(c->length () - init_len) / UnicodeValueRange::static_size, (c->length () - init_len) / UnicodeValueRange::static_size,
HB_SERIALIZE_ERR_OTHER))) return nullptr; HB_SERIALIZE_ERROR_INT_OVERFLOW))) return nullptr;
return out; return out;
} }
} }
@ -1116,11 +1116,12 @@ struct CmapSubtableFormat14
return; return;
int tail_len = init_tail - c->tail; int tail_len = init_tail - c->tail;
c->check_assign (this->length, c->length () - table_initpos + tail_len, HB_SERIALIZE_ERR_OTHER); c->check_assign (this->length, c->length () - table_initpos + tail_len,
HB_SERIALIZE_ERROR_INT_OVERFLOW);
c->check_assign (this->record.len, c->check_assign (this->record.len,
(c->length () - table_initpos - CmapSubtableFormat14::min_size) / (c->length () - table_initpos - CmapSubtableFormat14::min_size) /
VariationSelectorRecord::static_size, VariationSelectorRecord::static_size,
HB_SERIALIZE_ERR_OTHER); HB_SERIALIZE_ERROR_INT_OVERFLOW);
/* Correct the incorrect write order by reversing the order of the variation /* Correct the incorrect write order by reversing the order of the variation
records array. */ records array. */
@ -1408,7 +1409,7 @@ struct cmap
c->check_assign(this->encodingRecord.len, c->check_assign(this->encodingRecord.len,
(c->length () - cmap::min_size)/EncodingRecord::static_size, (c->length () - cmap::min_size)/EncodingRecord::static_size,
HB_SERIALIZE_ERR_OTHER); HB_SERIALIZE_ERROR_INT_OVERFLOW);
} }
void closure_glyphs (const hb_set_t *unicodes, void closure_glyphs (const hb_set_t *unicodes,

@ -146,7 +146,7 @@ struct hmtxvmtx
_mtx.fini (); _mtx.fini ();
if (unlikely (c->serializer->ran_out_of_room () || c->serializer->in_error ())) if (unlikely (c->serializer->in_error ()))
return_trace (false); return_trace (false);
// Amend header num hmetrics // Amend header num hmetrics

@ -694,7 +694,7 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
{ {
TRACE_SERIALIZE (this); TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!c->check_assign (len, it.len (), HB_SERIALIZE_ERR_OTHER))) return_trace (false); if (unlikely (!c->check_assign (len, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return_trace (false);
c->copy_all (it, base, c->to_bias (this), klass_mapping, layout_variation_idx_map); c->copy_all (it, base, c->to_bias (this), klass_mapping, layout_variation_idx_map);
return_trace (true); return_trace (true);
} }
@ -756,7 +756,7 @@ struct SinglePosFormat1
{ {
auto out = c->extend_min (*this); auto out = c->extend_min (*this);
if (unlikely (!out)) return; if (unlikely (!out)) return;
if (unlikely (!c->check_assign (valueFormat, valFormat, HB_SERIALIZE_ERR_OTHER))) return; if (unlikely (!c->check_assign (valueFormat, valFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
+ it + it
| hb_map (hb_second) | hb_map (hb_second)
@ -870,8 +870,8 @@ struct SinglePosFormat2
{ {
auto out = c->extend_min (*this); auto out = c->extend_min (*this);
if (unlikely (!out)) return; if (unlikely (!out)) return;
if (unlikely (!c->check_assign (valueFormat, valFormat, HB_SERIALIZE_ERR_OTHER))) return; if (unlikely (!c->check_assign (valueFormat, valFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERR_OTHER))) return; if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return;
+ it + it
| hb_map (hb_second) | hb_map (hb_second)

@ -102,7 +102,7 @@ struct SingleSubstFormat1
TRACE_SERIALIZE (this); TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false); if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false); if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
c->check_assign (deltaGlyphID, delta, HB_SERIALIZE_ERR_OTHER); c->check_assign (deltaGlyphID, delta, HB_SERIALIZE_ERROR_INT_OVERFLOW);
return_trace (true); return_trace (true);
} }

@ -738,13 +738,13 @@ hb_resolve_overflows (const hb_vector_t<hb_serialize_context_t::object_t *>& pac
} }
DEBUG_MSG (SUBSET_REPACK, nullptr, "No resolution available :("); DEBUG_MSG (SUBSET_REPACK, nullptr, "No resolution available :(");
c->set_error (HB_SERIALIZE_ERR_OFFSET_OVERFLOW); c->set_error (HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
return; return;
} }
if (sorted_graph.in_error ()) if (sorted_graph.in_error ())
{ {
c->set_error (HB_SERIALIZE_ERR_OTHER); c->set_error (HB_SERIALIZE_ERROR_OTHER);
return; return;
} }
sorted_graph.serialize (c); sorted_graph.serialize (c);

@ -41,13 +41,15 @@
* Serialize * Serialize
*/ */
enum hb_serialize_error_type_t { enum hb_serialize_error_t {
HB_SERIALIZE_ERR_NONE = 0x00000000u, HB_SERIALIZE_ERROR_NONE = 0x00000000u,
HB_SERIALIZE_ERR_OTHER = 0x00000001u, HB_SERIALIZE_ERROR_OTHER = 0x00000001u,
HB_SERIALIZE_ERR_OFFSET_OVERFLOW = 0x00000002u, HB_SERIALIZE_ERROR_OFFSET_OVERFLOW = 0x00000002u,
HB_SERIALIZE_ERR_OUT_OF_ROOM = 0x00000004u, HB_SERIALIZE_ERROR_OUT_OF_ROOM = 0x00000004u,
HB_SERIALIZE_ERROR_INT_OVERFLOW = 0x00000008u,
HB_SERIALIZE_ERROR_ARRAY_OVERFLOW = 0x00000010u
}; };
HB_MARK_AS_FLAG_T (hb_serialize_error_type_t); HB_MARK_AS_FLAG_T (hb_serialize_error_t);
struct hb_serialize_context_t struct hb_serialize_context_t
{ {
@ -127,12 +129,13 @@ struct hb_serialize_context_t
object_pool.fini (); object_pool.fini ();
} }
bool in_error () const { return errors & HB_SERIALIZE_ERR_OTHER; } bool in_error () const { return bool (errors); }
bool successful () const { return !in_error (); } bool successful () const { return !bool (errors); }
bool ran_out_of_room () const { return errors & HB_SERIALIZE_ERR_OUT_OF_ROOM; } bool ran_out_of_room () const { return errors & HB_SERIALIZE_ERROR_OUT_OF_ROOM; }
bool offset_overflow () const { return errors & HB_SERIALIZE_ERR_OFFSET_OVERFLOW; } bool offset_overflow () const { return errors & HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
bool only_offset_overflow () const { return errors == HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
void reset (void *start_, unsigned int size) void reset (void *start_, unsigned int size)
{ {
@ -144,7 +147,7 @@ struct hb_serialize_context_t
void reset () void reset ()
{ {
this->errors = HB_SERIALIZE_ERR_NONE; this->errors = HB_SERIALIZE_ERROR_NONE;
this->head = this->start; this->head = this->start;
this->tail = this->end; this->tail = this->end;
this->debug_depth = 0; this->debug_depth = 0;
@ -155,14 +158,14 @@ struct hb_serialize_context_t
} }
bool check_success (bool success, bool check_success (bool success,
hb_serialize_error_type_t err_type = HB_SERIALIZE_ERR_OTHER) hb_serialize_error_t err_type = HB_SERIALIZE_ERROR_OTHER)
{ {
return successful () return successful ()
&& (success || (set_error (err_type), false)); && (success || (set_error (err_type), false));
} }
template <typename T1, typename T2> template <typename T1, typename T2>
bool check_equal (T1 &&v1, T2 &&v2, hb_serialize_error_type_t err_type) bool check_equal (T1 &&v1, T2 &&v2, hb_serialize_error_t err_type)
{ {
if ((long long) v1 != (long long) v2) if ((long long) v1 != (long long) v2)
{ {
@ -173,7 +176,7 @@ struct hb_serialize_context_t
} }
template <typename T1, typename T2> template <typename T1, typename T2>
bool check_assign (T1 &v1, T2 &&v2, hb_serialize_error_type_t err_type) bool check_assign (T1 &v1, T2 &&v2, hb_serialize_error_t err_type)
{ return check_equal (v1 = v2, v2, err_type); } { return check_equal (v1 = v2, v2, err_type); }
template <typename T> bool propagate_error (T &&obj) template <typename T> bool propagate_error (T &&obj)
@ -206,7 +209,13 @@ struct hb_serialize_context_t
propagate_error (packed, packed_map); propagate_error (packed, packed_map);
if (unlikely (!current)) return; if (unlikely (!current)) return;
if (unlikely (in_error())) return; if (unlikely (in_error()))
{
// Offset overflows that occur before link resolution cannot be handled
// by repacking, so set a more general error.
if (offset_overflow ()) set_error (HB_SERIALIZE_ERROR_OTHER);
return;
}
assert (!current->next); assert (!current->next);
@ -385,7 +394,7 @@ struct hb_serialize_context_t
for (const object_t::link_t &link : parent->links) for (const object_t::link_t &link : parent->links)
{ {
const object_t* child = packed[link.objidx]; const object_t* child = packed[link.objidx];
if (unlikely (!child)) { set_error (HB_SERIALIZE_ERR_OTHER); return; } if (unlikely (!child)) { set_error (HB_SERIALIZE_ERROR_OTHER); return; }
unsigned offset = 0; unsigned offset = 0;
switch ((whence_t) link.whence) { switch ((whence_t) link.whence) {
case Head: offset = child->head - parent->head; break; case Head: offset = child->head - parent->head; break;
@ -432,7 +441,7 @@ struct hb_serialize_context_t
Type *start_embed (const Type &obj) const Type *start_embed (const Type &obj) const
{ return start_embed (hb_addressof (obj)); } { return start_embed (hb_addressof (obj)); }
void set_error (hb_serialize_error_type_t err_type) void set_error (hb_serialize_error_t err_type)
{ {
errors = errors | err_type; errors = errors | err_type;
} }
@ -444,8 +453,7 @@ struct hb_serialize_context_t
if (this->tail - this->head < ptrdiff_t (size)) if (this->tail - this->head < ptrdiff_t (size))
{ {
set_error (HB_SERIALIZE_ERR_OUT_OF_ROOM); set_error (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
set_error (HB_SERIALIZE_ERR_OTHER);
return nullptr; return nullptr;
} }
memset (this->head, 0, size); memset (this->head, 0, size);
@ -564,13 +572,13 @@ struct hb_serialize_context_t
{ {
auto &off = * ((BEInt<T> *) (parent->head + link.position)); auto &off = * ((BEInt<T> *) (parent->head + link.position));
assert (0 == off); assert (0 == off);
check_assign (off, offset, HB_SERIALIZE_ERR_OFFSET_OVERFLOW); check_assign (off, offset, HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
} }
public: /* TODO Make private. */ public: /* TODO Make private. */
char *start, *head, *tail, *end; char *start, *head, *tail, *end;
unsigned int debug_depth; unsigned int debug_depth;
hb_serialize_error_type_t errors; hb_serialize_error_t errors;
private: private:

@ -86,7 +86,7 @@ _repack (hb_tag_t tag, const hb_serialize_context_t& c)
hb_serialize_context_t repacked ((void *) buf, buf_size); hb_serialize_context_t repacked ((void *) buf, buf_size);
hb_resolve_overflows (c.object_graph (), &repacked); hb_resolve_overflows (c.object_graph (), &repacked);
if (unlikely (repacked.ran_out_of_room () || repacked.in_error () || repacked.offset_overflow ())) if (unlikely (repacked.in_error ()))
// TODO(garretrieger): refactor so we can share the resize/retry logic with the subset // TODO(garretrieger): refactor so we can share the resize/retry logic with the subset
// portion. // portion.
return nullptr; return nullptr;
@ -163,7 +163,7 @@ _subset (hb_subset_plan_t *plan)
} }
hb_blob_destroy (source_blob); hb_blob_destroy (source_blob);
if (serializer.ran_out_of_room () || serializer.in_error ()) if (serializer.in_error () && !serializer.only_offset_overflow ())
{ {
DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset FAILED!", HB_UNTAG (tag)); DEBUG_MSG (SUBSET, nullptr, "OT::%c%c%c%c::subset FAILED!", HB_UNTAG (tag));
return false; return false;

Loading…
Cancel
Save