}
template <typename Type>
- inline Type *allocate (unsigned int size, unsigned int alignment = 2)
+ inline Type *allocate_size (unsigned int size, unsigned int alignment = 1)
{
- unsigned int padding = (alignment - (this->head - this->start) % alignment) % alignment; /* TODO speedup */
+ unsigned int padding = alignment < 2 ? 0 : (alignment - (this->head - this->start) % alignment) % alignment;
- if (unlikely (this->ran_out_of_room || this->end - this->head > padding + size)) {
+ if (unlikely (this->ran_out_of_room || this->end - this->head < padding + size)) {
this->ran_out_of_room = true;
return NULL;
}
+ memset (this->head, 0, padding + size);
this->head += padding;
char *ret = this->head;
this->head += size;
template <typename Type>
inline Type *allocate_min (unsigned int alignment = 2)
{
- return this->allocate<Type> (Type::min_size, alignment);
+ return this->allocate_size<Type> (Type::min_size, alignment);
}
template <typename Type>
inline Type *embed (const Type &obj, unsigned int alignment = 2)
{
- return this->allocate<Type> (obj.get_size (), alignment);
+ unsigned int size = obj.get_size ();
+ Type *ret = this->allocate_size<Type> (size, alignment);
+ if (unlikely (!ret)) return NULL;
+ memcpy (ret, &obj, size);
+ return ret;
}
template <typename Type>
- inline Type *extend (Type &obj, unsigned int size, unsigned int alignment = 2)
+ inline Type *extend_min (Type &obj, unsigned int alignment = 2)
{
+ unsigned int size = obj.min_size;
assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
- this->allocate<Type> (((char *) &obj) + size - this->head, alignment);
+ this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
return reinterpret_cast<Type *> (&obj);
}
template <typename Type>
- inline Type *extend (Type &obj)
+ inline Type *extend (Type &obj, unsigned int alignment = 2)
{
- return this->extend<Type> (obj, obj.get_size ());
+ unsigned int size = obj.get_size ();
+ assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
+ this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
+ return reinterpret_cast<Type *> (&obj);
}
inline void truncate (void *head)
if (unlikely (!offset)) return Null(Type);
return StructAtOffset<Type> (base, offset);
}
+ inline Type& operator () (void *base)
+ {
+ unsigned int offset = *this;
+ return StructAtOffset<Type> (base, offset);
+ }
+
+ inline void set_offset (void *base, void *obj)
+ {
+ this->set ((char *) obj - (char *) base);
+ }
inline bool sanitize (hb_sanitize_context_t *c, void *base) {
TRACE_SANITIZE ();
}
};
template <typename Base, typename OffsetType, typename Type>
-inline const Type& operator + (const Base &base, GenericOffsetTo<OffsetType, Type> offset) { return offset (base); }
+inline const Type& operator + (const Base &base, const GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
+template <typename Base, typename OffsetType, typename Type>
+inline Type& operator + (Base &base, GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
template <typename Type>
struct OffsetTo : GenericOffsetTo<Offset, Type> {};
return i;
}
- inline static bool serialize (hb_serialize_context_t *c,
- const USHORT *glyphs,
- unsigned int num_glyphs)
+ inline bool serialize (hb_serialize_context_t *c,
+ const USHORT *glyphs,
+ unsigned int num_glyphs)
{
TRACE_SERIALIZE ();
- CoverageFormat1 *t = c->allocate_min<CoverageFormat1> ();
- if (unlikely (!t)) return TRACE_RETURN (false);
- t->coverageFormat.set (1);
- t->glyphArray.len.set (num_glyphs);
- if (unlikely (!c->extend (t->glyphArray))) return TRACE_RETURN (false);
+ if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+ glyphArray.len.set (num_glyphs);
+ if (unlikely (!c->extend (glyphArray))) return TRACE_RETURN (false);
for (unsigned int i = 0; i < num_glyphs; i++)
- t->glyphArray[i].set (glyphs[i]);
+ glyphArray[i].set (glyphs[i]);
return TRACE_RETURN (true);
}
return NOT_COVERED;
}
- inline static bool serialize (hb_serialize_context_t *c,
- const USHORT *glyphs,
- unsigned int num_glyphs)
+ inline bool serialize (hb_serialize_context_t *c,
+ const USHORT *glyphs,
+ unsigned int num_glyphs)
{
TRACE_SERIALIZE ();
- CoverageFormat2 *t = c->allocate_min<CoverageFormat2> ();
+ if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+
+ if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
+
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < num_glyphs; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
- if (unlikely (!t)) return TRACE_RETURN (false);
- t->coverageFormat.set (2);
- t->rangeRecord.len.set (num_ranges);
- if (unlikely (!c->extend (t->rangeRecord))) return TRACE_RETURN (false);
- if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
+ rangeRecord.len.set (num_ranges);
+ if (unlikely (!c->extend (rangeRecord))) return TRACE_RETURN (false);
+
unsigned int range = 0;
- t->rangeRecord[range].start.set (glyphs[0]);
- t->rangeRecord[range].value.set (0);
+ rangeRecord[range].start.set (glyphs[0]);
+ rangeRecord[range].value.set (0);
+ rangeRecord[range].end.set (glyphs[0]);
for (unsigned int i = 1; i < num_glyphs; i++)
if (glyphs[i - 1] + 1 != glyphs[i]) {
- t->rangeRecord[range].start.set (glyphs[i]);
- t->rangeRecord[range].value.set (i);
- range++;
+ range++;
+ rangeRecord[range].start.set (glyphs[i]);
+ rangeRecord[range].value.set (i);
+ rangeRecord[range].end.set (glyphs[i]);
} else {
- t->rangeRecord[range].end = glyphs[i];
+ rangeRecord[range].end.set (glyphs[i]);
}
return TRACE_RETURN (true);
}
}
}
- inline static bool serialize (hb_serialize_context_t *c,
- const USHORT *glyphs,
- unsigned int num_glyphs)
+ inline bool serialize (hb_serialize_context_t *c,
+ const USHORT *glyphs,
+ unsigned int num_glyphs)
{
TRACE_SERIALIZE ();
- unsigned int format;
+ if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < num_glyphs; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
- format = num_glyphs * 2 < num_ranges * 3 ? 1 : 2;
- switch (format) {
- case 1: return TRACE_RETURN (CoverageFormat1::serialize (c, glyphs, num_glyphs));
- case 2: return TRACE_RETURN (CoverageFormat2::serialize (c, glyphs, num_glyphs));
+ u.format.set (num_glyphs * 2 < num_ranges * 3 ? 1 : 2);
+ switch (u.format) {
+ case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs));
+ case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, num_glyphs));
default:return TRACE_RETURN (false);
}
}
return TRACE_RETURN (true);
}
+ inline bool serialize (hb_serialize_context_t *c,
+ const USHORT *glyphs,
+ unsigned int num_glyphs,
+ SHORT delta)
+ {
+ TRACE_SERIALIZE ();
+ if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+ deltaGlyphID.set (delta);
+ coverage.set_offset (this, c->head);
+ if (unlikely (!(this+coverage).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
+ return TRACE_RETURN (true);
+ }
+
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE ();
return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));