/*
- * Copyright (C) 2007,2008,2009,2010 Red Hat, Inc.
+ * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
+ * Copyright © 2010 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
+ * Google Author(s): Behdad Esfahbod
*/
#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-private.hh"
+HB_BEGIN_DECLS
+
+
+/* buffer var allocations */
+#define lig_id() var2.u16[0] /* unique ligature id */
+#define lig_comp() var2.u16[1] /* component number in the ligature (0 = base) */
+
#ifndef HB_DEBUG_APPLY
-#define HB_DEBUG_APPLY HB_DEBUG+0
+#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif
#define TRACE_APPLY() \
hb_trace_t<HB_DEBUG_APPLY> trace (&c->debug_depth, "APPLY", HB_FUNC, this); \
+HB_BEGIN_DECLS
+
struct hb_apply_context_t
{
unsigned int debug_depth;
- hb_ot_layout_context_t *layout;
+ hb_font_t *font;
+ hb_face_t *face;
hb_buffer_t *buffer;
+ hb_direction_t direction;
+ hb_mask_t lookup_mask;
unsigned int context_length;
unsigned int nesting_level_left;
- unsigned int lookup_flag;
- unsigned int property; /* propety of first glyph (TODO remove) */
+ unsigned int lookup_props;
+  unsigned int property; /* property of first glyph */
+
+
+ inline void replace_glyph (hb_codepoint_t glyph_index) const
+ {
+ clear_property ();
+ buffer->replace_glyph (glyph_index);
+ }
+ inline void replace_glyphs_be16 (unsigned int num_in,
+ unsigned int num_out,
+ const uint16_t *glyph_data_be) const
+ {
+ clear_property ();
+ buffer->replace_glyphs_be16 (num_in, num_out, glyph_data_be);
+ }
+
+ inline void guess_glyph_class (unsigned int klass)
+ {
+ /* XXX if ! has gdef */
+ buffer->info[buffer->i].props_cache() = klass;
+ }
+
+ private:
+ inline void clear_property (void) const
+ {
+ /* XXX if has gdef */
+ buffer->info[buffer->i].props_cache() = 0;
+ }
};
unsigned int *context_length_out)
{
unsigned int i, j;
- unsigned int end = MIN (c->buffer->in_length, c->buffer->in_pos + c->context_length);
- if (unlikely (c->buffer->in_pos + count > end))
+ unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
+ if (unlikely (c->buffer->i + count > end))
return false;
- for (i = 1, j = c->buffer->in_pos + 1; i < count; i++, j++)
+ for (i = 1, j = c->buffer->i + 1; i < count; i++, j++)
{
- while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->in_string[j], c->lookup_flag, NULL))
+ while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[j], c->lookup_props, NULL))
{
if (unlikely (j + count - i == end))
return false;
j++;
}
- if (likely (!match_func (c->buffer->in_string[j].codepoint, input[i - 1], match_data)))
+ if (likely (!match_func (c->buffer->info[j].codepoint, input[i - 1], match_data)))
return false;
}
- *context_length_out = j - c->buffer->in_pos;
+ *context_length_out = j - c->buffer->i;
return true;
}
match_func_t match_func,
const void *match_data)
{
- if (unlikely (c->buffer->out_pos < count))
+ if (unlikely (c->buffer->backtrack_len () < count))
return false;
- for (unsigned int i = 0, j = c->buffer->out_pos - 1; i < count; i++, j--)
+ for (unsigned int i = 0, j = c->buffer->backtrack_len () - 1; i < count; i++, j--)
{
- while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->out_string[j], c->lookup_flag, NULL))
+ while (_hb_ot_layout_skip_mark (c->face, &c->buffer->out_info[j], c->lookup_props, NULL))
{
if (unlikely (j + 1 == count - i))
return false;
j--;
}
- if (likely (!match_func (c->buffer->out_string[j].codepoint, backtrack[i], match_data)))
+ if (likely (!match_func (c->buffer->out_info[j].codepoint, backtrack[i], match_data)))
return false;
}
unsigned int offset)
{
unsigned int i, j;
- unsigned int end = MIN (c->buffer->in_length, c->buffer->in_pos + c->context_length);
- if (unlikely (c->buffer->in_pos + offset + count > end))
+ unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
+ if (unlikely (c->buffer->i + offset + count > end))
return false;
- for (i = 0, j = c->buffer->in_pos + offset; i < count; i++, j++)
+ for (i = 0, j = c->buffer->i + offset; i < count; i++, j++)
{
- while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->in_string[j], c->lookup_flag, NULL))
+ while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[j], c->lookup_props, NULL))
{
if (unlikely (j + count - i == end))
return false;
j++;
}
- if (likely (!match_func (c->buffer->in_string[j].codepoint, lookahead[i], match_data)))
+ if (likely (!match_func (c->buffer->info[j].codepoint, lookahead[i], match_data)))
return false;
}
return true;
}
+HB_END_DECLS
+
struct LookupRecord
{
DEFINE_SIZE_STATIC (4);
};
+
+HB_BEGIN_DECLS
+
static inline bool apply_lookup (hb_apply_context_t *c,
unsigned int count, /* Including the first glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
apply_lookup_func_t apply_func)
{
- unsigned int end = MIN (c->buffer->in_length, c->buffer->in_pos + c->context_length);
- if (unlikely (c->buffer->in_pos + count > end))
+ unsigned int end = MIN (c->buffer->len, c->buffer->i + c->context_length);
+ if (unlikely (count == 0 || c->buffer->i + count > end))
return false;
/* TODO We don't support lookupRecord arrays that are not increasing:
* Should be easy for in_place ones at least. */
- /* Note: If sublookup is reverse, i will underflow after the first loop
+ /* Note: If sublookup is reverse, it will underflow after the first loop
* and we jump out of it. Not entirely disastrous. So we don't check
* for reverse lookup here.
*/
for (unsigned int i = 0; i < count; /* NOP */)
{
- while (_hb_ot_layout_skip_mark (c->layout->face, &c->buffer->in_string[c->buffer->in_pos], c->lookup_flag, NULL))
+ while (_hb_ot_layout_skip_mark (c->face, &c->buffer->info[c->buffer->i], c->lookup_props, NULL))
{
- if (unlikely (c->buffer->in_pos == end))
+ if (unlikely (c->buffer->i == end))
return true;
/* No lookup applied for this index */
c->buffer->next_glyph ();
if (lookupCount && i == lookupRecord->sequenceIndex)
{
- unsigned int old_pos = c->buffer->in_pos;
+ unsigned int old_pos = c->buffer->i;
/* Apply a lookup */
bool done = apply_func (c, lookupRecord->lookupListIndex);
lookupRecord++;
lookupCount--;
/* Err, this is wrong if the lookup jumped over some glyphs */
- i += c->buffer->in_pos - old_pos;
- if (unlikely (c->buffer->in_pos == end))
+ i += c->buffer->i - old_pos;
+ if (unlikely (c->buffer->i == end))
return true;
if (!done)
return true;
}
+HB_END_DECLS
+
/* Contextual lookups */
inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
{
TRACE_APPLY ();
- unsigned int index = (this+coverage) (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
if (likely (index == NOT_COVERED))
return false;
inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
{
TRACE_APPLY ();
- unsigned int index = (this+coverage) (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
if (likely (index == NOT_COVERED))
return false;
const ClassDef &class_def = this+classDef;
- index = class_def (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ index = class_def (c->buffer->info[c->buffer->i].codepoint);
const RuleSet &rule_set = this+ruleSet[index];
- /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
- * them across subrule lookups. Not sure it's worth it.
- */
struct ContextLookupContext lookup_context = {
{match_class, apply_func},
&class_def
inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
{
TRACE_APPLY ();
- unsigned int index = (this+coverage[0]) (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ unsigned int index = (this+coverage[0]) (c->buffer->info[c->buffer->i].codepoint);
if (likely (index == NOT_COVERED))
return false;
ChainContextLookupContext &lookup_context)
{
/* First guess */
- if (unlikely (c->buffer->out_pos < backtrackCount ||
- c->buffer->in_pos + inputCount + lookaheadCount > c->buffer->in_length ||
+ if (unlikely (c->buffer->backtrack_len () < backtrackCount ||
+ c->buffer->i + inputCount + lookaheadCount > c->buffer->len ||
inputCount + lookaheadCount > c->context_length))
return false;
lookahead.len, lookahead.array,
lookup.len, lookup.array,
lookup_context);
- return false;
}
public:
inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
{
TRACE_APPLY ();
- unsigned int index = (this+coverage) (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
if (likely (index == NOT_COVERED))
return false;
inline bool apply (hb_apply_context_t *c, apply_lookup_func_t apply_func) const
{
TRACE_APPLY ();
- unsigned int index = (this+coverage) (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ unsigned int index = (this+coverage) (c->buffer->info[c->buffer->i].codepoint);
if (likely (index == NOT_COVERED))
return false;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
- index = input_class_def (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ index = input_class_def (c->buffer->info[c->buffer->i].codepoint);
const ChainRuleSet &rule_set = this+ruleSet[index];
- /* LONGTERMTODO: Old code fetches glyph classes at most once and caches
- * them across subrule lookups. Not sure it's worth it.
- */
struct ChainContextLookupContext lookup_context = {
{match_class, apply_func},
{&backtrack_class_def,
TRACE_APPLY ();
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
- unsigned int index = (this+input[0]) (c->buffer->in_string[c->buffer->in_pos].codepoint);
+ unsigned int index = (this+input[0]) (c->buffer->info[c->buffer->i].codepoint);
if (likely (index == NOT_COVERED))
return false;
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array,
lookup_context);
- return false;
}
inline bool sanitize (hb_sanitize_context_t *c) {
};
+HB_END_DECLS
+
#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */