12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
31
32 #include "hb-buffer-private.hh"
33 #include "hb-ot-layout-gdef-table.hh"
34 #include "hb-set-private.hh"
35
36
37 namespace OT {
38
39
40 #ifndef HB_DEBUG_CLOSURE
41 #define HB_DEBUG_CLOSURE (HB_DEBUG+0)
42 #endif
43
44 #define TRACE_CLOSURE(this) \
45 hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
46 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
47 "");
48
/* Context for computing glyph closure: expands `glyphs` with every glyph
 * the visited (GSUB) lookups can produce from it.  Driven through
 * hb_dispatch_context_t; each lookup's closure() adds its reachable
 * glyphs into `glyphs`. */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE>
{
  inline const char *get_name (void) { return "CLOSURE"; }
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  /* Closure must visit every sublookup; never stop iteration early. */
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  /* Recurse into another lookup by index.  nesting_level_left bounds the
   * depth so cyclic or pathological lookup graphs cannot recurse forever. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *glyphs;		/* IN/OUT: glyph set being closed over. */
  recurse_func_t recurse_func;	/* Set via set_recurse_func(); NULL disables recursion. */
  unsigned int nesting_level_left; /* Remaining recursion budget. */
  unsigned int debug_depth;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  recurse_func (NULL),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
86
87
88
89 #ifndef HB_DEBUG_WOULD_APPLY
90 #define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
91 #endif
92
93 #define TRACE_WOULD_APPLY(this) \
94 hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
95 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
96 "%d glyphs", c->len);
97
/* Context asking "would this lookup apply to this exact glyph sequence?"
 * without touching a real buffer.  Dispatch returns true as soon as any
 * sublookup reports it would apply. */
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
{
  inline const char *get_name (void) { return "WOULD_APPLY"; }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value (void) { return false; }
  /* Stop iterating sublookups as soon as one answers "yes". */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;	/* The glyph sequence being tested. */
  unsigned int len;		/* Number of glyphs in `glyphs`. */
  bool zero_context;		/* If true, rules requiring backtrack/lookahead
				 * context are rejected (see its use in
				 * chain_context_would_apply_lookup). */
  unsigned int debug_depth;

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_),
			      debug_depth (0) {}
};
123
124
125
126 #ifndef HB_DEBUG_COLLECT_GLYPHS
127 #define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
128 #endif
129
130 #define TRACE_COLLECT_GLYPHS(this) \
131 hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
132 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
133 "");
134
/* Context that collects, into four sets, the glyphs a lookup can touch:
 * context before the input, the input itself, context after, and the
 * glyphs the lookup can output.  Any of the four may be the shared empty
 * set when the caller doesn't want that category.  Recursion into other
 * lookups is de-duplicated via `recursed_lookups` and only contributes
 * to `output`. */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  /* Collection must visit every sublookup; never stop early. */
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
     * past the previous check. For GSUB, we only want to collect the output
     * glyphs in the recursion. If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct. A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built. It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return HB_VOID;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups.has (lookup_index))
      return HB_VOID;

    /* Swap in the shared empty set for before/input/after so the recursed
     * lookup contributes only to `output`; restore afterwards. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups.add (lookup_index);

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;		/* OUT: glyphs usable before the input. */
  hb_set_t *input;		/* OUT: glyphs matchable as input. */
  hb_set_t *after;		/* OUT: glyphs usable after the input. */
  hb_set_t *output;		/* OUT: glyphs the lookup can produce. */
  recurse_func_t recurse_func;
  hb_set_t recursed_lookups;	/* Lookup indices already recursed into. */
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t *glyphs_before, /* OUT. May be NULL */
			       hb_set_t *glyphs_input,  /* OUT. May be NULL */
			       hb_set_t *glyphs_after,  /* OUT. May be NULL */
			       hb_set_t *glyphs_output, /* OUT. May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
			      after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (NULL),
			      recursed_lookups (),
			      nesting_level_left (nesting_level_left_),
			      debug_depth (0)
  {
    recursed_lookups.init ();
  }
  ~hb_collect_glyphs_context_t (void)
  {
    recursed_lookups.fini ();
  }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
219
220
221
222 #ifndef HB_DEBUG_GET_COVERAGE
223 #define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
224 #endif
225
226 /* XXX Can we remove this? */
227
/* Context that accumulates into `set` the Coverage of every sublookup it
 * iterates over.  return_t is a Coverage reference precisely so that
 * stop_sublookup_iteration() can harvest each one as it goes by. */
template <typename set_t>
struct hb_add_coverage_context_t :
       hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
{
  inline const char *get_name (void) { return "GET_COVERAGE"; }
  typedef const Coverage &return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value (void) { return Null(Coverage); }
  /* Invoked with each sublookup's coverage: fold it into `set`; never
   * stop early so all sublookups contribute. */
  bool stop_sublookup_iteration (return_t r) const
  {
    r.add_coverage (set);
    return false;
  }

  hb_add_coverage_context_t (set_t *set_) :
			       set (set_),
			       debug_depth (0) {}

  set_t *set;			/* OUT: receives the union of coverages. */
  unsigned int debug_depth;
};
250
251
252
253 #ifndef HB_DEBUG_APPLY
254 #define HB_DEBUG_APPLY (HB_DEBUG+0)
255 #endif
256
257 #define TRACE_APPLY(this) \
258 hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
259 (&c->debug_depth, c->get_name (), this, HB_FUNC, \
260 "idx %d gid %u lookup %d", \
261 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index);
262
263 struct hb_apply_context_t :
264 hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
265 {
266 struct matcher_t
267 {
268 inline matcher_t (void) :
269 lookup_props (0),
270 ignore_zwnj (false),
271 ignore_zwj (false),
272 mask (-1),
273 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
274 syllable arg1(0),
275 #undef arg1
276 match_func (NULL),
277 match_data (NULL) {};
278
279 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
280
281 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
282 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
283 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
284 inline void set_mask (hb_mask_t mask_) { mask = mask_; }
285 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
286 inline void set_match_func (match_func_t match_func_,
287 const void *match_data_)
288 { match_func = match_func_; match_data = match_data_; }
289
290 enum may_match_t {
291 MATCH_NO,
292 MATCH_YES,
293 MATCH_MAYBE
294 };
295
296 inline may_match_t may_match (const hb_glyph_info_t &info,
297 const USHORT *glyph_data) const
302
303 if (match_func)
304 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
305
306 return MATCH_MAYBE;
307 }
308
309 enum may_skip_t {
310 SKIP_NO,
311 SKIP_YES,
312 SKIP_MAYBE
313 };
314
    /* Decide whether `info` should be skipped while matching:
     *  SKIP_YES   — glyph fails the lookup-props glyph filter;
     *  SKIP_MAYBE — default-ignorable (and not an FVS) whose ZWNJ/ZWJ
     *               status permits skipping; caller may skip it unless
     *               it matches explicitly;
     *  SKIP_NO    — glyph must be considered. */
    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
	      const hb_glyph_info_t &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_fvs (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }
329
330 protected:
331 unsigned int lookup_props;
332 bool ignore_zwnj;
333 bool ignore_zwj;
334 hb_mask_t mask;
335 uint8_t syllable;
336 match_func_t match_func;
337 const void *match_data;
338 };
339
340 struct skipping_iterator_t
341 {
    /* Prepare the iterator for matching over c_'s buffer.
     * context_match=true is used when matching backtrack/lookahead
     * context, which ignores the lookup mask and is laxer about
     * ZWNJ/ZWJ skipping. */
    inline void init (hb_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = NULL, /* NOTE: comma operator — acts like ';' here. */
      matcher.set_match_func (NULL, NULL);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (context_match || c->table_index == 1);
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
354 inline void set_lookup_props (unsigned int lookup_props)
355 {
356 matcher.set_lookup_props (lookup_props);
357 }
358 inline void set_match_func (matcher_t::match_func_t match_func_,
359 const void *match_data_,
360 const USHORT glyph_data[])
361 {
362 matcher.set_match_func (match_func_, match_data_);
363 match_glyph_data = glyph_data;
364 }
365
    /* Position the iterator at start_index_ with num_items_ glyphs still
     * to match.  Syllable matching is only constrained when the match
     * starts at the buffer's current position; otherwise syllable 0
     * (i.e. "any") is used. */
    inline void reset (unsigned int start_index_,
		       unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }
374
375 inline void reject (void) { num_items++; match_glyph_data--; }
376
377 inline bool next (void)
378 {
379 assert (num_items > 0);
380 while (idx + num_items < end)
381 {
382 idx++;
383 const hb_glyph_info_t &info = c->buffer->info[idx];
384
385 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
386 if (unlikely (skip == matcher_t::SKIP_YES))
387 continue;
388
389 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
390 if (match == matcher_t::MATCH_YES ||
391 (match == matcher_t::MATCH_MAYBE &&
392 skip == matcher_t::SKIP_NO))
393 {
394 num_items--;
395 match_glyph_data++;
396 return true;
440 };
441
442
443 inline const char *get_name (void) { return "APPLY"; }
444 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
445 template <typename T>
446 inline return_t dispatch (const T &obj) { return obj.apply (this); }
447 static return_t default_return_value (void) { return false; }
448 bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Apply another lookup (by index) at the current buffer position,
   * returning whether it applied.  nesting_level_left bounds recursion
   * depth; recurse_func must have been installed via set_recurse_func. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, lookup_index);
    nesting_level_left++;
    return ret;
  }
459
460 unsigned int table_index; /* GSUB/GPOS */
461 hb_font_t *font;
462 hb_face_t *face;
463 hb_buffer_t *buffer;
464 hb_direction_t direction;
465 hb_mask_t lookup_mask;
466 bool auto_zwj;
467 recurse_func_t recurse_func;
468 unsigned int nesting_level_left;
469 unsigned int lookup_props;
470 const GDEF &gdef;
471 bool has_glyph_classes;
472 const VariationStore &var_store;
473 skipping_iterator_t iter_input, iter_context;
474 unsigned int lookup_index;
475 unsigned int debug_depth;
476
477
  /* table_index_ selects the table being applied: elsewhere in this
   * struct `table_index == 1` is treated as GPOS (see skipping_iterator_t
   * ::init), so 0 is GSUB.  Most state (mask, lookup props/index,
   * recurse func) is filled in later through the set_* methods. */
  hb_apply_context_t (unsigned int table_index_,
		      hb_font_t *font_,
		      hb_buffer_t *buffer_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (1),
			auto_zwj (true),
			recurse_func (NULL),
			nesting_level_left (HB_MAX_NESTING_LEVEL),
			lookup_props (0),
			gdef (*hb_ot_layout_from_face (face)->gdef),
			has_glyph_classes (gdef.has_glyph_classes ()),
			var_store (gdef.get_var_store ()),
			iter_input (),
			iter_context (),
			lookup_index ((unsigned int) -1),
			debug_depth (0) {}
496
497 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
498 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
499 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
500 inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
501 inline void set_lookup_props (unsigned int lookup_props_)
502 {
503 lookup_props = lookup_props_;
504 iter_input.init (this, false);
505 iter_context.init (this, true);
506 }
507
508 inline bool
509 match_properties_mark (hb_codepoint_t glyph,
510 unsigned int glyph_props,
511 unsigned int match_props) const
512 {
513 /* If using mark filtering sets, the high short of
514 * match_props has the set index.
515 */
516 if (match_props & LookupFlag::UseMarkFilteringSet)
517 return gdef.mark_set_covers (match_props >> 16, glyph);
518
690 const USHORT input[], /* Array of input values--start with second glyph */
691 match_func_t match_func,
692 const void *match_data)
693 {
694 if (count != c->len)
695 return false;
696
697 for (unsigned int i = 1; i < count; i++)
698 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
699 return false;
700
701 return true;
702 }
/* Match `count` glyphs starting at the buffer's current position.
 * Position 0 is the current glyph (not tested here — the caller matched
 * it via coverage); the following count-1 non-skipped glyphs are tested
 * against input[] via match_func/match_data.  On success:
 *  - match_positions[0..count-1] hold the buffer indices of the matched glyphs;
 *  - *end_offset is the total matched span including skipped glyphs;
 *  - *p_is_mark_ligature / *p_total_component_count (if non-NULL) report
 *    the ligature bookkeeping described in the long comment below. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset,
				unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
				bool *p_is_mark_ligature = NULL,
				unsigned int *p_total_component_count = NULL)
{
  TRACE_APPLY (NULL);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType... Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There is an exception to this: If a ligature tries ligating with marks that
   *   belong to it itself, go ahead, assuming that the font designer knows what
   *   they are doing (otherwise it can break Indic stuff when a matra wants to
   *   ligate with a conjunct...)
   */

  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp) {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them. */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
	return_trace (false);
    } else {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  /* Span from the first matched glyph through the last, inclusive. */
  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
787 static inline bool ligate_input (hb_apply_context_t *c,
788 unsigned int count, /* Including the first glyph */
789 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
790 unsigned int match_length,
791 hb_codepoint_t lig_glyph,
792 bool is_mark_ligature,
793 unsigned int total_component_count)
794 {
795 TRACE_APPLY (NULL);
796
797 hb_buffer_t *buffer = c->buffer;
798
799 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
800
801 /*
802 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
803 * the ligature to keep its old ligature id. This will allow it to attach to
804 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
806 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
807 * later, we don't want them to lose their ligature id/component, otherwise
808 * GPOS will fail to correctly position the mark ligature on top of the
809 * LAM,LAM,HEH ligature. See:
810 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
811 *
812 * - If a ligature is formed of components that some of which are also ligatures
813 * themselves, and those ligature components had marks attached to *their*
814 * components, we have to attach the marks to the new ligature component
815 * positions! Now *that*'s tricky! And these marks may be following the
869 /* Re-adjust components for any marks following. */
870 for (unsigned int i = buffer->idx; i < buffer->len; i++) {
871 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
872 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
873 if (!this_comp)
874 break;
875 unsigned int new_lig_comp = components_so_far - last_num_components +
876 MIN (this_comp, last_num_components);
877 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
878 } else
879 break;
880 }
881 }
882 return_trace (true);
883 }
884
885 static inline bool match_backtrack (hb_apply_context_t *c,
886 unsigned int count,
887 const USHORT backtrack[],
888 match_func_t match_func,
889 const void *match_data)
890 {
891 TRACE_APPLY (NULL);
892
893 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
894 skippy_iter.reset (c->buffer->backtrack_len (), count);
895 skippy_iter.set_match_func (match_func, match_data, backtrack);
896
897 for (unsigned int i = 0; i < count; i++)
898 if (!skippy_iter.prev ())
899 return_trace (false);
900
901 return_trace (true);
902 }
903
904 static inline bool match_lookahead (hb_apply_context_t *c,
905 unsigned int count,
906 const USHORT lookahead[],
907 match_func_t match_func,
908 const void *match_data,
909 unsigned int offset)
910 {
911 TRACE_APPLY (NULL);
912
913 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
914 skippy_iter.reset (c->buffer->idx + offset - 1, count);
915 skippy_iter.set_match_func (match_func, match_data, lookahead);
916
917 for (unsigned int i = 0; i < count; i++)
918 if (!skippy_iter.next ())
919 return_trace (false);
920
921 return_trace (true);
922 }
923
924
925
/* A (sequence position, lookup index) pair: which lookup to apply at
 * which position of a matched context sequence. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};
941
942
943 template <typename context_t>
944 static inline void recurse_lookups (context_t *c,
945 unsigned int lookupCount,
946 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
947 {
948 for (unsigned int i = 0; i < lookupCount; i++)
949 c->recurse (lookupRecord[i].lookupListIndex);
950 }
951
952 static inline bool apply_lookup (hb_apply_context_t *c,
953 unsigned int count, /* Including the first glyph */
954 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
955 unsigned int lookupCount,
956 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
957 unsigned int match_length)
958 {
959 TRACE_APPLY (NULL);
960
961 hb_buffer_t *buffer = c->buffer;
962 unsigned int end;
963
964 /* All positions are distance from beginning of *output* buffer.
965 * Adjust. */
966 {
967 unsigned int bl = buffer->backtrack_len ();
968 end = bl + match_length;
969
970 int delta = bl - buffer->idx;
971 /* Convert positions to new indexing. */
972 for (unsigned int j = 0; j < count; j++)
973 match_positions[j] += delta;
974 }
975
976 for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++)
977 {
978 unsigned int idx = lookupRecord[i].sequenceIndex;
979 if (idx >= count)
980 continue;
981
982 /* Don't recurse to ourself at same position.
983 * Note that this test is too naive, it doesn't catch longer loops. */
984 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
985 continue;
986
987 buffer->move_to (match_positions[idx]);
988
989 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
990 if (!c->recurse (lookupRecord[i].lookupListIndex))
991 continue;
992
993 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
994 int delta = new_len - orig_len;
995
996 if (!delta)
997 continue;
998
999 /* Recursed lookup changed buffer len. Adjust. */
1000
1001 end = int (end) + delta;
1002 if (end <= match_positions[idx])
1003 {
1004 /* End might end up being smaller than match_positions[idx] if the recursed
1005 * lookup ended up removing many items, more than we have had matched.
1006 * Just never rewind end back and get out of here.
1007 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1008 end = match_positions[idx];
1009 /* There can't be any further changes. */
1010 break;
1011 }
1012
1013 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1014
1015 if (delta > 0)
1016 {
1017 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1018 break;
1019 }
1020 else
1021 {
1022 /* NOTE: delta is negative. */
1101 const LookupRecord lookupRecord[] HB_UNUSED,
1102 ContextApplyLookupContext &lookup_context)
1103 {
1104 return would_match_input (c,
1105 inputCount, input,
1106 lookup_context.funcs.match, lookup_context.match_data);
1107 }
1108 static inline bool context_apply_lookup (hb_apply_context_t *c,
1109 unsigned int inputCount, /* Including the first glyph (not matched) */
1110 const USHORT input[], /* Array of input values--start with second glyph */
1111 unsigned int lookupCount,
1112 const LookupRecord lookupRecord[],
1113 ContextApplyLookupContext &lookup_context)
1114 {
1115 unsigned int match_length = 0;
1116 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1117 return match_input (c,
1118 inputCount, input,
1119 lookup_context.funcs.match, lookup_context.match_data,
1120 &match_length, match_positions)
1121 && apply_lookup (c,
1122 inputCount, match_positions,
1123 lookupCount, lookupRecord,
1124 match_length);
1125 }
1126
1127 struct Rule
1128 {
  /* Closure: the LookupRecord array sits immediately after the
   * inputCount-1 input glyph values; locate it and delegate to the
   * shared context closure helper. */
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
			    inputCount, inputZ,
			    lookupCount, lookupRecord,
			    lookup_context);
  }
1138
1139 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1140 {
1141 TRACE_COLLECT_GLYPHS (this);
1142 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1143 context_collect_glyphs_lookup (c,
1144 inputCount, inputZ,
1147 }
1148
  /* Would this rule's input sequence match the queried glyph sequence?
   * (Lookup records are located but only the input is tested.) */
  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }
1155
  /* Apply this rule at the current buffer position: match the input
   * sequence and, on success, run its lookup records. */
  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }
1162
1163 public:
  /* Sanitize: validate the two counts, then check that both trailing
   * variable-length arrays (inputs and lookup records) fit in the blob. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (inputZ,
			   inputZ[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }
1173
1174 protected:
1175 USHORT inputCount; /* Total number of glyphs in input
1176 * glyph sequence--includes the first
1177 * glyph */
1178 USHORT lookupCount; /* Number of LookupRecords */
1179 USHORT inputZ[VAR]; /* Array of match inputs--start with
1180 * second glyph */
1181 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1182 * design order */
1183 public:
1184 DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
1185 };
1186
1187 struct RuleSet
1188 {
1189 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1190 {
1191 TRACE_CLOSURE (this);
1234
1235 protected:
1236 OffsetArrayOf<Rule>
1237 rule; /* Array of Rule tables
1238 * ordered by preference */
1239 public:
1240 DEFINE_SIZE_ARRAY (2, rule);
1241 };
1242
1243
1244 struct ContextFormat1
1245 {
  /* Closure: for every coverage index whose glyphs intersect the current
   * closure set, run the corresponding rule set's closure. */
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }
1264
  /* Collect glyphs: first glyphs come from our coverage; the rest come
   * from each rule set. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }
1279
  /* Would any rule keyed on the first queried glyph apply?  The rule set
   * is selected by the first glyph's coverage index. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }
1291
  /* Coverage of the first input glyph of every rule in this subtable. */
  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }
1296
  /* Apply: look the current glyph up in coverage; if covered, try the
   * corresponding rule set.  likely(NOT_COVERED) because most glyphs
   * are not covered by any context rule. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.apply (c, lookup_context));
  }
1311
  /* Sanitize the coverage offset and every rule-set offset/array. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }
1317
1318 protected:
1319 USHORT format; /* Format identifier--format = 1 */
1320 OffsetTo<Coverage>
1321 coverage; /* Offset to Coverage table--from
1322 * beginning of table */
1323 OffsetArrayOf<RuleSet>
1324 ruleSet; /* Array of RuleSet tables
1325 * ordered by Coverage Index */
1326 public:
1327 DEFINE_SIZE_ARRAY (6, ruleSet);
1622 const LookupRecord lookupRecord[] HB_UNUSED,
1623 ChainContextApplyLookupContext &lookup_context)
1624 {
1625 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1626 && would_match_input (c,
1627 inputCount, input,
1628 lookup_context.funcs.match, lookup_context.match_data[1]);
1629 }
1630
/* Applies one chained-context rule at the current buffer position.
 *
 * Matching order matters: the input sequence is matched first (filling
 * match_positions and match_length), then the backtrack glyphs before the
 * current position, then the lookahead glyphs after the matched input
 * (which is why match_length is passed to match_lookahead).  Only when all
 * three succeed are the nested lookupRecord actions applied over the
 * matched input range.
 *
 * match_data[0]/[1]/[2] carry the per-sequence match data for
 * backtrack/input/lookahead respectively. */
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
					       unsigned int backtrackCount,
					       const USHORT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const USHORT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       ChainContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &match_length, match_positions)
      && match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  match_length)
      && apply_lookup (c,
		       inputCount, match_positions,
		       lookupCount, lookupRecord,
		       match_length);
}
1660
1661 struct ChainRule
1662 {
  /* Computes glyph closure for this chain rule.
   *
   * A ChainRule stores four variable-length arrays back-to-back
   * (backtrack, input, lookahead, lookup records); StructAfter locates
   * each successive array immediately past the previous one, so the
   * chaining order of the three lookups below must not change. */
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }
1676
1677 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1678 {
1790 return_trace (rule.sanitize (c, this));
1791 }
1792
1793 protected:
1794 OffsetArrayOf<ChainRule>
1795 rule; /* Array of ChainRule tables
1796 * ordered by preference */
1797 public:
1798 DEFINE_SIZE_ARRAY (2, rule);
1799 };
1800
1801 struct ChainContextFormat1
1802 {
1803 inline void closure (hb_closure_context_t *c) const
1804 {
1805 TRACE_CLOSURE (this);
1806 const Coverage &cov = (this+coverage);
1807
1808 struct ChainContextClosureLookupContext lookup_context = {
1809 {intersects_glyph},
1810 {NULL, NULL, NULL}
1811 };
1812
1813 unsigned int count = ruleSet.len;
1814 for (unsigned int i = 0; i < count; i++)
1815 if (cov.intersects_coverage (c->glyphs, i)) {
1816 const ChainRuleSet &rule_set = this+ruleSet[i];
1817 rule_set.closure (c, lookup_context);
1818 }
1819 }
1820
1821 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1822 {
1823 TRACE_COLLECT_GLYPHS (this);
1824 (this+coverage).add_coverage (c->input);
1825
1826 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1827 {collect_glyph},
1828 {NULL, NULL, NULL}
1829 };
1830
1831 unsigned int count = ruleSet.len;
1832 for (unsigned int i = 0; i < count; i++)
1833 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1834 }
1835
  /* Tests whether this subtable would apply to the glyph sequence in c,
   * without touching the buffer. */
  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    /* NOTE(review): unlike apply(), there is no NOT_COVERED check here;
     * presumably an out-of-range index into ruleSet yields a Null rule
     * set that never matches -- confirm against OffsetArrayOf semantics. */
    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }
1847
1848 inline const Coverage &get_coverage (void) const
1849 {
1850 return this+coverage;
1851 }
1852
  /* Applies this subtable at the current buffer position.  Returns true
   * if a rule in the selected rule set matched and was applied. */
  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Look up the current glyph in the coverage table; bail if uncovered. */
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* The coverage index selects which rule set to try. */
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }
1866
1867 inline bool sanitize (hb_sanitize_context_t *c) const
1868 {
1869 TRACE_SANITIZE (this);
1870 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1871 }
1872
1873 protected:
1874 USHORT format; /* Format identifier--format = 1 */
1875 OffsetTo<Coverage>
1876 coverage; /* Offset to Coverage table--from
1877 * beginning of table */
1878 OffsetArrayOf<ChainRuleSet>
1879 ruleSet; /* Array of ChainRuleSet tables
1880 * ordered by Coverage Index */
1881 public:
1882 DEFINE_SIZE_ARRAY (6, ruleSet);
  /* Sanitizes the top-level GSUB/GPOS table.
   * Only minor-version bumps are treated as forward-compatible, hence the
   * hard check on major == 1.  featureVars was introduced in version
   * 0x00010001, so it is only validated when the table claims at least
   * that version; older tables simply don't have the field. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  scriptList.sanitize (c, this) &&
		  featureList.sanitize (c, this) &&
		  lookupList.sanitize (c, this) &&
		  (version.to_int () < 0x00010001u || featureVars.sanitize (c, this)));
  }
2306
2307 protected:
2308 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set
2309 * to 0x00010000u */
2310 OffsetTo<ScriptList>
2311 scriptList; /* ScriptList table */
2312 OffsetTo<FeatureList>
2313 featureList; /* FeatureList table */
2314 OffsetTo<LookupList>
2315 lookupList; /* LookupList table */
2316 OffsetTo<FeatureVariations, ULONG>
2317 featureVars; /* Offset to Feature Variations
2318 table--from beginning of table
2319 * (may be NULL). Introduced
2320 * in version 0x00010001. */
2321 public:
2322 DEFINE_SIZE_MIN (10);
2323 };
2324
2325
2326 } /* namespace OT */
2327
2328
2329 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
|
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
31
32 #include "hb-private.hh"
33 #include "hb-debug.hh"
34 #include "hb-buffer-private.hh"
35 #include "hb-ot-layout-gdef-table.hh"
36 #include "hb-set-private.hh"
37
38
39 namespace OT {
40
41
/* Dispatch context used to compute glyph closure: the transitive set of
 * glyphs reachable from a starting glyph set through GSUB lookups.
 * Results accumulate into `glyphs`. */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE>
{
  inline const char *get_name (void) { return "CLOSURE"; }
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  /* Closure visits every sublookup; nothing short-circuits. */
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  /* Recurses into a nested lookup, bounded by nesting_level_left to stop
   * runaway / cyclic lookup graphs in malformed fonts. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *glyphs;             /* In/out: the glyph set being closed over. */
  recurse_func_t recurse_func;  /* Set by the table driver via set_recurse_func. */
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  recurse_func (nullptr),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
79
80
/* Dispatch context for the "would this lookup apply?" query: tests a
 * fixed glyph sequence against lookups without mutating any buffer. */
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
{
  inline const char *get_name (void) { return "WOULD_APPLY"; }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value (void) { return false; }
  /* First sublookup that would apply wins; stop iterating on true. */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs; /* The glyph sequence being queried. */
  unsigned int len;             /* Number of glyphs in `glyphs`. */
  bool zero_context;            /* If true, rules with backtrack/lookahead don't count. */
  unsigned int debug_depth;

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_),
			      debug_depth (0) {}
};
106
107
/* Dispatch context that collects the glyph sets a lookup can consume or
 * produce, split into before/input/after/output roles.  Callers may pass
 * nullptr for sets they don't care about; those are aliased to the shared
 * empty set, which doubles as a cheap "not requested" sentinel below. */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check. For GSUB, we only want to collect the output
     * glyphs in the recursion. If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct. A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built. It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return HB_VOID;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return HB_VOID;

    /* Temporarily redirect before/input/after to the empty sink so the
     * recursed lookup only contributes to `output`; restore afterwards. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    /* Remember we've been here, so lookup graphs with shared sublookups
     * (or cycles) are each visited once. */
    recursed_lookups->add (lookup_index);

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;   /* Owned by this context; see ctor/dtor. */
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t *glyphs_before, /* OUT. May be nullptr */
			       hb_set_t *glyphs_input, /* OUT. May be nullptr */
			       hb_set_t *glyphs_after, /* OUT. May be nullptr */
			       hb_set_t *glyphs_output, /* OUT. May be nullptr */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
			      after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (nullptr),
			      recursed_lookups (nullptr),
			      nesting_level_left (nesting_level_left_),
			      debug_depth (0)
  {
    recursed_lookups = hb_set_create ();
  }
  ~hb_collect_glyphs_context_t (void)
  {
    hb_set_destroy (recursed_lookups);
  }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
192
193
194
195 /* XXX Can we remove this? */
196
/* Dispatch context that accumulates the union of every sublookup's
 * Coverage table into `set`.  The coverage is folded in inside
 * stop_sublookup_iteration (which never actually stops iteration). */
template <typename set_t>
struct hb_add_coverage_context_t :
       hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
{
  inline const char *get_name (void) { return "GET_COVERAGE"; }
  typedef const Coverage &return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value (void) { return Null(Coverage); }
  bool stop_sublookup_iteration (return_t r) const
  {
    /* Side effect on purpose: fold this sublookup's coverage into the set. */
    r.add_coverage (set);
    return false;
  }

  hb_add_coverage_context_t (set_t *set_) :
			       set (set_),
			       debug_depth (0) {}

  set_t *set;
  unsigned int debug_depth;
};
219
220
221 struct hb_apply_context_t :
222 hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
223 {
224 struct matcher_t
225 {
    /* Default matcher: matches any glyph (mask -1, no match function),
     * with no skipping configured (lookup_props 0).  The arg1 macro wraps
     * the `syllable` initializer to work around a parsing problem the
     * original author hit -- presumably a name collision; see the inline
     * comment and confirm before touching. */
    inline matcher_t (void) :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
	     syllable arg1(0),
#undef arg1
	     match_func (nullptr),
	     match_data (nullptr) {};
236
237 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
238
239 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
240 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
241 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
242 inline void set_mask (hb_mask_t mask_) { mask = mask_; }
243 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
244 inline void set_match_func (match_func_t match_func_,
245 const void *match_data_)
246 { match_func = match_func_; match_data = match_data_; }
247
248 enum may_match_t {
249 MATCH_NO,
250 MATCH_YES,
251 MATCH_MAYBE
252 };
253
254 inline may_match_t may_match (const hb_glyph_info_t &info,
255 const USHORT *glyph_data) const
260
261 if (match_func)
262 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
263
264 return MATCH_MAYBE;
265 }
266
267 enum may_skip_t {
268 SKIP_NO,
269 SKIP_YES,
270 SKIP_MAYBE
271 };
272
    /* Classifies a glyph for the skipping iterator:
     *   SKIP_YES   -- fails the lookup's glyph-property filter; always skip.
     *   SKIP_MAYBE -- default-ignorable (and not a ZWNJ/ZWJ we must keep);
     *                 skipped unless it explicitly matches.
     *   SKIP_NO    -- must be considered for matching. */
    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
	      const hb_glyph_info_t &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }
287
288 protected:
289 unsigned int lookup_props;
290 bool ignore_zwnj;
291 bool ignore_zwj;
292 hb_mask_t mask;
293 uint8_t syllable;
294 match_func_t match_func;
295 const void *match_data;
296 };
297
298 struct skipping_iterator_t
299 {
    /* Configures the iterator for either input matching (context_match ==
     * false) or context (backtrack/lookahead) matching (true).  Note the
     * intentional asymmetry: ZWNJ is ignored in context only when the
     * caller asked (auto_zwnj), while ZWJ is always ignored in context. */
    inline void init (hb_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = nullptr;
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (c->table_index == 1 || (context_match || c->auto_zwj));
      /* Context matching ignores the lookup mask; input matching honors it. */
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
312 inline void set_lookup_props (unsigned int lookup_props)
313 {
314 matcher.set_lookup_props (lookup_props);
315 }
316 inline void set_match_func (matcher_t::match_func_t match_func_,
317 const void *match_data_,
318 const USHORT glyph_data[])
319 {
320 matcher.set_match_func (match_func_, match_data_);
321 match_glyph_data = glyph_data;
322 }
323
    /* Positions the iterator at start_index_ with num_items_ glyphs left
     * to match.  Syllable-restricted matching only applies when starting
     * at the buffer's current position; starting elsewhere (e.g. for
     * backtrack) uses syllable 0, i.e. no restriction. */
    inline void reset (unsigned int start_index_,
		       unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }
332
333 inline void reject (void) { num_items++; match_glyph_data--; }
334
335 inline matcher_t::may_skip_t
336 may_skip (const hb_apply_context_t *c,
337 const hb_glyph_info_t &info) const
338 {
339 return matcher.may_skip (c, info);
340 }
341
342 inline bool next (void)
343 {
344 assert (num_items > 0);
345 while (idx + num_items < end)
346 {
347 idx++;
348 const hb_glyph_info_t &info = c->buffer->info[idx];
349
350 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
351 if (unlikely (skip == matcher_t::SKIP_YES))
352 continue;
353
354 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
355 if (match == matcher_t::MATCH_YES ||
356 (match == matcher_t::MATCH_MAYBE &&
357 skip == matcher_t::SKIP_NO))
358 {
359 num_items--;
360 match_glyph_data++;
361 return true;
405 };
406
407
408 inline const char *get_name (void) { return "APPLY"; }
409 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
410 template <typename T>
411 inline return_t dispatch (const T &obj) { return obj.apply (this); }
412 static return_t default_return_value (void) { return false; }
413 bool stop_sublookup_iteration (return_t r) const { return r; }
414 return_t recurse (unsigned int lookup_index)
415 {
416 if (unlikely (nesting_level_left == 0 || !recurse_func))
417 return default_return_value ();
418
419 nesting_level_left--;
420 bool ret = recurse_func (this, lookup_index);
421 nesting_level_left++;
422 return ret;
423 }
424
425 skipping_iterator_t iter_input, iter_context;
426
427 hb_font_t *font;
428 hb_face_t *face;
429 hb_buffer_t *buffer;
430 recurse_func_t recurse_func;
431 const GDEF &gdef;
432 const VariationStore &var_store;
433
434 hb_direction_t direction;
435 hb_mask_t lookup_mask;
436 unsigned int table_index; /* GSUB/GPOS */
437 unsigned int lookup_index;
438 unsigned int lookup_props;
439 unsigned int nesting_level_left;
440 unsigned int debug_depth;
441
442 bool auto_zwnj;
443 bool auto_zwj;
444 bool has_glyph_classes;
445
446
  /* Per-lookup application state; table_index_ is 0 for GSUB, 1 for GPOS.
   * NOTE: member-initializer order follows declaration order; `gdef` must
   * be initialized before `var_store` and `has_glyph_classes`, both of
   * which read it -- keep the declaration order intact. */
  hb_apply_context_t (unsigned int table_index_,
		      hb_font_t *font_,
		      hb_buffer_t *buffer_) :
			iter_input (), iter_context (),
			font (font_), face (font->face), buffer (buffer_),
			recurse_func (nullptr),
			gdef (*hb_ot_layout_from_face (face)->gdef),
			var_store (gdef.get_var_store ()),
			direction (buffer_->props.direction),
			lookup_mask (1),
			table_index (table_index_),
			lookup_index ((unsigned int) -1),
			lookup_props (0),
			nesting_level_left (HB_MAX_NESTING_LEVEL),
			debug_depth (0),
			auto_zwnj (true),
			auto_zwj (true),
			has_glyph_classes (gdef.has_glyph_classes ()) {}
465
  /* Simple per-lookup configuration setters. */
  inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
  inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
  inline void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; }
  inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  /* Changing lookup props invalidates the matchers, so both iterators are
   * re-initialized: iter_input for input matching, iter_context for
   * backtrack/lookahead (context_match == true). */
  inline void set_lookup_props (unsigned int lookup_props_)
  {
    lookup_props = lookup_props_;
    iter_input.init (this, false);
    iter_context.init (this, true);
  }
477
478 inline bool
479 match_properties_mark (hb_codepoint_t glyph,
480 unsigned int glyph_props,
481 unsigned int match_props) const
482 {
483 /* If using mark filtering sets, the high short of
484 * match_props has the set index.
485 */
486 if (match_props & LookupFlag::UseMarkFilteringSet)
487 return gdef.mark_set_covers (match_props >> 16, glyph);
488
660 const USHORT input[], /* Array of input values--start with second glyph */
661 match_func_t match_func,
662 const void *match_data)
663 {
664 if (count != c->len)
665 return false;
666
667 for (unsigned int i = 1; i < count; i++)
668 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
669 return false;
670
671 return true;
672 }
/* Matches `count` input glyphs starting at the current buffer position,
 * stepping over skippable glyphs via the input iterator.  The first glyph
 * is the current one and is not matched against `input` (hence `input`
 * starts with the second glyph).
 *
 * On success, match_positions[] holds the buffer index of each matched
 * glyph and *end_offset the total span consumed (in buffer positions).
 * The optional out-params report whether every matched glyph was a mark
 * and the summed ligature-component count; both are consumed by ligature
 * substitution (ligate_input). */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset,
				unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
				bool *p_is_mark_ligature = nullptr,
				unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct,
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/behdad/harfbuzz/issues/545
   */

  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  /* Lazily-computed verdict on whether our base ligature is ignorable
   * (the second exception above); computed at most once per match. */
  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
	/* ...unless, we are attached to a base ligature and that base
	 * ligature is ignorable. */
	if (ligbase == LIGBASE_NOT_CHECKED)
	{
	  bool found = false;
	  /* Scan backwards through already-output glyphs for the base
	   * (component 0) of our ligature. */
	  const hb_glyph_info_t *out = buffer->out_info;
	  unsigned int j = buffer->out_len;
	  while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
	  {
	    if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
	    {
	      j--;
	      found = true;
	      break;
	    }
	    j--;
	  }

	  if (found && skippy_iter.may_skip (c, out[j]) == hb_apply_context_t::matcher_t::SKIP_YES)
	    ligbase = LIGBASE_MAY_SKIP;
	  else
	    ligbase = LIGBASE_MAY_NOT_SKIP;
	}

	if (ligbase == LIGBASE_MAY_NOT_SKIP)
	  return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
799 static inline bool ligate_input (hb_apply_context_t *c,
800 unsigned int count, /* Including the first glyph */
801 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
802 unsigned int match_length,
803 hb_codepoint_t lig_glyph,
804 bool is_mark_ligature,
805 unsigned int total_component_count)
806 {
807 TRACE_APPLY (nullptr);
808
809 hb_buffer_t *buffer = c->buffer;
810
811 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
812
813 /*
814 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
815 * the ligature to keep its old ligature id. This will allow it to attach to
816 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
817 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA wit a
818 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
819 * later, we don't want them to lose their ligature id/component, otherwise
820 * GPOS will fail to correctly position the mark ligature on top of the
821 * LAM,LAM,HEH ligature. See:
822 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
823 *
824 * - If a ligature is formed of components that some of which are also ligatures
825 * themselves, and those ligature components had marks attached to *their*
826 * components, we have to attach the marks to the new ligature component
827 * positions! Now *that*'s tricky! And these marks may be following the
881 /* Re-adjust components for any marks following. */
882 for (unsigned int i = buffer->idx; i < buffer->len; i++) {
883 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
884 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
885 if (!this_comp)
886 break;
887 unsigned int new_lig_comp = components_so_far - last_num_components +
888 MIN (this_comp, last_num_components);
889 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
890 } else
891 break;
892 }
893 }
894 return_trace (true);
895 }
896
897 static inline bool match_backtrack (hb_apply_context_t *c,
898 unsigned int count,
899 const USHORT backtrack[],
900 match_func_t match_func,
901 const void *match_data,
902 unsigned int *match_start)
903 {
904 TRACE_APPLY (nullptr);
905
906 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
907 skippy_iter.reset (c->buffer->backtrack_len (), count);
908 skippy_iter.set_match_func (match_func, match_data, backtrack);
909
910 for (unsigned int i = 0; i < count; i++)
911 if (!skippy_iter.prev ())
912 return_trace (false);
913
914 *match_start = skippy_iter.idx;
915
916 return_trace (true);
917 }
918
919 static inline bool match_lookahead (hb_apply_context_t *c,
920 unsigned int count,
921 const USHORT lookahead[],
922 match_func_t match_func,
923 const void *match_data,
924 unsigned int offset,
925 unsigned int *end_index)
926 {
927 TRACE_APPLY (nullptr);
928
929 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
930 skippy_iter.reset (c->buffer->idx + offset - 1, count);
931 skippy_iter.set_match_func (match_func, match_data, lookahead);
932
933 for (unsigned int i = 0; i < count; i++)
934 if (!skippy_iter.next ())
935 return_trace (false);
936
937 *end_index = skippy_iter.idx + 1;
938
939 return_trace (true);
940 }
941
942
943
/* A (sequence position, lookup index) pair: which nested lookup to apply
 * at which position of a matched context.  Fixed-size record; sanitizing
 * only needs a bounds check. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};
959
960
961 template <typename context_t>
962 static inline void recurse_lookups (context_t *c,
963 unsigned int lookupCount,
964 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
965 {
966 for (unsigned int i = 0; i < lookupCount; i++)
967 c->recurse (lookupRecord[i].lookupListIndex);
968 }
969
970 static inline bool apply_lookup (hb_apply_context_t *c,
971 unsigned int count, /* Including the first glyph */
972 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
973 unsigned int lookupCount,
974 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
975 unsigned int match_length)
976 {
977 TRACE_APPLY (nullptr);
978
979 hb_buffer_t *buffer = c->buffer;
980 int end;
981
982 /* All positions are distance from beginning of *output* buffer.
983 * Adjust. */
984 {
985 unsigned int bl = buffer->backtrack_len ();
986 end = bl + match_length;
987
988 int delta = bl - buffer->idx;
989 /* Convert positions to new indexing. */
990 for (unsigned int j = 0; j < count; j++)
991 match_positions[j] += delta;
992 }
993
994 for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++)
995 {
996 unsigned int idx = lookupRecord[i].sequenceIndex;
997 if (idx >= count)
998 continue;
999
1000 /* Don't recurse to ourself at same position.
1001 * Note that this test is too naive, it doesn't catch longer loops. */
1002 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index)
1003 continue;
1004
1005 buffer->move_to (match_positions[idx]);
1006
1007 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1008 if (!c->recurse (lookupRecord[i].lookupListIndex))
1009 continue;
1010
1011 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1012 int delta = new_len - orig_len;
1013
1014 if (!delta)
1015 continue;
1016
1017 /* Recursed lookup changed buffer len. Adjust.
1018 *
1019 * TODO:
1020 *
1021 * Right now, if buffer length increased by n, we assume n new glyphs
1022 * were added right after the current position, and if buffer length
1023 * was decreased by n, we assume n match positions after the current
1024 * one where removed. The former (buffer length increased) case is
1025 * fine, but the decrease case can be improved in at least two ways,
1026 * both of which are significant:
1027 *
1028 * - If recursed-to lookup is MultipleSubst and buffer length
1029 * decreased, then it's current match position that was deleted,
1030 * NOT the one after it.
1031 *
1032 * - If buffer length was decreased by n, it does not necessarily
1033 * mean that n match positions where removed, as there might
1034 * have been marks and default-ignorables in the sequence. We
1035 * should instead drop match positions between current-position
1036 * and current-position + n instead.
1037 *
1038 * It should be possible to construct tests for both of these cases.
1039 */
1040
1041 end += delta;
1042 if (end <= int (match_positions[idx]))
1043 {
1044 /* End might end up being smaller than match_positions[idx] if the recursed
1045 * lookup ended up removing many items, more than we have had matched.
1046 * Just never rewind end back and get out of here.
1047 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1048 end = match_positions[idx];
1049 /* There can't be any further changes. */
1050 break;
1051 }
1052
1053 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1054
1055 if (delta > 0)
1056 {
1057 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1058 break;
1059 }
1060 else
1061 {
1062 /* NOTE: delta is negative. */
1141 const LookupRecord lookupRecord[] HB_UNUSED,
1142 ContextApplyLookupContext &lookup_context)
1143 {
1144 return would_match_input (c,
1145 inputCount, input,
1146 lookup_context.funcs.match, lookup_context.match_data);
1147 }
/* Applies one (non-chained) context rule: match the input sequence, then
 * mark the matched range unsafe-to-break and run the nested lookups.
 * The comma expression keeps unsafe_to_break() inside the short-circuit:
 * it only executes when match_input() succeeded. */
static inline bool context_apply_lookup (hb_apply_context_t *c,
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &match_length, match_positions)
      && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
	  apply_lookup (c,
			inputCount, match_positions,
			lookupCount, lookupRecord,
			match_length));
}
1167
1168 struct Rule
1169 {
1170 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1171 {
1172 TRACE_CLOSURE (this);
1173 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1174 context_closure_lookup (c,
1175 inputCount, inputZ,
1176 lookupCount, lookupRecord,
1177 lookup_context);
1178 }
1179
1180 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
1181 {
1182 TRACE_COLLECT_GLYPHS (this);
1183 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1184 context_collect_glyphs_lookup (c,
1185 inputCount, inputZ,
1188 }
1189
1190 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1191 {
1192 TRACE_WOULD_APPLY (this);
1193 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1194 return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
1195 }
1196
1197 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
1198 {
1199 TRACE_APPLY (this);
1200 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
1201 return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
1202 }
1203
1204 public:
  /* Validate the two counts, then range-check the variable-sized tail
   * (input glyphs followed by LookupRecords) in one go.
   * NOTE(review): the range uses inputCount USHORTs although inputZ
   * actually holds inputCount-1 entries (first glyph is implicit per the
   * member comments below) -- over-counting by one entry is conservative
   * and therefore safe. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (inputCount.sanitize (c) &&
		  lookupCount.sanitize (c) &&
		  c->check_range (inputZ,
				  inputZ[0].static_size * inputCount +
				  lookupRecordX[0].static_size * lookupCount));
  }
1214
1215 protected:
1216 USHORT inputCount; /* Total number of glyphs in input
1217 * glyph sequence--includes the first
1218 * glyph */
1219 USHORT lookupCount; /* Number of LookupRecords */
1220 USHORT inputZ[VAR]; /* Array of match inputs--start with
1221 * second glyph */
1222 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
1223 * design order */
1224 public:
1225 DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
1226 };
1227
1228 struct RuleSet
1229 {
1230 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
1231 {
1232 TRACE_CLOSURE (this);
1275
1276 protected:
1277 OffsetArrayOf<Rule>
1278 rule; /* Array of Rule tables
1279 * ordered by preference */
1280 public:
1281 DEFINE_SIZE_ARRAY (2, rule);
1282 };
1283
1284
1285 struct ContextFormat1
1286 {
1287 inline void closure (hb_closure_context_t *c) const
1288 {
1289 TRACE_CLOSURE (this);
1290
1291 const Coverage &cov = (this+coverage);
1292
1293 struct ContextClosureLookupContext lookup_context = {
1294 {intersects_glyph},
1295 nullptr
1296 };
1297
1298 unsigned int count = ruleSet.len;
1299 for (unsigned int i = 0; i < count; i++)
1300 if (cov.intersects_coverage (c->glyphs, i)) {
1301 const RuleSet &rule_set = this+ruleSet[i];
1302 rule_set.closure (c, lookup_context);
1303 }
1304 }
1305
1306 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1307 {
1308 TRACE_COLLECT_GLYPHS (this);
1309 (this+coverage).add_coverage (c->input);
1310
1311 struct ContextCollectGlyphsLookupContext lookup_context = {
1312 {collect_glyph},
1313 nullptr
1314 };
1315
1316 unsigned int count = ruleSet.len;
1317 for (unsigned int i = 0; i < count; i++)
1318 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1319 }
1320
1321 inline bool would_apply (hb_would_apply_context_t *c) const
1322 {
1323 TRACE_WOULD_APPLY (this);
1324
1325 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1326 struct ContextApplyLookupContext lookup_context = {
1327 {match_glyph},
1328 nullptr
1329 };
1330 return_trace (rule_set.would_apply (c, lookup_context));
1331 }
1332
  /* Coverage table for the first input glyph position (used by apply()
   * on the current glyph). */
  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }
1337
1338 inline bool apply (hb_apply_context_t *c) const
1339 {
1340 TRACE_APPLY (this);
1341 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1342 if (likely (index == NOT_COVERED))
1343 return_trace (false);
1344
1345 const RuleSet &rule_set = this+ruleSet[index];
1346 struct ContextApplyLookupContext lookup_context = {
1347 {match_glyph},
1348 nullptr
1349 };
1350 return_trace (rule_set.apply (c, lookup_context));
1351 }
1352
  /* Validate the coverage and rule-set offset arrays; both offsets are
   * resolved relative to this subtable.  Short-circuits on first failure. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }
1358
1359 protected:
1360 USHORT format; /* Format identifier--format = 1 */
1361 OffsetTo<Coverage>
1362 coverage; /* Offset to Coverage table--from
1363 * beginning of table */
1364 OffsetArrayOf<RuleSet>
1365 ruleSet; /* Array of RuleSet tables
1366 * ordered by Coverage Index */
1367 public:
1368 DEFINE_SIZE_ARRAY (6, ruleSet);
1663 const LookupRecord lookupRecord[] HB_UNUSED,
1664 ChainContextApplyLookupContext &lookup_context)
1665 {
1666 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
1667 && would_match_input (c,
1668 inputCount, input,
1669 lookup_context.funcs.match, lookup_context.match_data[1]);
1670 }
1671
1672 static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
1673 unsigned int backtrackCount,
1674 const USHORT backtrack[],
1675 unsigned int inputCount, /* Including the first glyph (not matched) */
1676 const USHORT input[], /* Array of input values--start with second glyph */
1677 unsigned int lookaheadCount,
1678 const USHORT lookahead[],
1679 unsigned int lookupCount,
1680 const LookupRecord lookupRecord[],
1681 ChainContextApplyLookupContext &lookup_context)
1682 {
1683 unsigned int start_index = 0, match_length = 0, end_index = 0;
1684 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1685 return match_input (c,
1686 inputCount, input,
1687 lookup_context.funcs.match, lookup_context.match_data[1],
1688 &match_length, match_positions)
1689 && match_backtrack (c,
1690 backtrackCount, backtrack,
1691 lookup_context.funcs.match, lookup_context.match_data[0],
1692 &start_index)
1693 && match_lookahead (c,
1694 lookaheadCount, lookahead,
1695 lookup_context.funcs.match, lookup_context.match_data[2],
1696 match_length, &end_index)
1697 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
1698 apply_lookup (c,
1699 inputCount, match_positions,
1700 lookupCount, lookupRecord,
1701 match_length));
1702 }
1703
1704 struct ChainRule
1705 {
  /* Recurse closure over this chain rule's lookup records.
   * The four variable-length arrays are serialized back-to-back --
   * backtrack, input, lookahead, then LookupRecords -- so each is
   * located with StructAfter on the previous one; the chaining order
   * below must not change. */
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }
1719
1720 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
1721 {
1833 return_trace (rule.sanitize (c, this));
1834 }
1835
1836 protected:
1837 OffsetArrayOf<ChainRule>
1838 rule; /* Array of ChainRule tables
1839 * ordered by preference */
1840 public:
1841 DEFINE_SIZE_ARRAY (2, rule);
1842 };
1843
1844 struct ChainContextFormat1
1845 {
1846 inline void closure (hb_closure_context_t *c) const
1847 {
1848 TRACE_CLOSURE (this);
1849 const Coverage &cov = (this+coverage);
1850
1851 struct ChainContextClosureLookupContext lookup_context = {
1852 {intersects_glyph},
1853 {nullptr, nullptr, nullptr}
1854 };
1855
1856 unsigned int count = ruleSet.len;
1857 for (unsigned int i = 0; i < count; i++)
1858 if (cov.intersects_coverage (c->glyphs, i)) {
1859 const ChainRuleSet &rule_set = this+ruleSet[i];
1860 rule_set.closure (c, lookup_context);
1861 }
1862 }
1863
1864 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
1865 {
1866 TRACE_COLLECT_GLYPHS (this);
1867 (this+coverage).add_coverage (c->input);
1868
1869 struct ChainContextCollectGlyphsLookupContext lookup_context = {
1870 {collect_glyph},
1871 {nullptr, nullptr, nullptr}
1872 };
1873
1874 unsigned int count = ruleSet.len;
1875 for (unsigned int i = 0; i < count; i++)
1876 (this+ruleSet[i]).collect_glyphs (c, lookup_context);
1877 }
1878
1879 inline bool would_apply (hb_would_apply_context_t *c) const
1880 {
1881 TRACE_WOULD_APPLY (this);
1882
1883 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1884 struct ChainContextApplyLookupContext lookup_context = {
1885 {match_glyph},
1886 {nullptr, nullptr, nullptr}
1887 };
1888 return_trace (rule_set.would_apply (c, lookup_context));
1889 }
1890
  /* Coverage table for the first input glyph position (used by apply()
   * on the current glyph). */
  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }
1895
1896 inline bool apply (hb_apply_context_t *c) const
1897 {
1898 TRACE_APPLY (this);
1899 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1900 if (likely (index == NOT_COVERED)) return_trace (false);
1901
1902 const ChainRuleSet &rule_set = this+ruleSet[index];
1903 struct ChainContextApplyLookupContext lookup_context = {
1904 {match_glyph},
1905 {nullptr, nullptr, nullptr}
1906 };
1907 return_trace (rule_set.apply (c, lookup_context));
1908 }
1909
  /* Validate the coverage and chain-rule-set offset arrays; both offsets
   * are resolved relative to this subtable.  Short-circuits on first
   * failure. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }
1915
1916 protected:
1917 USHORT format; /* Format identifier--format = 1 */
1918 OffsetTo<Coverage>
1919 coverage; /* Offset to Coverage table--from
1920 * beginning of table */
1921 OffsetArrayOf<ChainRuleSet>
1922 ruleSet; /* Array of ChainRuleSet tables
1923 * ordered by Coverage Index */
1924 public:
1925 DEFINE_SIZE_ARRAY (6, ruleSet);
  /* Validate the table header and its three sub-lists.  Only major
   * version 1 is accepted.  featureVars was introduced in version
   * 0x00010001 (see member comment below), so it is validated only when
   * the version is at least that; the short-circuit keeps pre-1.1
   * tables, which lack the field, valid. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  scriptList.sanitize (c, this) &&
		  featureList.sanitize (c, this) &&
		  lookupList.sanitize (c, this) &&
		  (version.to_int () < 0x00010001u || featureVars.sanitize (c, this)));
  }
2349
2350 protected:
2351 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set
2352 * to 0x00010000u */
2353 OffsetTo<ScriptList>
2354 scriptList; /* ScriptList table */
2355 OffsetTo<FeatureList>
2356 featureList; /* FeatureList table */
2357 OffsetTo<LookupList>
2358 lookupList; /* LookupList table */
2359 LOffsetTo<FeatureVariations>
2360 featureVars; /* Offset to Feature Variations
				 * table--from beginning of table
2362 * (may be NULL). Introduced
2363 * in version 0x00010001. */
2364 public:
2365 DEFINE_SIZE_MIN (10);
2366 };
2367
2368
2369 } /* namespace OT */
2370
2371
2372 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */
|