1 /* 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright © 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"
#include "hb-set-private.hh"


namespace OT {


#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

#define TRACE_CLOSURE(this) \
	hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* Dispatch context for the glyph-closure operation: each dispatched subtable
 * adds, into `glyphs`, the glyphs it can produce from the current set. */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE>
{
  inline const char *get_name (void) { return "CLOSURE"; }
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  /* Recurse into a nested lookup, guarding against runaway nesting depth
   * and against a missing recurse callback. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *glyphs;		/* IN/OUT: glyph set being closed over. */
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  recurse_func (NULL),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif

#define TRACE_WOULD_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "%d glyphs", c->len);

/* Dispatch context for the would-apply query: tests whether a lookup would
 * match the given fixed glyph sequence, without touching any buffer. */
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
{
  inline const char *get_name (void) { return "WOULD_APPLY"; }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value (void) { return false; }
  /* First sub-lookup that would apply wins. */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;	/* Glyph sequence being tested. */
  unsigned int len;		/* Number of glyphs in `glyphs`. */
  bool zero_context;
  unsigned int debug_depth;

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_),
			      debug_depth (0) {}
};



#ifndef HB_DEBUG_COLLECT_GLYPHS
#define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
#endif

#define TRACE_COLLECT_GLYPHS(this) \
	hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* Dispatch context for glyph collection: gathers the glyphs a lookup may
 * inspect (before/input/after) and the glyphs it may produce (output). */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue. */

    if (output == hb_set_get_empty ())
      return HB_VOID;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups.has (lookup_index))
      return HB_VOID;

    /* Only the output set is collected inside the recursion; park the other
     * three sets on the shared empty set for the duration. */
    hb_set_t *old_before = before;
    hb_set_t *old_input = input;
    hb_set_t *old_after = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input = old_input;
    after = old_after;

    recursed_lookups.add (lookup_index);

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;		/* Backtrack glyphs. */
  hb_set_t *input;		/* Input glyphs. */
  hb_set_t *after;		/* Lookahead glyphs. */
  hb_set_t *output;		/* Glyphs the lookups can produce. */
  recurse_func_t recurse_func;
  hb_set_t recursed_lookups;	/* Lookups already recursed into; avoids rework. */
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  /* NULL out-sets are redirected to the shared empty set so collection into
   * them becomes a no-op without NULL checks. */
  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t *glyphs_before, /* OUT. May be NULL */
			       hb_set_t *glyphs_input,  /* OUT. May be NULL */
			       hb_set_t *glyphs_after,  /* OUT. May be NULL */
			       hb_set_t *glyphs_output, /* OUT. May be NULL */
			       unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
			      after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (NULL),
			      recursed_lookups (),
			      nesting_level_left (nesting_level_left_),
			      debug_depth (0)
  {
    recursed_lookups.init ();
  }
  ~hb_collect_glyphs_context_t (void)
  {
    recursed_lookups.fini ();
  }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



#ifndef HB_DEBUG_GET_COVERAGE
#define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
#endif

/* XXX Can we remove this? */

/* Dispatch context that accumulates, into `set`, the union of the Coverage
 * tables of every sub-lookup it is dispatched over. */
template <typename set_t>
struct hb_add_coverage_context_t :
       hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
{
  inline const char *get_name (void) { return "GET_COVERAGE"; }
  typedef const Coverage &return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value (void) { return Null(Coverage); }
  /* Fold each returned coverage into the set; never stop early. */
  bool stop_sublookup_iteration (return_t r) const
  {
    r.add_coverage (set);
    return false;
  }

  hb_add_coverage_context_t (set_t *set_) :
			    set (set_),
			    debug_depth (0) {}

  set_t *set;
  unsigned int debug_depth;
};



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

#define TRACE_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "idx %d gid %u lookup %d", \
	 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index);

/* Dispatch context for actually applying lookups to a buffer during shaping. */
struct hb_apply_context_t :
       hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
{
  /* Per-glyph decision helper: does a glyph match the current subtable value,
   * and should it be skipped according to the lookup flags? */
  struct matcher_t
  {
    inline matcher_t (void) :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed!
 */
	     syllable arg1(0),
#undef arg1
	     match_func (NULL),
	     match_data (NULL) {};

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

    inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    inline void set_mask (hb_mask_t mask_) { mask = mask_; }
    inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    inline void set_match_func (match_func_t match_func_,
				const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

    /* MATCH_NO if mask/syllable rule the glyph out; MATCH_YES/MATCH_NO per
     * match_func when one is set; MATCH_MAYBE when there is no match_func. */
    inline may_match_t may_match (const hb_glyph_info_t &info,
				  const USHORT          *glyph_data) const
    {
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

    /* SKIP_YES if lookup flags exclude the glyph; SKIP_MAYBE for default
     * ignorables (ZWNJ/ZWJ) unless explicitly retained; else SKIP_NO. */
    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
	      const hb_glyph_info_t    &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  /* Iterator over buffer glyphs that transparently skips glyphs the current
   * lookup ignores; `idx` is the current position. */
  struct skipping_iterator_t
  {
    inline void init (hb_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = NULL,
      matcher.set_match_func (NULL, NULL);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (context_match || c->table_index == 1);
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
    inline void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    inline void set_match_func (matcher_t::match_func_t match_func,
				const void *match_data,
				const USHORT glyph_data[])
    {
      matcher.set_match_func (match_func, match_data);
      match_glyph_data = glyph_data;
    }

    inline void reset (unsigned int start_index_,
		       unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      /* Constrain matching to the current syllable only when starting at the
       * buffer cursor; context matches start elsewhere and are unconstrained. */
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    inline void reject (void) { num_items++; match_glyph_data--; }

    /* Advance to the next matching, non-skipped glyph; false when no match
     * is possible any more (mismatch, or too few glyphs left). */
    inline bool next (void)
    {
      assert (num_items > 0);
      while (idx + num_items < end)
      {
	idx++;
	const hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }
    /* Mirror image of next(): walk backward over the out-buffer. */
    inline bool prev (void)
    {
      assert (num_items > 0);
      while (idx >= num_items)
      {
	idx--;
	const hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }

    unsigned int idx;
    protected:
    hb_apply_context_t *c;
    matcher_t matcher;
    const USHORT *match_glyph_data;

    unsigned int num_items;
    unsigned int end;
  };


  inline const char *get_name (void) { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value (void) { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Apply a nested lookup at the current buffer position, bounded by the
   * remaining nesting budget. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, lookup_index);
    nesting_level_left++;
    return ret;
  }

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  bool auto_zwj;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  const GDEF &gdef;
  bool has_glyph_classes;
  skipping_iterator_t iter_input, iter_context;
  unsigned int lookup_index;
  unsigned int debug_depth;


  hb_apply_context_t (unsigned int table_index_,
		      hb_font_t *font_,
		      hb_buffer_t *buffer_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (1),
			auto_zwj (true),
			recurse_func (NULL),
			nesting_level_left (MAX_NESTING_LEVEL),
			lookup_props (0),
			gdef (*hb_ot_layout_from_face (face)->gdef),
			has_glyph_classes (gdef.has_glyph_classes ()),
			iter_input (),
			iter_context (),
			lookup_index ((unsigned int) -1),
			debug_depth (0) {}

  inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
  inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
  inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  /* Changing lookup props re-initializes both iterators, since their
   * skipping behavior depends on the props. */
  inline void set_lookup_props (unsigned int lookup_props_)
  {
    lookup_props = lookup_props_;
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  inline bool
  match_properties_mark (hb_codepoint_t glyph,
			 unsigned int glyph_props,
			 unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short
     * of match_props has the set index. */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified." */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  /* True if the glyph survives the lookup-flag filter in match_props. */
  inline bool
  check_glyph_property (const hb_glyph_info_t *info,
			unsigned int  match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }

  /* Refresh the glyph-props bits on the current glyph after substitution;
   * class_guess is only consulted when the font has no GDEF glyph classes. */
  inline void _set_glyph_props (hb_codepoint_t glyph_index,
			  unsigned int class_guess = 0,
			  bool ligature = false,
			  bool component = false) const
  {
    unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
			  HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
    add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit. */
      add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
    else if (class_guess)
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
  }

  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->replace_glyph (glyph_index);
  }
  inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
					   unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, true);
    buffer->replace_glyph (glyph_index);
  }
  inline void output_glyph_for_component (hb_codepoint_t glyph_index,
					  unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, false, true);
    buffer->output_glyph (glyph_index);
  }
};



typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};


/* Each helper trio below interprets a USHORT "value" one of three ways:
 * literal glyph id, class number, or offset to a Coverage table. */
static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  return glyphs->has (value);
}
static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.intersects_class (glyphs, value);
}
static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).intersects (glyphs);
}

/* True only if every value in the array intersects the glyph set. */
static inline bool intersects_array (hb_closure_context_t *c,
				     unsigned int count,
				     const USHORT values[],
				     intersects_func_t intersects_func,
				     const void *intersects_data)
{
  for (unsigned int i = 0; i < count; i++)
    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
      return false;
  return true;
}


static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.add_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  (data+coverage).add_coverage (glyphs);
}
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
				  hb_set_t *glyphs,
				  unsigned int count,
				  const USHORT values[],
				  collect_glyphs_func_t collect_func,
				  const void *collect_data)
{
  for (unsigned int i = 0; i < count; i++)
    collect_func (glyphs, values[i], collect_data);
}


static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (glyph_id) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}

/* Buffer-free input match used by would_apply: the sequence must match
 * exactly, with no glyph skipping. */
static inline bool would_match_input (hb_would_apply_context_t *c,
				      unsigned int count, /* Including the first glyph (not matched) */
				      const USHORT input[], /* Array of input values--start with second glyph */
				      match_func_t match_func,
				      const void *match_data)
{
  if (count != c->len)
    return false;

  for (unsigned int i = 1; i < count; i++)
    if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
      return false;

  return true;
}
/* Match `input` against the buffer starting at the current glyph, recording
 * matched positions plus the ligature bookkeeping ligate_input() needs. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset,
				unsigned int match_positions[MAX_CONTEXT_LENGTH],
				bool *p_is_mark_ligature = NULL,
				unsigned int *p_total_component_count = NULL)
{
  TRACE_APPLY (NULL);

  if (unlikely (count > MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg.
 the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There is an exception to this: If a ligature tries ligating with marks that
   *   belong to it itself, go ahead, assuming that the font designer knows what
   *   they are doing (otherwise it can break Indic stuff when a matra wants to
   *   ligate with a conjunct...)
   */

  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp) {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them. */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
	return_trace (false);
    } else {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  /* Distance (in buffer slots, including skipped glyphs) from the first to
   * one past the last matched glyph. */
  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
/* Replace the matched sequence with a single ligature glyph, fixing up
 * ligature ids/components on skipped glyphs and trailing marks. */
static inline bool ligate_input (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int match_length,
				 hb_codepoint_t lig_glyph,
				 bool is_mark_ligature,
				 unsigned int total_component_count)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, buffer->idx + match_length);

  /*
   * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!
 And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
  unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (!is_mark_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
      _hb_glyph_info_set_modified_combining_class (&buffer->cur(), 0);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  for (unsigned int i = 1; i < count; i++)
  {
    /* Glyphs that were skipped between matched components: re-attach them
     * to the corresponding component of the new ligature. */
    while (buffer->idx < match_positions[i])
    {
      if (!is_mark_ligature) {
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->cur()), 1u), last_num_components);
	_hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id) {
    /* Re-adjust components for any marks following. */
    for (unsigned int i = buffer->idx; i < buffer->len; i++) {
      if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->info[i]), 1u), last_num_components);
	_hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
      } else
	break;
    }
  }
  return_trace (true);
}

/* Match `backtrack` against the glyphs before the current match position,
 * walking backward over the out-buffer. */
static inline bool match_backtrack (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT backtrack[],
				    match_func_t match_func,
				    const void *match_data)
{
  TRACE_APPLY (NULL);

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
  skippy_iter.reset (c->buffer->backtrack_len (), count);
  skippy_iter.set_match_func (match_func, match_data, backtrack);

  for (unsigned int i = 0; i < count; i++)
    if (!skippy_iter.prev ())
      return_trace (false);

  return_trace (true);
}

/* Match `lookahead` against the glyphs after the matched input sequence;
 * `offset` is the length of the matched input. */
static inline bool match_lookahead (hb_apply_context_t *c,
				    unsigned int count,
				    const USHORT lookahead[],
				    match_func_t match_func,
				    const void *match_data,
				    unsigned int offset)
{
  TRACE_APPLY (NULL);

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
  skippy_iter.reset (c->buffer->idx + offset - 1, count);
  skippy_iter.set_match_func (match_func, match_data, lookahead);

  for (unsigned int i = 0; i < count; i++)
    if (!skippy_iter.next ())
      return_trace (false);

  return_trace (true);
}



/* (sequence index, lookup index) pair stored in contextual lookup subtables. */
struct LookupRecord
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};


/* Run every LookupRecord's lookup through the context's recurse hook. */
template <typename context_t>
static inline void recurse_lookups (context_t *c,
				    unsigned int lookupCount,
				    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  for (unsigned int i = 0; i < lookupCount; i++)
    c->recurse (lookupRecord[i].lookupListIndex);
}

/* Apply each LookupRecord at its matched position, keeping match_positions
 * consistent as recursed lookups grow or shrink the buffer. */
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_length)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;
  unsigned int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    buffer->move_to (match_positions[idx]);

    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust.
*/ 988 989 /* end can't go back past the current match position. 990 * Note: this is only true because we do NOT allow MultipleSubst 991 * with zero sequence len. */ 992 end = MAX ((int) match_positions[idx] + 1, int (end) + delta); 993 994 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */ 995 996 if (delta > 0) 997 { 998 if (unlikely (delta + count > MAX_CONTEXT_LENGTH)) 999 break; 1000 } 1001 else 1002 { 1003 /* NOTE: delta is negative. */ 1004 delta = MAX (delta, (int) next - (int) count); 1005 next -= delta; 1006 } 1007 1008 /* Shift! */ 1009 memmove (match_positions + next + delta, match_positions + next, 1010 (count - next) * sizeof (match_positions[0])); 1011 next += delta; 1012 count += delta; 1013 1014 /* Fill in new entries. */ 1015 for (unsigned int j = idx + 1; j < next; j++) 1016 match_positions[j] = match_positions[j - 1] + 1; 1017 1018 /* And fixup the rest. */ 1019 for (; next < count; next++) 1020 match_positions[next] += delta; 1021 } 1022 1023 buffer->move_to (end); 1024 1025 return_trace (true); 1026 } 1027 1028 1029 1030 /* Contextual lookups */ 1031 1032 struct ContextClosureLookupContext 1033 { 1034 ContextClosureFuncs funcs; 1035 const void *intersects_data; 1036 }; 1037 1038 struct ContextCollectGlyphsLookupContext 1039 { 1040 ContextCollectGlyphsFuncs funcs; 1041 const void *collect_data; 1042 }; 1043 1044 struct ContextApplyLookupContext 1045 { 1046 ContextApplyFuncs funcs; 1047 const void *match_data; 1048 }; 1049 1050 static inline void context_closure_lookup (hb_closure_context_t *c, 1051 unsigned int inputCount, /* Including the first glyph (not matched) */ 1052 const USHORT input[], /* Array of input values--start with second glyph */ 1053 unsigned int lookupCount, 1054 const LookupRecord lookupRecord[], 1055 ContextClosureLookupContext &lookup_context) 1056 { 1057 if (intersects_array (c, 1058 inputCount ? 
			inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data))
    recurse_lookups (c,
		     lookupCount, lookupRecord);
}

/* Collect-glyphs pass: add the rule's input values to the context's input
 * set, then recurse into the referenced lookups. */
static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
						  unsigned int inputCount, /* Including the first glyph (not matched) */
						  const USHORT input[], /* Array of input values--start with second glyph */
						  unsigned int lookupCount,
						  const LookupRecord lookupRecord[],
						  ContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}

/* Would-apply pass: only tests whether the input sequence would match;
 * the lookup records are irrelevant here. */
static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookupCount HB_UNUSED,
					       const LookupRecord lookupRecord[] HB_UNUSED,
					       ContextApplyLookupContext &lookup_context)
{
  return would_match_input (c,
			    inputCount, input,
			    lookup_context.funcs.match, lookup_context.match_data);
}

/* Apply pass: match the input sequence, then run the lookup records over
 * the recorded match positions. */
static inline bool context_apply_lookup (hb_apply_context_t *c,
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[MAX_CONTEXT_LENGTH];
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &match_length, match_positions)
      && apply_lookup (c,
		       inputCount, match_positions,
		       lookupCount, lookupRecord,
		       match_length);
}

/* A single contextual rule: the two counts are followed by
 * (inputCount - 1) match values and then lookupCount LookupRecords,
 * back to back.  Since inputZ is variable-length, the record array is
 * located with StructAtOffset. */
struct Rule
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
			    inputCount, inputZ,
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
				   inputCount, inputZ,
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* NOTE(review): bare `return` here while sibling sanitize() methods use
     * return_trace — confirm whether that is intentional. */
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (inputZ,
			   inputZ[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }

  protected:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	inputZ[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
};

/* A preference-ordered set of Rules; apply() takes the first rule that
 * matches, while closure/collect visit all of them. */
struct RuleSet
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    /* First matching rule wins. */
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


/* Context Substitution/Positioning Format 1: rules keyed by individual
 * glyph — the first glyph selects a RuleSet via the Coverage table and
 * further inputs are matched as glyph ids (match_glyph). */
struct ContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    /* Presumably the caller guarantees c->glyphs[0] is covered here —
     * no NOT_COVERED check before indexing ruleSet; verify against caller. */
    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* `likely` on the miss: most glyphs are not covered by any one lookup. */
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


/* Context Format 2: rules keyed by glyph class — the first glyph must be
 * covered, its class (from classDef) selects a RuleSet, and inputs are
 * matched as class values (match_class). */
struct ContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
	const RuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &class_def = this+classDef;
    /* Coverage gated entry; the rule set is chosen by class, not by
     * coverage index. */
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


/* Context Format 3: a single rule where every input position has its own
 * Coverage table (match_coverage); lookup records follow the coverage
 * offset array. */
struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if
       (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    /* Skip the first coverage: it gates entry and is not re-matched. */
    context_closure_lookup (c,
			    glyphCount, (const USHORT *) (coverageZ + 1),
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
				   glyphCount, (const USHORT *) (coverageZ + 1),
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverageZ[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverageZ[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
};

/* Format dispatcher for contextual lookups: reads the leading format
 * USHORT and forwards the context to the matching subtable layout. */
struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

/* Same as the plain-context variants above, but with one data pointer per
 * sequence part: [0] backtrack, [1] input, [2] lookahead. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};

struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};

struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};

/* Closure pass for chained contexts: all three sequences must be able to
 * intersect the glyph set before recursing into the lookups. */
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
						 unsigned int backtrackCount,
						 const USHORT backtrack[],
						 unsigned int inputCount, /* Including the first glyph (not matched) */
						 const USHORT input[], /* Array of input values--start with second glyph */
						 unsigned int lookaheadCount,
						 const USHORT lookahead[],
						 unsigned int lookupCount,
						 const LookupRecord lookupRecord[],
						 ChainContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
			backtrackCount, backtrack,
			lookup_context.funcs.intersects, lookup_context.intersects_data[0])
   && intersects_array (c,
			inputCount ?
			inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data[1])
   && intersects_array (c,
		       lookaheadCount, lookahead,
		       lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
    recurse_lookups (c,
		     lookupCount, lookupRecord);
}

/* Collect-glyphs pass for chained contexts: backtrack, input and lookahead
 * values go into the corresponding sets (before / input / after). */
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
						        unsigned int backtrackCount,
						        const USHORT backtrack[],
						        unsigned int inputCount, /* Including the first glyph (not matched) */
						        const USHORT input[], /* Array of input values--start with second glyph */
						        unsigned int lookaheadCount,
						        const USHORT lookahead[],
						        unsigned int lookupCount,
						        const LookupRecord lookupRecord[],
						        ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
		 backtrackCount, backtrack,
		 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
		 lookaheadCount, lookahead,
		 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}

/* Would-apply pass: with zero_context requested, rules carrying any
 * backtrack/lookahead are rejected outright; only the input sequence is
 * actually matched. */
static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
						     unsigned int backtrackCount,
						     const USHORT backtrack[] HB_UNUSED,
						     unsigned int inputCount, /* Including the first glyph (not matched) */
						     const USHORT input[], /* Array of input values--start with second glyph */
						     unsigned int lookaheadCount,
						     const USHORT lookahead[] HB_UNUSED,
						     unsigned int lookupCount HB_UNUSED,
						     const LookupRecord lookupRecord[] HB_UNUSED,
						     ChainContextApplyLookupContext &lookup_context)
{
  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
      && would_match_input (c,
			    inputCount, input,
			    lookup_context.funcs.match, lookup_context.match_data[1]);
}

/* Apply pass: match input first (recording positions/length), then
 * backtrack and lookahead (lookahead starts after match_length), and
 * finally run the lookup records. */
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
					       unsigned int backtrackCount,
					       const USHORT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const USHORT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       ChainContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[MAX_CONTEXT_LENGTH];
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &match_length, match_positions)
      && match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  match_length)
      && apply_lookup (c,
		       inputCount, match_positions,
		       lookupCount, lookupRecord,
		       match_length);
}

/* A single chained-context rule.  Four variable-length arrays are laid
 * out back to back (backtrack, headless input, lookahead, lookup
 * records); only the first is addressable directly, the rest are found
 * with StructAfter. */
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, backtrack.array,
					 input.len, input.array,
					 lookahead.len, lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
						    backtrack.len, backtrack.array,
						    input.len, input.array,
						    lookahead.len, lookahead.array, lookup.len,
						    lookup.array, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, backtrack.array,
					      input.len, input.array,
					      lookahead.len, lookahead.array, lookup.len,
					      lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Sanitize each array in order; each StructAfter is only valid once
     * the previous array has been validated. */
    if (!backtrack.sanitize (c)) return_trace (false);
    const
          HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A preference-ordered set of ChainRules; apply() takes the first rule
 * that matches, closure/collect visit all. */
struct ChainRuleSet
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* Chained Context Format 1: glyph-based — the first input glyph selects a
 * ChainRuleSet via Coverage; all sequence parts match glyph ids. */
struct ChainContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const Coverage &cov = (this+coverage);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    /* Presumably the caller guarantees c->glyphs[0] is covered here —
     * no NOT_COVERED check before indexing ruleSet; verify against caller. */
    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* Chained Context Format 2: class-based — separate ClassDef tables for
 * backtrack, input and lookahead sequences; the input class of the first
 * glyph selects the ChainRuleSet. */
struct ChainContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
	const ChainRuleSet &rule_set = this+ruleSet[i];
	rule_set.closure (c, lookup_context);
      }
  }

  inline void
         collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* One ClassDef per sequence part: [0] backtrack, [1] input,
     * [2] lookahead. */
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Coverage gates entry; the rule set is chosen by the input class. */
    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
		  backtrackClassDef.sanitize (c, this) &&
		  inputClassDef.sanitize (c, this) &&
		  lookaheadClassDef.sanitize (c, this) &&
		  ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

/* Chained Context Format 3: coverage-based — each position of each
 * sequence part has its own Coverage table; the arrays are laid out back
 * to back and located with StructAfter.  (Continues past this chunk.) */
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
chain_context_closure_lookup (c,
				  backtrack.len, (const USHORT *) backtrack.array,
				  /* First input coverage was already checked
				   * against the current glyph above; pass the
				   * rest starting at array + 1. */
				  input.len, (const USHORT *) input.array + 1,
				  lookahead.len, (const USHORT *) lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  /* Collect all glyphs this subtable may touch, into the collect context. */
  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    (this+input[0]).add_coverage (c->input);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, (const USHORT *) backtrack.array,
					 input.len, (const USHORT *) input.array + 1,
					 lookahead.len, (const USHORT *) lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_would_apply_lookup (c,
						    backtrack.len, (const USHORT *) backtrack.array,
						    input.len, (const USHORT *) input.array + 1,
						    lookahead.len, (const USHORT *) lookahead.array,
						    lookup.len, lookup.array, lookup_context));
  }

  /* The first input coverage acts as this subtable's gating coverage. */
  inline const Coverage &get_coverage (void) const
  {
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    return this+input[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, (const USHORT *) backtrack.array,
					      input.len, (const USHORT *) input.array + 1,
					      lookahead.len, (const USHORT *) lookahead.array,
					      lookup.len, lookup.array, lookup_context));
  }

  /* Sanitize each variable-length array in order; each one's length must
   * be validated before StructAfter may be used to reach the next. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  OffsetArrayOf<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  /* The X-suffixed members below are placeholders only: their real
   * positions are variable (each preceding array is variable-length),
   * so the methods above locate them with StructAfter. */
  OffsetArrayOf<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  OffsetArrayOf<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (10);
};

/* Wrapper dispatching on the format USHORT shared by all three
 * ChainContext subtable formats. */
struct ChainContext
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;	/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};


/* Extension subtable, Format 1: indirection allowing 32-bit offsets to
 * the actual lookup subtable.  T is the lookup type (GSUB/GPOS side). */
template <typename T>
struct ExtensionFormat1
{
  inline unsigned int get_type (void) const { return extensionLookupType; }

  /* Resolve the wrapped subtable; a zero offset yields the Null object. */
  template <typename X>
  inline const X& get_subtable (void) const
  {
    unsigned int offset = extensionOffset;
    if (unlikely (!offset)) return
Null(typename T::LookupSubTable);
    return StructAtOffset<typename T::LookupSubTable> (this, offset);
  }

  /* Forward the dispatch to the wrapped subtable, using the real
   * (extension) lookup type. */
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
  }

  /* This is called from may_dispatch() above with hb_sanitize_context_t.
   * Rejects a zero extensionOffset up front.  NOTE(review): does not
   * reject extensionLookupType being the Extension type itself; presumably
   * recursive extensions are handled (or harmless) elsewhere -- confirm. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && extensionOffset != 0);
  }

  protected:
  USHORT	format;			/* Format identifier. Set to 1. */
  USHORT	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  ULONG		extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Format-dispatching wrapper for Extension subtables (only format 1 exists). */
template <typename T>
struct Extension
{
  /* Real lookup type of the wrapped subtable; 0 on unknown format. */
  inline unsigned int get_type (void) const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  inline const X& get_subtable (void) const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::LookupSubTable> ();
    default:return Null(typename T::LookupSubTable);
    }
  }

  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier */
  ExtensionFormat1<T>	format1;
  } u;
};


/*
 * GSUB/GPOS Common
 */

/* Shared top-level table structure for GSUB and GPOS: a version header
 * followed by offsets to the ScriptList, FeatureList and LookupList. */
struct GSUBGPOS
{
  static const hb_tag_t GSUBTag	= HB_OT_TAG_GSUB;
  static const hb_tag_t GPOSTag	= HB_OT_TAG_GPOS;

  inline unsigned int get_script_count (void) const
  { return (this+scriptList).len; }
  inline const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  inline unsigned int get_script_tags (unsigned int start_offset,
				       unsigned int *script_count /* IN/OUT */,
				       hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  inline const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  inline unsigned int get_feature_count (void) const
  { return (this+featureList).len; }
  /* Map NOT_FOUND_INDEX to HB_TAG_NONE instead of indexing out of range. */
  inline hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  inline unsigned int get_feature_tags (unsigned int start_offset,
					unsigned int *feature_count /* IN/OUT */,
					hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  inline const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  inline unsigned int get_lookup_count (void) const
  { return (this+lookupList).len; }
  inline const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  /* Only major version 1 is accepted. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  scriptList.sanitize (c, this) &&
		  featureList.sanitize (c, this) &&
		  lookupList.sanitize (c, this));
  }

  protected:
  FixedVersion	version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  OffsetTo<ScriptList>
		scriptList;  	/* ScriptList table */
  OffsetTo<FeatureList>
		featureList; 	/* FeatureList table */
  OffsetTo<LookupList>
		lookupList; 	/* LookupList table */
  public:
  DEFINE_SIZE_STATIC (10);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */