1 /* 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright © 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 24 * 25 * Red Hat Author(s): Behdad Esfahbod 26 * Google Author(s): Behdad Esfahbod 27 */ 28 29 #ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH 30 #define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH 31 32 #include "hb-private.hh" 33 #include "hb-debug.hh" 34 #include "hb-buffer-private.hh" 35 #include "hb-ot-layout-gdef-table.hh" 36 #include "hb-set-private.hh" 37 38 39 namespace OT { 40 41 42 struct hb_closure_context_t : 43 hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE> 44 { 45 inline const char *get_name (void) { return "CLOSURE"; } 46 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index); 47 template <typename T> 48 inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; } 49 static return_t default_return_value (void) { return HB_VOID; } 50 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } 51 return_t recurse (unsigned int lookup_index) 52 { 53 if (unlikely (nesting_level_left == 0 || !recurse_func)) 54 return default_return_value (); 55 56 nesting_level_left--; 57 recurse_func (this, lookup_index); 58 nesting_level_left++; 59 return HB_VOID; 60 } 61 62 hb_face_t *face; 63 hb_set_t *glyphs; 64 recurse_func_t recurse_func; 65 unsigned int nesting_level_left; 66 unsigned int debug_depth; 67 68 hb_closure_context_t (hb_face_t *face_, 69 hb_set_t *glyphs_, 70 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : 71 face (face_), 72 glyphs (glyphs_), 73 recurse_func (nullptr), 74 nesting_level_left (nesting_level_left_), 75 debug_depth (0) {} 76 77 void set_recurse_func (recurse_func_t func) { recurse_func = func; } 78 }; 79 80 81 struct hb_would_apply_context_t : 82 hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY> 83 { 84 inline const char *get_name (void) { return "WOULD_APPLY"; } 85 template <typename T> 86 inline return_t dispatch (const T &obj) { return obj.would_apply (this); } 87 static return_t default_return_value (void) { return false; } 88 bool stop_sublookup_iteration (return_t r) const { return r; } 89 90 hb_face_t *face; 91 const hb_codepoint_t *glyphs; 92 unsigned int len; 93 bool zero_context; 94 unsigned int debug_depth; 95 96 hb_would_apply_context_t (hb_face_t *face_, 97 const hb_codepoint_t *glyphs_, 98 unsigned int len_, 99 bool zero_context_) : 100 face (face_), 101 glyphs (glyphs_), 102 len (len_), 103 zero_context (zero_context_), 104 debug_depth (0) {} 105 }; 106 107 108 struct hb_collect_glyphs_context_t : 109 hb_dispatch_context_t<hb_collect_glyphs_context_t, hb_void_t, HB_DEBUG_COLLECT_GLYPHS> 110 { 111 inline const char *get_name (void) { return "COLLECT_GLYPHS"; } 112 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index); 113 template <typename T> 114 inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; } 115 static return_t default_return_value (void) { return HB_VOID; } 116 bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; } 117 return_t recurse (unsigned int lookup_index) 118 { 119 if (unlikely (nesting_level_left == 0 || !recurse_func)) 120 return default_return_value (); 121 122 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get 123 * past the previous check. For GSUB, we only want to collect the output 124 * glyphs in the recursion. If output is not requested, we can go home now. 125 * 126 * Note further, that the above is not exactly correct. A recursed lookup 127 * is allowed to match input that is not matched in the context, but that's 128 * not how most fonts are built. It's possible to relax that and recurse 129 * with all sets here if it proves to be an issue. 130 */ 131 132 if (output == hb_set_get_empty ()) 133 return HB_VOID; 134 135 /* Return if new lookup was recursed to before. */ 136 if (recursed_lookups->has (lookup_index)) 137 return HB_VOID; 138 139 hb_set_t *old_before = before; 140 hb_set_t *old_input = input; 141 hb_set_t *old_after = after; 142 before = input = after = hb_set_get_empty (); 143 144 nesting_level_left--; 145 recurse_func (this, lookup_index); 146 nesting_level_left++; 147 148 before = old_before; 149 input = old_input; 150 after = old_after; 151 152 recursed_lookups->add (lookup_index); 153 154 return HB_VOID; 155 } 156 157 hb_face_t *face; 158 hb_set_t *before; 159 hb_set_t *input; 160 hb_set_t *after; 161 hb_set_t *output; 162 recurse_func_t recurse_func; 163 hb_set_t *recursed_lookups; 164 unsigned int nesting_level_left; 165 unsigned int debug_depth; 166 167 hb_collect_glyphs_context_t (hb_face_t *face_, 168 hb_set_t *glyphs_before, /* OUT. May be nullptr */ 169 hb_set_t *glyphs_input, /* OUT. May be nullptr */ 170 hb_set_t *glyphs_after, /* OUT. May be nullptr */ 171 hb_set_t *glyphs_output, /* OUT. May be nullptr */ 172 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) : 173 face (face_), 174 before (glyphs_before ? glyphs_before : hb_set_get_empty ()), 175 input (glyphs_input ? glyphs_input : hb_set_get_empty ()), 176 after (glyphs_after ? glyphs_after : hb_set_get_empty ()), 177 output (glyphs_output ? glyphs_output : hb_set_get_empty ()), 178 recurse_func (nullptr), 179 recursed_lookups (nullptr), 180 nesting_level_left (nesting_level_left_), 181 debug_depth (0) 182 { 183 recursed_lookups = hb_set_create (); 184 } 185 ~hb_collect_glyphs_context_t (void) 186 { 187 hb_set_destroy (recursed_lookups); 188 } 189 190 void set_recurse_func (recurse_func_t func) { recurse_func = func; } 191 }; 192 193 194 195 /* XXX Can we remove this? */ 196 197 template <typename set_t> 198 struct hb_add_coverage_context_t : 199 hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE> 200 { 201 inline const char *get_name (void) { return "GET_COVERAGE"; } 202 typedef const Coverage &return_t; 203 template <typename T> 204 inline return_t dispatch (const T &obj) { return obj.get_coverage (); } 205 static return_t default_return_value (void) { return Null(Coverage); } 206 bool stop_sublookup_iteration (return_t r) const 207 { 208 r.add_coverage (set); 209 return false; 210 } 211 212 hb_add_coverage_context_t (set_t *set_) : 213 set (set_), 214 debug_depth (0) {} 215 216 set_t *set; 217 unsigned int debug_depth; 218 }; 219 220 221 struct hb_apply_context_t : 222 hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY> 223 { 224 struct matcher_t 225 { 226 inline matcher_t (void) : 227 lookup_props (0), 228 ignore_zwnj (false), 229 ignore_zwj (false), 230 mask (-1), 231 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */ 232 syllable arg1(0), 233 #undef arg1 234 match_func (nullptr), 235 match_data (nullptr) {}; 236 237 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); 238 239 inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; } 240 inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; } 241 inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; } 242 inline void set_mask (hb_mask_t mask_) { mask = mask_; } 243 inline void set_syllable (uint8_t syllable_) { syllable = syllable_; } 244 inline void set_match_func (match_func_t match_func_, 245 const void *match_data_) 246 { match_func = match_func_; match_data = match_data_; } 247 248 enum may_match_t { 249 MATCH_NO, 250 MATCH_YES, 251 MATCH_MAYBE 252 }; 253 254 inline may_match_t may_match (const hb_glyph_info_t &info, 255 const USHORT *glyph_data) const 256 { 257 if (!(info.mask & mask) || 258 (syllable && syllable != info.syllable ())) 259 return MATCH_NO; 260 261 if (match_func) 262 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO; 263 264 return MATCH_MAYBE; 265 } 266 267 enum may_skip_t { 268 SKIP_NO, 269 SKIP_YES, 270 SKIP_MAYBE 271 }; 272 273 inline may_skip_t 274 may_skip (const hb_apply_context_t *c, 275 const hb_glyph_info_t &info) const 276 { 277 if (!c->check_glyph_property (&info, lookup_props)) 278 return SKIP_YES; 279 280 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) && 281 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) && 282 (ignore_zwj || !_hb_glyph_info_is_zwj (&info)))) 283 return SKIP_MAYBE; 284 285 return SKIP_NO; 286 } 287 288 protected: 289 unsigned int lookup_props; 290 bool ignore_zwnj; 291 bool ignore_zwj; 292 hb_mask_t mask; 293 uint8_t syllable; 294 match_func_t match_func; 295 const void *match_data; 296 }; 297 298 struct skipping_iterator_t 299 { 300 inline void init (hb_apply_context_t *c_, bool context_match = false) 301 { 302 c = c_; 303 match_glyph_data = nullptr; 304 matcher.set_match_func (nullptr, nullptr); 305 matcher.set_lookup_props (c->lookup_props); 306 /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */ 307 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj)); 308 /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */ 309 matcher.set_ignore_zwj (c->table_index == 1 || (context_match || c->auto_zwj)); 310 matcher.set_mask (context_match ? -1 : c->lookup_mask); 311 } 312 inline void set_lookup_props (unsigned int lookup_props) 313 { 314 matcher.set_lookup_props (lookup_props); 315 } 316 inline void set_match_func (matcher_t::match_func_t match_func_, 317 const void *match_data_, 318 const USHORT glyph_data[]) 319 { 320 matcher.set_match_func (match_func_, match_data_); 321 match_glyph_data = glyph_data; 322 } 323 324 inline void reset (unsigned int start_index_, 325 unsigned int num_items_) 326 { 327 idx = start_index_; 328 num_items = num_items_; 329 end = c->buffer->len; 330 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0); 331 } 332 333 inline void reject (void) { num_items++; match_glyph_data--; } 334 335 inline matcher_t::may_skip_t 336 may_skip (const hb_apply_context_t *c, 337 const hb_glyph_info_t &info) const 338 { 339 return matcher.may_skip (c, info); 340 } 341 342 inline bool next (void) 343 { 344 assert (num_items > 0); 345 while (idx + num_items < end) 346 { 347 idx++; 348 const hb_glyph_info_t &info = c->buffer->info[idx]; 349 350 matcher_t::may_skip_t skip = matcher.may_skip (c, info); 351 if (unlikely (skip == matcher_t::SKIP_YES)) 352 continue; 353 354 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data); 355 if (match == matcher_t::MATCH_YES || 356 (match == matcher_t::MATCH_MAYBE && 357 skip == matcher_t::SKIP_NO)) 358 { 359 num_items--; 360 match_glyph_data++; 361 return true; 362 } 363 364 if (skip == matcher_t::SKIP_NO) 365 return false; 366 } 367 return false; 368 } 369 inline bool prev (void) 370 { 371 assert (num_items > 0); 372 while (idx >= num_items) 373 { 374 idx--; 375 const hb_glyph_info_t &info = c->buffer->out_info[idx]; 376 377 matcher_t::may_skip_t skip = matcher.may_skip (c, info); 378 if (unlikely (skip == matcher_t::SKIP_YES)) 379 continue; 380 381 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data); 382 if (match == matcher_t::MATCH_YES || 383 (match == matcher_t::MATCH_MAYBE && 384 skip == matcher_t::SKIP_NO)) 385 { 386 num_items--; 387 match_glyph_data++; 388 return true; 389 } 390 391 if (skip == matcher_t::SKIP_NO) 392 return false; 393 } 394 return false; 395 } 396 397 unsigned int idx; 398 protected: 399 hb_apply_context_t *c; 400 matcher_t matcher; 401 const USHORT *match_glyph_data; 402 403 unsigned int num_items; 404 unsigned int end; 405 }; 406 407 408 inline const char *get_name (void) { return "APPLY"; } 409 typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index); 410 template <typename T> 411 inline return_t dispatch (const T &obj) { return obj.apply (this); } 412 static return_t default_return_value (void) { return false; } 413 bool stop_sublookup_iteration (return_t r) const { return r; } 414 return_t recurse (unsigned int lookup_index) 415 { 416 if (unlikely (nesting_level_left == 0 || !recurse_func)) 417 return default_return_value (); 418 419 nesting_level_left--; 420 bool ret = recurse_func (this, lookup_index); 421 nesting_level_left++; 422 return ret; 423 } 424 425 skipping_iterator_t iter_input, iter_context; 426 427 hb_font_t *font; 428 hb_face_t *face; 429 hb_buffer_t *buffer; 430 recurse_func_t recurse_func; 431 const GDEF &gdef; 432 const VariationStore &var_store; 433 434 hb_direction_t direction; 435 hb_mask_t lookup_mask; 436 unsigned int table_index; /* GSUB/GPOS */ 437 unsigned int lookup_index; 438 unsigned int lookup_props; 439 unsigned int nesting_level_left; 440 unsigned int debug_depth; 441 442 bool auto_zwnj; 443 bool auto_zwj; 444 bool has_glyph_classes; 445 446 447 hb_apply_context_t (unsigned int table_index_, 448 hb_font_t *font_, 449 hb_buffer_t *buffer_) : 450 iter_input (), iter_context (), 451 font (font_), face (font->face), buffer (buffer_), 452 recurse_func (nullptr), 453 gdef (*hb_ot_layout_from_face (face)->gdef), 454 var_store (gdef.get_var_store ()), 455 direction (buffer_->props.direction), 456 lookup_mask (1), 457 table_index (table_index_), 458 lookup_index ((unsigned int) -1), 459 lookup_props (0), 460 nesting_level_left (HB_MAX_NESTING_LEVEL), 461 debug_depth (0), 462 auto_zwnj (true), 463 auto_zwj (true), 464 has_glyph_classes (gdef.has_glyph_classes ()) {} 465 466 inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; } 467 inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; } 468 inline void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; } 469 inline void set_recurse_func (recurse_func_t func) { recurse_func = func; } 470 inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; } 471 inline void set_lookup_props (unsigned int lookup_props_) 472 { 473 lookup_props = lookup_props_; 474 iter_input.init (this, false); 475 iter_context.init (this, true); 476 } 477 478 inline bool 479 match_properties_mark (hb_codepoint_t glyph, 480 unsigned int glyph_props, 481 unsigned int match_props) const 482 { 483 /* If using mark filtering sets, the high short of 484 * match_props has the set index. 485 */ 486 if (match_props & LookupFlag::UseMarkFilteringSet) 487 return gdef.mark_set_covers (match_props >> 16, glyph); 488 489 /* The second byte of match_props has the meaning 490 * "ignore marks of attachment type different than 491 * the attachment type specified." 492 */ 493 if (match_props & LookupFlag::MarkAttachmentType) 494 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType); 495 496 return true; 497 } 498 499 inline bool 500 check_glyph_property (const hb_glyph_info_t *info, 501 unsigned int match_props) const 502 { 503 hb_codepoint_t glyph = info->codepoint; 504 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info); 505 506 /* Not covered, if, for example, glyph class is ligature and 507 * match_props includes LookupFlags::IgnoreLigatures 508 */ 509 if (glyph_props & match_props & LookupFlag::IgnoreFlags) 510 return false; 511 512 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK)) 513 return match_properties_mark (glyph, glyph_props, match_props); 514 515 return true; 516 } 517 518 inline void _set_glyph_props (hb_codepoint_t glyph_index, 519 unsigned int class_guess = 0, 520 bool ligature = false, 521 bool component = false) const 522 { 523 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) & 524 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE; 525 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED; 526 if (ligature) 527 { 528 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED; 529 /* In the only place that the MULTIPLIED bit is used, Uniscribe 530 * seems to only care about the "last" transformation between 531 * Ligature and Multiple substitions. Ie. if you ligate, expand, 532 * and ligate again, it forgives the multiplication and acts as 533 * if only ligation happened. As such, clear MULTIPLIED bit. 534 */ 535 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; 536 } 537 if (component) 538 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED; 539 if (likely (has_glyph_classes)) 540 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index)); 541 else if (class_guess) 542 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess); 543 } 544 545 inline void replace_glyph (hb_codepoint_t glyph_index) const 546 { 547 _set_glyph_props (glyph_index); 548 buffer->replace_glyph (glyph_index); 549 } 550 inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const 551 { 552 _set_glyph_props (glyph_index); 553 buffer->cur().codepoint = glyph_index; 554 } 555 inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index, 556 unsigned int class_guess) const 557 { 558 _set_glyph_props (glyph_index, class_guess, true); 559 buffer->replace_glyph (glyph_index); 560 } 561 inline void output_glyph_for_component (hb_codepoint_t glyph_index, 562 unsigned int class_guess) const 563 { 564 _set_glyph_props (glyph_index, class_guess, false, true); 565 buffer->output_glyph (glyph_index); 566 } 567 }; 568 569 570 571 typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data); 572 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data); 573 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data); 574 575 struct ContextClosureFuncs 576 { 577 intersects_func_t intersects; 578 }; 579 struct ContextCollectGlyphsFuncs 580 { 581 collect_glyphs_func_t collect; 582 }; 583 struct ContextApplyFuncs 584 { 585 match_func_t match; 586 }; 587 588 589 static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) 590 { 591 return glyphs->has (value); 592 } 593 static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data) 594 { 595 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 596 return class_def.intersects_class (glyphs, value); 597 } 598 static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) 599 { 600 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 601 return (data+coverage).intersects (glyphs); 602 } 603 604 static inline bool intersects_array (hb_closure_context_t *c, 605 unsigned int count, 606 const USHORT values[], 607 intersects_func_t intersects_func, 608 const void *intersects_data) 609 { 610 for (unsigned int i = 0; i < count; i++) 611 if (likely (!intersects_func (c->glyphs, values[i], intersects_data))) 612 return false; 613 return true; 614 } 615 616 617 static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED) 618 { 619 glyphs->add (value); 620 } 621 static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data) 622 { 623 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 624 class_def.add_class (glyphs, value); 625 } 626 static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data) 627 { 628 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 629 (data+coverage).add_coverage (glyphs); 630 } 631 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED, 632 hb_set_t *glyphs, 633 unsigned int count, 634 const USHORT values[], 635 collect_glyphs_func_t collect_func, 636 const void *collect_data) 637 { 638 for (unsigned int i = 0; i < count; i++) 639 collect_func (glyphs, values[i], collect_data); 640 } 641 642 643 static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED) 644 { 645 return glyph_id == value; 646 } 647 static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data) 648 { 649 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data); 650 return class_def.get_class (glyph_id) == value; 651 } 652 static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data) 653 { 654 const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value; 655 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED; 656 } 657 658 static inline bool would_match_input (hb_would_apply_context_t *c, 659 unsigned int count, /* Including the first glyph (not matched) */ 660 const USHORT input[], /* Array of input values--start with second glyph */ 661 match_func_t match_func, 662 const void *match_data) 663 { 664 if (count != c->len) 665 return false; 666 667 for (unsigned int i = 1; i < count; i++) 668 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data))) 669 return false; 670 671 return true; 672 } 673 static inline bool match_input (hb_apply_context_t *c, 674 unsigned int count, /* Including the first glyph (not matched) */ 675 const USHORT input[], /* Array of input values--start with second glyph */ 676 match_func_t match_func, 677 const void *match_data, 678 unsigned int *end_offset, 679 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], 680 bool *p_is_mark_ligature = nullptr, 681 unsigned int *p_total_component_count = nullptr) 682 { 683 TRACE_APPLY (nullptr); 684 685 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false); 686 687 hb_buffer_t *buffer = c->buffer; 688 689 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input; 690 skippy_iter.reset (buffer->idx, count - 1); 691 skippy_iter.set_match_func (match_func, match_data, input); 692 693 /* 694 * This is perhaps the trickiest part of OpenType... Remarks: 695 * 696 * - If all components of the ligature were marks, we call this a mark ligature. 697 * 698 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize 699 * it as a ligature glyph. 700 * 701 * - Ligatures cannot be formed across glyphs attached to different components 702 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and 703 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother. 704 * However, it would be wrong to ligate that SHADDA,FATHA sequence. 705 * There are a couple of exceptions to this: 706 * 707 * o If a ligature tries ligating with marks that belong to it itself, go ahead, 708 * assuming that the font designer knows what they are doing (otherwise it can 709 * break Indic stuff when a matra wants to ligate with a conjunct, 710 * 711 * o If two marks want to ligate and they belong to different components of the 712 * same ligature glyph, and said ligature glyph is to be ignored according to 713 * mark-filtering rules, then allow. 714 * https://github.com/behdad/harfbuzz/issues/545 715 */ 716 717 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur()); 718 719 unsigned int total_component_count = 0; 720 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 721 722 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); 723 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); 724 725 enum { 726 LIGBASE_NOT_CHECKED, 727 LIGBASE_MAY_NOT_SKIP, 728 LIGBASE_MAY_SKIP 729 } ligbase = LIGBASE_NOT_CHECKED; 730 731 match_positions[0] = buffer->idx; 732 for (unsigned int i = 1; i < count; i++) 733 { 734 if (!skippy_iter.next ()) return_trace (false); 735 736 match_positions[i] = skippy_iter.idx; 737 738 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]); 739 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]); 740 741 if (first_lig_id && first_lig_comp) 742 { 743 /* If first component was attached to a previous ligature component, 744 * all subsequent components should be attached to the same ligature 745 * component, otherwise we shouldn't ligate them... */ 746 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp) 747 { 748 /* ...unless, we are attached to a base ligature and that base 749 * ligature is ignorable. */ 750 if (ligbase == LIGBASE_NOT_CHECKED) 751 { 752 bool found = false; 753 const hb_glyph_info_t *out = buffer->out_info; 754 unsigned int j = buffer->out_len; 755 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id) 756 { 757 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0) 758 { 759 j--; 760 found = true; 761 break; 762 } 763 j--; 764 } 765 766 if (found && skippy_iter.may_skip (c, out[j]) == hb_apply_context_t::matcher_t::SKIP_YES) 767 ligbase = LIGBASE_MAY_SKIP; 768 else 769 ligbase = LIGBASE_MAY_NOT_SKIP; 770 } 771 772 if (ligbase == LIGBASE_MAY_NOT_SKIP) 773 return_trace (false); 774 } 775 } 776 else 777 { 778 /* If first component was NOT attached to a previous ligature component, 779 * all subsequent components should also NOT be attached to any ligature 780 * component, unless they are attached to the first component itself! */ 781 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id)) 782 return_trace (false); 783 } 784 785 is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]); 786 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]); 787 } 788 789 *end_offset = skippy_iter.idx - buffer->idx + 1; 790 791 if (p_is_mark_ligature) 792 *p_is_mark_ligature = is_mark_ligature; 793 794 if (p_total_component_count) 795 *p_total_component_count = total_component_count; 796 797 return_trace (true); 798 } 799 static inline bool ligate_input (hb_apply_context_t *c, 800 unsigned int count, /* Including the first glyph */ 801 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ 802 unsigned int match_length, 803 hb_codepoint_t lig_glyph, 804 bool is_mark_ligature, 805 unsigned int total_component_count) 806 { 807 TRACE_APPLY (nullptr); 808 809 hb_buffer_t *buffer = c->buffer; 810 811 buffer->merge_clusters (buffer->idx, buffer->idx + match_length); 812 813 /* 814 * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave 815 * the ligature to keep its old ligature id. This will allow it to attach to 816 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH, 817 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA wit a 818 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature 819 * later, we don't want them to lose their ligature id/component, otherwise 820 * GPOS will fail to correctly position the mark ligature on top of the 821 * LAM,LAM,HEH ligature. See: 822 * https://bugzilla.gnome.org/show_bug.cgi?id=676343 823 * 824 * - If a ligature is formed of components that some of which are also ligatures 825 * themselves, and those ligature components had marks attached to *their* 826 * components, we have to attach the marks to the new ligature component 827 * positions! Now *that*'s tricky! And these marks may be following the 828 * last component of the whole sequence, so we should loop forward looking 829 * for them and update them. 830 * 831 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a 832 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature 833 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature 834 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to 835 * the new ligature with a component value of 2. 836 * 837 * This in fact happened to a font... See: 838 * https://bugzilla.gnome.org/show_bug.cgi?id=437633 839 */ 840 841 unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE; 842 unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer); 843 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); 844 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 845 unsigned int components_so_far = last_num_components; 846 847 if (!is_mark_ligature) 848 { 849 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count); 850 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK) 851 { 852 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER); 853 } 854 } 855 c->replace_glyph_with_ligature (lig_glyph, klass); 856 857 for (unsigned int i = 1; i < count; i++) 858 { 859 while (buffer->idx < match_positions[i] && !buffer->in_error) 860 { 861 if (!is_mark_ligature) { 862 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur()); 863 if (this_comp == 0) 864 this_comp = last_num_components; 865 unsigned int new_lig_comp = components_so_far - last_num_components + 866 MIN (this_comp, last_num_components); 867 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp); 868 } 869 buffer->next_glyph (); 870 } 871 872 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur()); 873 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 874 components_so_far += last_num_components; 875 876 /* Skip the base glyph */ 877 buffer->idx++; 878 } 879 880 if (!is_mark_ligature && last_lig_id) { 881 /* Re-adjust components for any marks following. */ 882 for (unsigned int i = buffer->idx; i < buffer->len; i++) { 883 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) { 884 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]); 885 if (!this_comp) 886 break; 887 unsigned int new_lig_comp = components_so_far - last_num_components + 888 MIN (this_comp, last_num_components); 889 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp); 890 } else 891 break; 892 } 893 } 894 return_trace (true); 895 } 896 897 static inline bool match_backtrack (hb_apply_context_t *c, 898 unsigned int count, 899 const USHORT backtrack[], 900 match_func_t match_func, 901 const void *match_data, 902 unsigned int *match_start) 903 { 904 TRACE_APPLY (nullptr); 905 906 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; 907 skippy_iter.reset (c->buffer->backtrack_len (), count); 908 skippy_iter.set_match_func (match_func, match_data, backtrack); 909 910 for (unsigned int i = 0; i < count; i++) 911 if (!skippy_iter.prev ()) 912 return_trace (false); 913 914 *match_start = skippy_iter.idx; 915 916 return_trace (true); 917 } 918 919 static inline bool match_lookahead (hb_apply_context_t *c, 920 unsigned int count, 921 const USHORT lookahead[], 922 match_func_t match_func, 923 const void *match_data, 924 unsigned int offset, 925 unsigned int *end_index) 926 { 927 TRACE_APPLY (nullptr); 928 929 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; 930 skippy_iter.reset (c->buffer->idx + offset - 1, count); 931 skippy_iter.set_match_func (match_func, match_data, lookahead); 932 933 for (unsigned int i = 0; i < count; i++) 934 if (!skippy_iter.next ()) 935 return_trace (false); 936 937 *end_index = skippy_iter.idx + 1; 938 939 return_trace (true); 940 } 941 942 943 944 struct LookupRecord 945 { 946 inline bool sanitize (hb_sanitize_context_t *c) const 947 { 948 TRACE_SANITIZE (this); 949 return_trace (c->check_struct (this)); 950 } 951 952 USHORT sequenceIndex; /* Index into current glyph 953 * sequence--first glyph = 0 */ 954 USHORT lookupListIndex; /* Lookup to apply to that 955 * position--zero--based */ 956 public: 957 DEFINE_SIZE_STATIC (4); 958 }; 959 960 961 template <typename context_t> 962 static inline void recurse_lookups (context_t *c, 963 unsigned int lookupCount, 964 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */) 965 { 966 for (unsigned int i = 0; i < lookupCount; i++) 967 c->recurse (lookupRecord[i].lookupListIndex); 968 } 969 970 static inline bool apply_lookup (hb_apply_context_t *c, 971 unsigned int count, /* Including the first glyph */ 972 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */ 973 unsigned int lookupCount, 974 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */ 975 unsigned int match_length) 976 { 977 TRACE_APPLY (nullptr); 978 979 hb_buffer_t *buffer = c->buffer; 980 int end; 981 982 /* All positions are distance from beginning of *output* buffer. 983 * Adjust. */ 984 { 985 unsigned int bl = buffer->backtrack_len (); 986 end = bl + match_length; 987 988 int delta = bl - buffer->idx; 989 /* Convert positions to new indexing. */ 990 for (unsigned int j = 0; j < count; j++) 991 match_positions[j] += delta; 992 } 993 994 for (unsigned int i = 0; i < lookupCount && !buffer->in_error; i++) 995 { 996 unsigned int idx = lookupRecord[i].sequenceIndex; 997 if (idx >= count) 998 continue; 999 1000 /* Don't recurse to ourself at same position. 1001 * Note that this test is too naive, it doesn't catch longer loops. */ 1002 if (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index) 1003 continue; 1004 1005 buffer->move_to (match_positions[idx]); 1006 1007 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len (); 1008 if (!c->recurse (lookupRecord[i].lookupListIndex)) 1009 continue; 1010 1011 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len (); 1012 int delta = new_len - orig_len; 1013 1014 if (!delta) 1015 continue; 1016 1017 /* Recursed lookup changed buffer len. Adjust. 1018 * 1019 * TODO: 1020 * 1021 * Right now, if buffer length increased by n, we assume n new glyphs 1022 * were added right after the current position, and if buffer length 1023 * was decreased by n, we assume n match positions after the current 1024 * one where removed. The former (buffer length increased) case is 1025 * fine, but the decrease case can be improved in at least two ways, 1026 * both of which are significant: 1027 * 1028 * - If recursed-to lookup is MultipleSubst and buffer length 1029 * decreased, then it's current match position that was deleted, 1030 * NOT the one after it. 1031 * 1032 * - If buffer length was decreased by n, it does not necessarily 1033 * mean that n match positions where removed, as there might 1034 * have been marks and default-ignorables in the sequence. We 1035 * should instead drop match positions between current-position 1036 * and current-position + n instead. 1037 * 1038 * It should be possible to construct tests for both of these cases. 1039 */ 1040 1041 end += delta; 1042 if (end <= int (match_positions[idx])) 1043 { 1044 /* End might end up being smaller than match_positions[idx] if the recursed 1045 * lookup ended up removing many items, more than we have had matched. 1046 * Just never rewind end back and get out of here. 1047 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */ 1048 end = match_positions[idx]; 1049 /* There can't be any further changes. */ 1050 break; 1051 } 1052 1053 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */ 1054 1055 if (delta > 0) 1056 { 1057 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH)) 1058 break; 1059 } 1060 else 1061 { 1062 /* NOTE: delta is negative. */ 1063 delta = MAX (delta, (int) next - (int) count); 1064 next -= delta; 1065 } 1066 1067 /* Shift! */ 1068 memmove (match_positions + next + delta, match_positions + next, 1069 (count - next) * sizeof (match_positions[0])); 1070 next += delta; 1071 count += delta; 1072 1073 /* Fill in new entries. */ 1074 for (unsigned int j = idx + 1; j < next; j++) 1075 match_positions[j] = match_positions[j - 1] + 1; 1076 1077 /* And fixup the rest. */ 1078 for (; next < count; next++) 1079 match_positions[next] += delta; 1080 } 1081 1082 buffer->move_to (end); 1083 1084 return_trace (true); 1085 } 1086 1087 1088 1089 /* Contextual lookups */ 1090 1091 struct ContextClosureLookupContext 1092 { 1093 ContextClosureFuncs funcs; 1094 const void *intersects_data; 1095 }; 1096 1097 struct ContextCollectGlyphsLookupContext 1098 { 1099 ContextCollectGlyphsFuncs funcs; 1100 const void *collect_data; 1101 }; 1102 1103 struct ContextApplyLookupContext 1104 { 1105 ContextApplyFuncs funcs; 1106 const void *match_data; 1107 }; 1108 1109 static inline void context_closure_lookup (hb_closure_context_t *c, 1110 unsigned int inputCount, /* Including the first glyph (not matched) */ 1111 const USHORT input[], /* Array of input values--start with second glyph */ 1112 unsigned int lookupCount, 1113 const LookupRecord lookupRecord[], 1114 ContextClosureLookupContext &lookup_context) 1115 { 1116 if (intersects_array (c, 1117 inputCount ? inputCount - 1 : 0, input, 1118 lookup_context.funcs.intersects, lookup_context.intersects_data)) 1119 recurse_lookups (c, 1120 lookupCount, lookupRecord); 1121 } 1122 1123 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, 1124 unsigned int inputCount, /* Including the first glyph (not matched) */ 1125 const USHORT input[], /* Array of input values--start with second glyph */ 1126 unsigned int lookupCount, 1127 const LookupRecord lookupRecord[], 1128 ContextCollectGlyphsLookupContext &lookup_context) 1129 { 1130 collect_array (c, c->input, 1131 inputCount ? inputCount - 1 : 0, input, 1132 lookup_context.funcs.collect, lookup_context.collect_data); 1133 recurse_lookups (c, 1134 lookupCount, lookupRecord); 1135 } 1136 1137 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c, 1138 unsigned int inputCount, /* Including the first glyph (not matched) */ 1139 const USHORT input[], /* Array of input values--start with second glyph */ 1140 unsigned int lookupCount HB_UNUSED, 1141 const LookupRecord lookupRecord[] HB_UNUSED, 1142 ContextApplyLookupContext &lookup_context) 1143 { 1144 return would_match_input (c, 1145 inputCount, input, 1146 lookup_context.funcs.match, lookup_context.match_data); 1147 } 1148 static inline bool context_apply_lookup (hb_apply_context_t *c, 1149 unsigned int inputCount, /* Including the first glyph (not matched) */ 1150 const USHORT input[], /* Array of input values--start with second glyph */ 1151 unsigned int lookupCount, 1152 const LookupRecord lookupRecord[], 1153 ContextApplyLookupContext &lookup_context) 1154 { 1155 unsigned int match_length = 0; 1156 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH]; 1157 return match_input (c, 1158 inputCount, input, 1159 lookup_context.funcs.match, lookup_context.match_data, 1160 &match_length, match_positions) 1161 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length), 1162 apply_lookup (c, 1163 inputCount, match_positions, 1164 lookupCount, lookupRecord, 1165 match_length)); 1166 } 1167 1168 struct Rule 1169 { 1170 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const 1171 { 1172 TRACE_CLOSURE (this); 1173 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); 1174 context_closure_lookup (c, 1175 inputCount, inputZ, 1176 lookupCount, lookupRecord, 1177 lookup_context); 1178 } 1179 1180 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const 1181 { 1182 TRACE_COLLECT_GLYPHS (this); 1183 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); 1184 context_collect_glyphs_lookup (c, 1185 inputCount, inputZ, 1186 lookupCount, lookupRecord, 1187 lookup_context); 1188 } 1189 1190 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const 1191 { 1192 TRACE_WOULD_APPLY (this); 1193 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); 1194 return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context)); 1195 } 1196 1197 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const 1198 { 1199 TRACE_APPLY (this); 1200 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0)); 1201 return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context)); 1202 } 1203 1204 public: 1205 inline bool sanitize (hb_sanitize_context_t *c) const 1206 { 1207 TRACE_SANITIZE (this); 1208 return_trace (inputCount.sanitize (c) && 1209 lookupCount.sanitize (c) && 1210 c->check_range (inputZ, 1211 inputZ[0].static_size * inputCount + 1212 lookupRecordX[0].static_size * lookupCount)); 1213 } 1214 1215 protected: 1216 USHORT inputCount; /* Total number of glyphs in input 1217 * glyph sequence--includes the first 1218 * glyph */ 1219 USHORT lookupCount; /* Number of LookupRecords */ 1220 USHORT inputZ[VAR]; /* Array of match inputs--start with 1221 * second glyph */ 1222 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in 1223 * design order */ 1224 public: 1225 DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX); 1226 }; 1227 1228 struct RuleSet 1229 { 1230 inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const 1231 { 1232 TRACE_CLOSURE (this); 1233 unsigned int num_rules = rule.len; 1234 for (unsigned int i = 0; i < num_rules; i++) 1235 (this+rule[i]).closure (c, lookup_context); 1236 } 1237 1238 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const 1239 { 1240 TRACE_COLLECT_GLYPHS (this); 1241 unsigned int num_rules = rule.len; 1242 for (unsigned int i = 0; i < num_rules; i++) 1243 (this+rule[i]).collect_glyphs (c, lookup_context); 1244 } 1245 1246 inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const 1247 { 1248 TRACE_WOULD_APPLY (this); 1249 unsigned int num_rules = rule.len; 1250 for (unsigned int i = 0; i < num_rules; i++) 1251 { 1252 if ((this+rule[i]).would_apply (c, lookup_context)) 1253 return_trace (true); 1254 } 1255 return_trace (false); 1256 } 1257 1258 inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const 1259 { 1260 TRACE_APPLY (this); 1261 unsigned int num_rules = rule.len; 1262 for (unsigned int i = 0; i < num_rules; i++) 1263 { 1264 if ((this+rule[i]).apply (c, lookup_context)) 1265 return_trace (true); 1266 } 1267 return_trace (false); 1268 } 1269 1270 inline bool sanitize (hb_sanitize_context_t *c) const 1271 { 1272 TRACE_SANITIZE (this); 1273 return_trace (rule.sanitize (c, this)); 1274 } 1275 1276 protected: 1277 OffsetArrayOf<Rule> 1278 rule; /* Array of Rule tables 1279 * ordered by preference */ 1280 public: 1281 DEFINE_SIZE_ARRAY (2, rule); 1282 }; 1283 1284 1285 struct ContextFormat1 1286 { 1287 inline void closure (hb_closure_context_t *c) const 1288 { 1289 TRACE_CLOSURE (this); 1290 1291 const Coverage &cov = (this+coverage); 1292 1293 struct ContextClosureLookupContext lookup_context = { 1294 {intersects_glyph}, 1295 nullptr 1296 }; 1297 1298 unsigned int count = ruleSet.len; 1299 for (unsigned int i = 0; i < count; i++) 1300 if (cov.intersects_coverage (c->glyphs, i)) { 1301 const RuleSet &rule_set = this+ruleSet[i]; 1302 rule_set.closure (c, lookup_context); 1303 } 1304 } 1305 1306 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1307 { 1308 TRACE_COLLECT_GLYPHS (this); 1309 (this+coverage).add_coverage (c->input); 1310 1311 struct ContextCollectGlyphsLookupContext lookup_context = { 1312 {collect_glyph}, 1313 nullptr 1314 }; 1315 1316 unsigned int count = ruleSet.len; 1317 for (unsigned int i = 0; i < count; i++) 1318 (this+ruleSet[i]).collect_glyphs (c, lookup_context); 1319 } 1320 1321 inline bool would_apply (hb_would_apply_context_t *c) const 1322 { 1323 TRACE_WOULD_APPLY (this); 1324 1325 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; 1326 struct ContextApplyLookupContext lookup_context = { 1327 {match_glyph}, 1328 nullptr 1329 }; 1330 return_trace (rule_set.would_apply (c, lookup_context)); 1331 } 1332 1333 inline const Coverage &get_coverage (void) const 1334 { 1335 return this+coverage; 1336 } 1337 1338 inline bool apply (hb_apply_context_t *c) const 1339 { 1340 TRACE_APPLY (this); 1341 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 1342 if (likely (index == NOT_COVERED)) 1343 return_trace (false); 1344 1345 const RuleSet &rule_set = this+ruleSet[index]; 1346 struct ContextApplyLookupContext lookup_context = { 1347 {match_glyph}, 1348 nullptr 1349 }; 1350 return_trace (rule_set.apply (c, lookup_context)); 1351 } 1352 1353 inline bool sanitize (hb_sanitize_context_t *c) const 1354 { 1355 TRACE_SANITIZE (this); 1356 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); 1357 } 1358 1359 protected: 1360 USHORT format; /* Format identifier--format = 1 */ 1361 OffsetTo<Coverage> 1362 coverage; /* Offset to Coverage table--from 1363 * beginning of table */ 1364 OffsetArrayOf<RuleSet> 1365 ruleSet; /* Array of RuleSet tables 1366 * ordered by Coverage Index */ 1367 public: 1368 DEFINE_SIZE_ARRAY (6, ruleSet); 1369 }; 1370 1371 1372 struct ContextFormat2 1373 { 1374 inline void closure (hb_closure_context_t *c) const 1375 { 1376 TRACE_CLOSURE (this); 1377 if (!(this+coverage).intersects (c->glyphs)) 1378 return; 1379 1380 const ClassDef &class_def = this+classDef; 1381 1382 struct ContextClosureLookupContext lookup_context = { 1383 {intersects_class}, 1384 &class_def 1385 }; 1386 1387 unsigned int count = ruleSet.len; 1388 for (unsigned int i = 0; i < count; i++) 1389 if (class_def.intersects_class (c->glyphs, i)) { 1390 const RuleSet &rule_set = this+ruleSet[i]; 1391 rule_set.closure (c, lookup_context); 1392 } 1393 } 1394 1395 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1396 { 1397 TRACE_COLLECT_GLYPHS (this); 1398 (this+coverage).add_coverage (c->input); 1399 1400 const ClassDef &class_def = this+classDef; 1401 struct ContextCollectGlyphsLookupContext lookup_context = { 1402 {collect_class}, 1403 &class_def 1404 }; 1405 1406 unsigned int count = ruleSet.len; 1407 for (unsigned int i = 0; i < count; i++) 1408 (this+ruleSet[i]).collect_glyphs (c, lookup_context); 1409 } 1410 1411 inline bool would_apply (hb_would_apply_context_t *c) const 1412 { 1413 TRACE_WOULD_APPLY (this); 1414 1415 const ClassDef &class_def = this+classDef; 1416 unsigned int index = class_def.get_class (c->glyphs[0]); 1417 const RuleSet &rule_set = this+ruleSet[index]; 1418 struct ContextApplyLookupContext lookup_context = { 1419 {match_class}, 1420 &class_def 1421 }; 1422 return_trace (rule_set.would_apply (c, lookup_context)); 1423 } 1424 1425 inline const Coverage &get_coverage (void) const 1426 { 1427 return this+coverage; 1428 } 1429 1430 inline bool apply (hb_apply_context_t *c) const 1431 { 1432 TRACE_APPLY (this); 1433 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 1434 if (likely (index == NOT_COVERED)) return_trace (false); 1435 1436 const ClassDef &class_def = this+classDef; 1437 index = class_def.get_class (c->buffer->cur().codepoint); 1438 const RuleSet &rule_set = this+ruleSet[index]; 1439 struct ContextApplyLookupContext lookup_context = { 1440 {match_class}, 1441 &class_def 1442 }; 1443 return_trace (rule_set.apply (c, lookup_context)); 1444 } 1445 1446 inline bool sanitize (hb_sanitize_context_t *c) const 1447 { 1448 TRACE_SANITIZE (this); 1449 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this)); 1450 } 1451 1452 protected: 1453 USHORT format; /* Format identifier--format = 2 */ 1454 OffsetTo<Coverage> 1455 coverage; /* Offset to Coverage table--from 1456 * beginning of table */ 1457 OffsetTo<ClassDef> 1458 classDef; /* Offset to glyph ClassDef table--from 1459 * beginning of table */ 1460 OffsetArrayOf<RuleSet> 1461 ruleSet; /* Array of RuleSet tables 1462 * ordered by class */ 1463 public: 1464 DEFINE_SIZE_ARRAY (8, ruleSet); 1465 }; 1466 1467 1468 struct ContextFormat3 1469 { 1470 inline void closure (hb_closure_context_t *c) const 1471 { 1472 TRACE_CLOSURE (this); 1473 if (!(this+coverageZ[0]).intersects (c->glyphs)) 1474 return; 1475 1476 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount); 1477 struct ContextClosureLookupContext lookup_context = { 1478 {intersects_coverage}, 1479 this 1480 }; 1481 context_closure_lookup (c, 1482 glyphCount, (const USHORT *) (coverageZ + 1), 1483 lookupCount, lookupRecord, 1484 lookup_context); 1485 } 1486 1487 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1488 { 1489 TRACE_COLLECT_GLYPHS (this); 1490 (this+coverageZ[0]).add_coverage (c->input); 1491 1492 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount); 1493 struct ContextCollectGlyphsLookupContext lookup_context = { 1494 {collect_coverage}, 1495 this 1496 }; 1497 1498 context_collect_glyphs_lookup (c, 1499 glyphCount, (const USHORT *) (coverageZ + 1), 1500 lookupCount, lookupRecord, 1501 lookup_context); 1502 } 1503 1504 inline bool would_apply (hb_would_apply_context_t *c) const 1505 { 1506 TRACE_WOULD_APPLY (this); 1507 1508 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount); 1509 struct ContextApplyLookupContext lookup_context = { 1510 {match_coverage}, 1511 this 1512 }; 1513 return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context)); 1514 } 1515 1516 inline const Coverage &get_coverage (void) const 1517 { 1518 return this+coverageZ[0]; 1519 } 1520 1521 inline bool apply (hb_apply_context_t *c) const 1522 { 1523 TRACE_APPLY (this); 1524 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint); 1525 if (likely (index == NOT_COVERED)) return_trace (false); 1526 1527 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount); 1528 struct ContextApplyLookupContext lookup_context = { 1529 {match_coverage}, 1530 this 1531 }; 1532 return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context)); 1533 } 1534 1535 inline bool sanitize (hb_sanitize_context_t *c) const 1536 { 1537 TRACE_SANITIZE (this); 1538 if (!c->check_struct (this)) return_trace (false); 1539 unsigned int count = glyphCount; 1540 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */ 1541 if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false); 1542 for (unsigned int i = 0; i < count; i++) 1543 if (!coverageZ[i].sanitize (c, this)) return_trace (false); 1544 const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count); 1545 return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount)); 1546 } 1547 1548 protected: 1549 USHORT format; /* Format identifier--format = 3 */ 1550 USHORT glyphCount; /* Number of glyphs in the input glyph 1551 * sequence */ 1552 USHORT lookupCount; /* Number of LookupRecords */ 1553 OffsetTo<Coverage> 1554 coverageZ[VAR]; /* Array of offsets to Coverage 1555 * table in glyph sequence order */ 1556 LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in 1557 * design order */ 1558 public: 1559 DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX); 1560 }; 1561 1562 struct Context 1563 { 1564 template <typename context_t> 1565 inline typename context_t::return_t dispatch (context_t *c) const 1566 { 1567 TRACE_DISPATCH (this, u.format); 1568 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 1569 switch (u.format) { 1570 case 1: return_trace (c->dispatch (u.format1)); 1571 case 2: return_trace (c->dispatch (u.format2)); 1572 case 3: return_trace (c->dispatch (u.format3)); 1573 default:return_trace (c->default_return_value ()); 1574 } 1575 } 1576 1577 protected: 1578 union { 1579 USHORT format; /* Format identifier */ 1580 ContextFormat1 format1; 1581 ContextFormat2 format2; 1582 ContextFormat3 format3; 1583 } u; 1584 }; 1585 1586 1587 /* Chaining Contextual lookups */ 1588 1589 struct ChainContextClosureLookupContext 1590 { 1591 ContextClosureFuncs funcs; 1592 const void *intersects_data[3]; 1593 }; 1594 1595 struct ChainContextCollectGlyphsLookupContext 1596 { 1597 ContextCollectGlyphsFuncs funcs; 1598 const void *collect_data[3]; 1599 }; 1600 1601 struct ChainContextApplyLookupContext 1602 { 1603 ContextApplyFuncs funcs; 1604 const void *match_data[3]; 1605 }; 1606 1607 static inline void chain_context_closure_lookup (hb_closure_context_t *c, 1608 unsigned int backtrackCount, 1609 const USHORT backtrack[], 1610 unsigned int inputCount, /* Including the first glyph (not matched) */ 1611 const USHORT input[], /* Array of input values--start with second glyph */ 1612 unsigned int lookaheadCount, 1613 const USHORT lookahead[], 1614 unsigned int lookupCount, 1615 const LookupRecord lookupRecord[], 1616 ChainContextClosureLookupContext &lookup_context) 1617 { 1618 if (intersects_array (c, 1619 backtrackCount, backtrack, 1620 lookup_context.funcs.intersects, lookup_context.intersects_data[0]) 1621 && intersects_array (c, 1622 inputCount ? inputCount - 1 : 0, input, 1623 lookup_context.funcs.intersects, lookup_context.intersects_data[1]) 1624 && intersects_array (c, 1625 lookaheadCount, lookahead, 1626 lookup_context.funcs.intersects, lookup_context.intersects_data[2])) 1627 recurse_lookups (c, 1628 lookupCount, lookupRecord); 1629 } 1630 1631 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c, 1632 unsigned int backtrackCount, 1633 const USHORT backtrack[], 1634 unsigned int inputCount, /* Including the first glyph (not matched) */ 1635 const USHORT input[], /* Array of input values--start with second glyph */ 1636 unsigned int lookaheadCount, 1637 const USHORT lookahead[], 1638 unsigned int lookupCount, 1639 const LookupRecord lookupRecord[], 1640 ChainContextCollectGlyphsLookupContext &lookup_context) 1641 { 1642 collect_array (c, c->before, 1643 backtrackCount, backtrack, 1644 lookup_context.funcs.collect, lookup_context.collect_data[0]); 1645 collect_array (c, c->input, 1646 inputCount ? inputCount - 1 : 0, input, 1647 lookup_context.funcs.collect, lookup_context.collect_data[1]); 1648 collect_array (c, c->after, 1649 lookaheadCount, lookahead, 1650 lookup_context.funcs.collect, lookup_context.collect_data[2]); 1651 recurse_lookups (c, 1652 lookupCount, lookupRecord); 1653 } 1654 1655 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c, 1656 unsigned int backtrackCount, 1657 const USHORT backtrack[] HB_UNUSED, 1658 unsigned int inputCount, /* Including the first glyph (not matched) */ 1659 const USHORT input[], /* Array of input values--start with second glyph */ 1660 unsigned int lookaheadCount, 1661 const USHORT lookahead[] HB_UNUSED, 1662 unsigned int lookupCount HB_UNUSED, 1663 const LookupRecord lookupRecord[] HB_UNUSED, 1664 ChainContextApplyLookupContext &lookup_context) 1665 { 1666 return (c->zero_context ? !backtrackCount && !lookaheadCount : true) 1667 && would_match_input (c, 1668 inputCount, input, 1669 lookup_context.funcs.match, lookup_context.match_data[1]); 1670 } 1671 1672 static inline bool chain_context_apply_lookup (hb_apply_context_t *c, 1673 unsigned int backtrackCount, 1674 const USHORT backtrack[], 1675 unsigned int inputCount, /* Including the first glyph (not matched) */ 1676 const USHORT input[], /* Array of input values--start with second glyph */ 1677 unsigned int lookaheadCount, 1678 const USHORT lookahead[], 1679 unsigned int lookupCount, 1680 const LookupRecord lookupRecord[], 1681 ChainContextApplyLookupContext &lookup_context) 1682 { 1683 unsigned int start_index = 0, match_length = 0, end_index = 0; 1684 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH]; 1685 return match_input (c, 1686 inputCount, input, 1687 lookup_context.funcs.match, lookup_context.match_data[1], 1688 &match_length, match_positions) 1689 && match_backtrack (c, 1690 backtrackCount, backtrack, 1691 lookup_context.funcs.match, lookup_context.match_data[0], 1692 &start_index) 1693 && match_lookahead (c, 1694 lookaheadCount, lookahead, 1695 lookup_context.funcs.match, lookup_context.match_data[2], 1696 match_length, &end_index) 1697 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index), 1698 apply_lookup (c, 1699 inputCount, match_positions, 1700 lookupCount, lookupRecord, 1701 match_length)); 1702 } 1703 1704 struct ChainRule 1705 { 1706 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const 1707 { 1708 TRACE_CLOSURE (this); 1709 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack); 1710 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input); 1711 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1712 chain_context_closure_lookup (c, 1713 backtrack.len, backtrack.array, 1714 input.len, input.array, 1715 lookahead.len, lookahead.array, 1716 lookup.len, lookup.array, 1717 lookup_context); 1718 } 1719 1720 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const 1721 { 1722 TRACE_COLLECT_GLYPHS (this); 1723 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack); 1724 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input); 1725 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1726 chain_context_collect_glyphs_lookup (c, 1727 backtrack.len, backtrack.array, 1728 input.len, input.array, 1729 lookahead.len, lookahead.array, 1730 lookup.len, lookup.array, 1731 lookup_context); 1732 } 1733 1734 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const 1735 { 1736 TRACE_WOULD_APPLY (this); 1737 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack); 1738 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input); 1739 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1740 return_trace (chain_context_would_apply_lookup (c, 1741 backtrack.len, backtrack.array, 1742 input.len, input.array, 1743 lookahead.len, lookahead.array, lookup.len, 1744 lookup.array, lookup_context)); 1745 } 1746 1747 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const 1748 { 1749 TRACE_APPLY (this); 1750 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack); 1751 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input); 1752 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1753 return_trace (chain_context_apply_lookup (c, 1754 backtrack.len, backtrack.array, 1755 input.len, input.array, 1756 lookahead.len, lookahead.array, lookup.len, 1757 lookup.array, lookup_context)); 1758 } 1759 1760 inline bool sanitize (hb_sanitize_context_t *c) const 1761 { 1762 TRACE_SANITIZE (this); 1763 if (!backtrack.sanitize (c)) return_trace (false); 1764 const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack); 1765 if (!input.sanitize (c)) return_trace (false); 1766 const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input); 1767 if (!lookahead.sanitize (c)) return_trace (false); 1768 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 1769 return_trace (lookup.sanitize (c)); 1770 } 1771 1772 protected: 1773 ArrayOf<USHORT> 1774 backtrack; /* Array of backtracking values 1775 * (to be matched before the input 1776 * sequence) */ 1777 HeadlessArrayOf<USHORT> 1778 inputX; /* Array of input values (start with 1779 * second glyph) */ 1780 ArrayOf<USHORT> 1781 lookaheadX; /* Array of lookahead values's (to be 1782 * matched after the input sequence) */ 1783 ArrayOf<LookupRecord> 1784 lookupX; /* Array of LookupRecords--in 1785 * design order) */ 1786 public: 1787 DEFINE_SIZE_MIN (8); 1788 }; 1789 1790 struct ChainRuleSet 1791 { 1792 inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const 1793 { 1794 TRACE_CLOSURE (this); 1795 unsigned int num_rules = rule.len; 1796 for (unsigned int i = 0; i < num_rules; i++) 1797 (this+rule[i]).closure (c, lookup_context); 1798 } 1799 1800 inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const 1801 { 1802 TRACE_COLLECT_GLYPHS (this); 1803 unsigned int num_rules = rule.len; 1804 for (unsigned int i = 0; i < num_rules; i++) 1805 (this+rule[i]).collect_glyphs (c, lookup_context); 1806 } 1807 1808 inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const 1809 { 1810 TRACE_WOULD_APPLY (this); 1811 unsigned int num_rules = rule.len; 1812 for (unsigned int i = 0; i < num_rules; i++) 1813 if ((this+rule[i]).would_apply (c, lookup_context)) 1814 return_trace (true); 1815 1816 return_trace (false); 1817 } 1818 1819 inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const 1820 { 1821 TRACE_APPLY (this); 1822 unsigned int num_rules = rule.len; 1823 for (unsigned int i = 0; i < num_rules; i++) 1824 if ((this+rule[i]).apply (c, lookup_context)) 1825 return_trace (true); 1826 1827 return_trace (false); 1828 } 1829 1830 inline bool sanitize (hb_sanitize_context_t *c) const 1831 { 1832 TRACE_SANITIZE (this); 1833 return_trace (rule.sanitize (c, this)); 1834 } 1835 1836 protected: 1837 OffsetArrayOf<ChainRule> 1838 rule; /* Array of ChainRule tables 1839 * ordered by preference */ 1840 public: 1841 DEFINE_SIZE_ARRAY (2, rule); 1842 }; 1843 1844 struct ChainContextFormat1 1845 { 1846 inline void closure (hb_closure_context_t *c) const 1847 { 1848 TRACE_CLOSURE (this); 1849 const Coverage &cov = (this+coverage); 1850 1851 struct ChainContextClosureLookupContext lookup_context = { 1852 {intersects_glyph}, 1853 {nullptr, nullptr, nullptr} 1854 }; 1855 1856 unsigned int count = ruleSet.len; 1857 for (unsigned int i = 0; i < count; i++) 1858 if (cov.intersects_coverage (c->glyphs, i)) { 1859 const ChainRuleSet &rule_set = this+ruleSet[i]; 1860 rule_set.closure (c, lookup_context); 1861 } 1862 } 1863 1864 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1865 { 1866 TRACE_COLLECT_GLYPHS (this); 1867 (this+coverage).add_coverage (c->input); 1868 1869 struct ChainContextCollectGlyphsLookupContext lookup_context = { 1870 {collect_glyph}, 1871 {nullptr, nullptr, nullptr} 1872 }; 1873 1874 unsigned int count = ruleSet.len; 1875 for (unsigned int i = 0; i < count; i++) 1876 (this+ruleSet[i]).collect_glyphs (c, lookup_context); 1877 } 1878 1879 inline bool would_apply (hb_would_apply_context_t *c) const 1880 { 1881 TRACE_WOULD_APPLY (this); 1882 1883 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])]; 1884 struct ChainContextApplyLookupContext lookup_context = { 1885 {match_glyph}, 1886 {nullptr, nullptr, nullptr} 1887 }; 1888 return_trace (rule_set.would_apply (c, lookup_context)); 1889 } 1890 1891 inline const Coverage &get_coverage (void) const 1892 { 1893 return this+coverage; 1894 } 1895 1896 inline bool apply (hb_apply_context_t *c) const 1897 { 1898 TRACE_APPLY (this); 1899 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 1900 if (likely (index == NOT_COVERED)) return_trace (false); 1901 1902 const ChainRuleSet &rule_set = this+ruleSet[index]; 1903 struct ChainContextApplyLookupContext lookup_context = { 1904 {match_glyph}, 1905 {nullptr, nullptr, nullptr} 1906 }; 1907 return_trace (rule_set.apply (c, lookup_context)); 1908 } 1909 1910 inline bool sanitize (hb_sanitize_context_t *c) const 1911 { 1912 TRACE_SANITIZE (this); 1913 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this)); 1914 } 1915 1916 protected: 1917 USHORT format; /* Format identifier--format = 1 */ 1918 OffsetTo<Coverage> 1919 coverage; /* Offset to Coverage table--from 1920 * beginning of table */ 1921 OffsetArrayOf<ChainRuleSet> 1922 ruleSet; /* Array of ChainRuleSet tables 1923 * ordered by Coverage Index */ 1924 public: 1925 DEFINE_SIZE_ARRAY (6, ruleSet); 1926 }; 1927 1928 struct ChainContextFormat2 1929 { 1930 inline void closure (hb_closure_context_t *c) const 1931 { 1932 TRACE_CLOSURE (this); 1933 if (!(this+coverage).intersects (c->glyphs)) 1934 return; 1935 1936 const ClassDef &backtrack_class_def = this+backtrackClassDef; 1937 const ClassDef &input_class_def = this+inputClassDef; 1938 const ClassDef &lookahead_class_def = this+lookaheadClassDef; 1939 1940 struct ChainContextClosureLookupContext lookup_context = { 1941 {intersects_class}, 1942 {&backtrack_class_def, 1943 &input_class_def, 1944 &lookahead_class_def} 1945 }; 1946 1947 unsigned int count = ruleSet.len; 1948 for (unsigned int i = 0; i < count; i++) 1949 if (input_class_def.intersects_class (c->glyphs, i)) { 1950 const ChainRuleSet &rule_set = this+ruleSet[i]; 1951 rule_set.closure (c, lookup_context); 1952 } 1953 } 1954 1955 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 1956 { 1957 TRACE_COLLECT_GLYPHS (this); 1958 (this+coverage).add_coverage (c->input); 1959 1960 const ClassDef &backtrack_class_def = this+backtrackClassDef; 1961 const ClassDef &input_class_def = this+inputClassDef; 1962 const ClassDef &lookahead_class_def = this+lookaheadClassDef; 1963 1964 struct ChainContextCollectGlyphsLookupContext lookup_context = { 1965 {collect_class}, 1966 {&backtrack_class_def, 1967 &input_class_def, 1968 &lookahead_class_def} 1969 }; 1970 1971 unsigned int count = ruleSet.len; 1972 for (unsigned int i = 0; i < count; i++) 1973 (this+ruleSet[i]).collect_glyphs (c, lookup_context); 1974 } 1975 1976 inline bool would_apply (hb_would_apply_context_t *c) const 1977 { 1978 TRACE_WOULD_APPLY (this); 1979 1980 const ClassDef &backtrack_class_def = this+backtrackClassDef; 1981 const ClassDef &input_class_def = this+inputClassDef; 1982 const ClassDef &lookahead_class_def = this+lookaheadClassDef; 1983 1984 unsigned int index = input_class_def.get_class (c->glyphs[0]); 1985 const ChainRuleSet &rule_set = this+ruleSet[index]; 1986 struct ChainContextApplyLookupContext lookup_context = { 1987 {match_class}, 1988 {&backtrack_class_def, 1989 &input_class_def, 1990 &lookahead_class_def} 1991 }; 1992 return_trace (rule_set.would_apply (c, lookup_context)); 1993 } 1994 1995 inline const Coverage &get_coverage (void) const 1996 { 1997 return this+coverage; 1998 } 1999 2000 inline bool apply (hb_apply_context_t *c) const 2001 { 2002 TRACE_APPLY (this); 2003 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint); 2004 if (likely (index == NOT_COVERED)) return_trace (false); 2005 2006 const ClassDef &backtrack_class_def = this+backtrackClassDef; 2007 const ClassDef &input_class_def = this+inputClassDef; 2008 const ClassDef &lookahead_class_def = this+lookaheadClassDef; 2009 2010 index = input_class_def.get_class (c->buffer->cur().codepoint); 2011 const ChainRuleSet &rule_set = this+ruleSet[index]; 2012 struct ChainContextApplyLookupContext lookup_context = { 2013 {match_class}, 2014 {&backtrack_class_def, 2015 &input_class_def, 2016 &lookahead_class_def} 2017 }; 2018 return_trace (rule_set.apply (c, lookup_context)); 2019 } 2020 2021 inline bool sanitize (hb_sanitize_context_t *c) const 2022 { 2023 TRACE_SANITIZE (this); 2024 return_trace (coverage.sanitize (c, this) && 2025 backtrackClassDef.sanitize (c, this) && 2026 inputClassDef.sanitize (c, this) && 2027 lookaheadClassDef.sanitize (c, this) && 2028 ruleSet.sanitize (c, this)); 2029 } 2030 2031 protected: 2032 USHORT format; /* Format identifier--format = 2 */ 2033 OffsetTo<Coverage> 2034 coverage; /* Offset to Coverage table--from 2035 * beginning of table */ 2036 OffsetTo<ClassDef> 2037 backtrackClassDef; /* Offset to glyph ClassDef table 2038 * containing backtrack sequence 2039 * data--from beginning of table */ 2040 OffsetTo<ClassDef> 2041 inputClassDef; /* Offset to glyph ClassDef 2042 * table containing input sequence 2043 * data--from beginning of table */ 2044 OffsetTo<ClassDef> 2045 lookaheadClassDef; /* Offset to glyph ClassDef table 2046 * containing lookahead sequence 2047 * data--from beginning of table */ 2048 OffsetArrayOf<ChainRuleSet> 2049 ruleSet; /* Array of ChainRuleSet tables 2050 * ordered by class */ 2051 public: 2052 DEFINE_SIZE_ARRAY (12, ruleSet); 2053 }; 2054 2055 struct ChainContextFormat3 2056 { 2057 inline void closure (hb_closure_context_t *c) const 2058 { 2059 TRACE_CLOSURE (this); 2060 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2061 2062 if (!(this+input[0]).intersects (c->glyphs)) 2063 return; 2064 2065 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2066 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2067 struct ChainContextClosureLookupContext lookup_context = { 2068 {intersects_coverage}, 2069 {this, this, this} 2070 }; 2071 chain_context_closure_lookup (c, 2072 backtrack.len, (const USHORT *) backtrack.array, 2073 input.len, (const USHORT *) input.array + 1, 2074 lookahead.len, (const USHORT *) lookahead.array, 2075 lookup.len, lookup.array, 2076 lookup_context); 2077 } 2078 2079 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 2080 { 2081 TRACE_COLLECT_GLYPHS (this); 2082 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2083 2084 (this+input[0]).add_coverage (c->input); 2085 2086 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2087 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2088 struct ChainContextCollectGlyphsLookupContext lookup_context = { 2089 {collect_coverage}, 2090 {this, this, this} 2091 }; 2092 chain_context_collect_glyphs_lookup (c, 2093 backtrack.len, (const USHORT *) backtrack.array, 2094 input.len, (const USHORT *) input.array + 1, 2095 lookahead.len, (const USHORT *) lookahead.array, 2096 lookup.len, lookup.array, 2097 lookup_context); 2098 } 2099 2100 inline bool would_apply (hb_would_apply_context_t *c) const 2101 { 2102 TRACE_WOULD_APPLY (this); 2103 2104 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2105 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2106 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2107 struct ChainContextApplyLookupContext lookup_context = { 2108 {match_coverage}, 2109 {this, this, this} 2110 }; 2111 return_trace (chain_context_would_apply_lookup (c, 2112 backtrack.len, (const USHORT *) backtrack.array, 2113 input.len, (const USHORT *) input.array + 1, 2114 lookahead.len, (const USHORT *) lookahead.array, 2115 lookup.len, lookup.array, lookup_context)); 2116 } 2117 2118 inline const Coverage &get_coverage (void) const 2119 { 2120 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2121 return this+input[0]; 2122 } 2123 2124 inline bool apply (hb_apply_context_t *c) const 2125 { 2126 TRACE_APPLY (this); 2127 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2128 2129 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint); 2130 if (likely (index == NOT_COVERED)) return_trace (false); 2131 2132 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2133 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2134 struct ChainContextApplyLookupContext lookup_context = { 2135 {match_coverage}, 2136 {this, this, this} 2137 }; 2138 return_trace (chain_context_apply_lookup (c, 2139 backtrack.len, (const USHORT *) backtrack.array, 2140 input.len, (const USHORT *) input.array + 1, 2141 lookahead.len, (const USHORT *) lookahead.array, 2142 lookup.len, lookup.array, lookup_context)); 2143 } 2144 2145 inline bool sanitize (hb_sanitize_context_t *c) const 2146 { 2147 TRACE_SANITIZE (this); 2148 if (!backtrack.sanitize (c, this)) return_trace (false); 2149 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2150 if (!input.sanitize (c, this)) return_trace (false); 2151 if (!input.len) return_trace (false); /* To be consistent with Context. */ 2152 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2153 if (!lookahead.sanitize (c, this)) return_trace (false); 2154 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2155 return_trace (lookup.sanitize (c)); 2156 } 2157 2158 protected: 2159 USHORT format; /* Format identifier--format = 3 */ 2160 OffsetArrayOf<Coverage> 2161 backtrack; /* Array of coverage tables 2162 * in backtracking sequence, in glyph 2163 * sequence order */ 2164 OffsetArrayOf<Coverage> 2165 inputX ; /* Array of coverage 2166 * tables in input sequence, in glyph 2167 * sequence order */ 2168 OffsetArrayOf<Coverage> 2169 lookaheadX; /* Array of coverage tables 2170 * in lookahead sequence, in glyph 2171 * sequence order */ 2172 ArrayOf<LookupRecord> 2173 lookupX; /* Array of LookupRecords--in 2174 * design order) */ 2175 public: 2176 DEFINE_SIZE_MIN (10); 2177 }; 2178 2179 struct ChainContext 2180 { 2181 template <typename context_t> 2182 inline typename context_t::return_t dispatch (context_t *c) const 2183 { 2184 TRACE_DISPATCH (this, u.format); 2185 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 2186 switch (u.format) { 2187 case 1: return_trace (c->dispatch (u.format1)); 2188 case 2: return_trace (c->dispatch (u.format2)); 2189 case 3: return_trace (c->dispatch (u.format3)); 2190 default:return_trace (c->default_return_value ()); 2191 } 2192 } 2193 2194 protected: 2195 union { 2196 USHORT format; /* Format identifier */ 2197 ChainContextFormat1 format1; 2198 ChainContextFormat2 format2; 2199 ChainContextFormat3 format3; 2200 } u; 2201 }; 2202 2203 2204 template <typename T> 2205 struct ExtensionFormat1 2206 { 2207 inline unsigned int get_type (void) const { return extensionLookupType; } 2208 2209 template <typename X> 2210 inline const X& get_subtable (void) const 2211 { 2212 unsigned int offset = extensionOffset; 2213 if (unlikely (!offset)) return Null(typename T::LookupSubTable); 2214 return StructAtOffset<typename T::LookupSubTable> (this, offset); 2215 } 2216 2217 template <typename context_t> 2218 inline typename context_t::return_t dispatch (context_t *c) const 2219 { 2220 TRACE_DISPATCH (this, format); 2221 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ()); 2222 return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ())); 2223 } 2224 2225 /* This is called from may_dispatch() above with hb_sanitize_context_t. */ 2226 inline bool sanitize (hb_sanitize_context_t *c) const 2227 { 2228 TRACE_SANITIZE (this); 2229 return_trace (c->check_struct (this) && extensionOffset != 0); 2230 } 2231 2232 protected: 2233 USHORT format; /* Format identifier. Set to 1. */ 2234 USHORT extensionLookupType; /* Lookup type of subtable referenced 2235 * by ExtensionOffset (i.e. the 2236 * extension subtable). */ 2237 ULONG extensionOffset; /* Offset to the extension subtable, 2238 * of lookup type subtable. */ 2239 public: 2240 DEFINE_SIZE_STATIC (8); 2241 }; 2242 2243 template <typename T> 2244 struct Extension 2245 { 2246 inline unsigned int get_type (void) const 2247 { 2248 switch (u.format) { 2249 case 1: return u.format1.get_type (); 2250 default:return 0; 2251 } 2252 } 2253 template <typename X> 2254 inline const X& get_subtable (void) const 2255 { 2256 switch (u.format) { 2257 case 1: return u.format1.template get_subtable<typename T::LookupSubTable> (); 2258 default:return Null(typename T::LookupSubTable); 2259 } 2260 } 2261 2262 template <typename context_t> 2263 inline typename context_t::return_t dispatch (context_t *c) const 2264 { 2265 TRACE_DISPATCH (this, u.format); 2266 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 2267 switch (u.format) { 2268 case 1: return_trace (u.format1.dispatch (c)); 2269 default:return_trace (c->default_return_value ()); 2270 } 2271 } 2272 2273 protected: 2274 union { 2275 USHORT format; /* Format identifier */ 2276 ExtensionFormat1<T> format1; 2277 } u; 2278 }; 2279 2280 2281 /* 2282 * GSUB/GPOS Common 2283 */ 2284 2285 struct GSUBGPOS 2286 { 2287 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB; 2288 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS; 2289 2290 inline unsigned int get_script_count (void) const 2291 { return (this+scriptList).len; } 2292 inline const Tag& get_script_tag (unsigned int i) const 2293 { return (this+scriptList).get_tag (i); } 2294 inline unsigned int get_script_tags (unsigned int start_offset, 2295 unsigned int *script_count /* IN/OUT */, 2296 hb_tag_t *script_tags /* OUT */) const 2297 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); } 2298 inline const Script& get_script (unsigned int i) const 2299 { return (this+scriptList)[i]; } 2300 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const 2301 { return (this+scriptList).find_index (tag, index); } 2302 2303 inline unsigned int get_feature_count (void) const 2304 { return (this+featureList).len; } 2305 inline hb_tag_t get_feature_tag (unsigned int i) const 2306 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); } 2307 inline unsigned int get_feature_tags (unsigned int start_offset, 2308 unsigned int *feature_count /* IN/OUT */, 2309 hb_tag_t *feature_tags /* OUT */) const 2310 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); } 2311 inline const Feature& get_feature (unsigned int i) const 2312 { return (this+featureList)[i]; } 2313 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const 2314 { return (this+featureList).find_index (tag, index); } 2315 2316 inline unsigned int get_lookup_count (void) const 2317 { return (this+lookupList).len; } 2318 inline const Lookup& get_lookup (unsigned int i) const 2319 { return (this+lookupList)[i]; } 2320 2321 inline bool find_variations_index (const int *coords, unsigned int num_coords, 2322 unsigned int *index) const 2323 { return (version.to_int () >= 0x00010001u ? this+featureVars : Null(FeatureVariations)) 2324 .find_index (coords, num_coords, index); } 2325 inline const Feature& get_feature_variation (unsigned int feature_index, 2326 unsigned int variations_index) const 2327 { 2328 if (FeatureVariations::NOT_FOUND_INDEX != variations_index && 2329 version.to_int () >= 0x00010001u) 2330 { 2331 const Feature *feature = (this+featureVars).find_substitute (variations_index, 2332 feature_index); 2333 if (feature) 2334 return *feature; 2335 } 2336 return get_feature (feature_index); 2337 } 2338 2339 inline bool sanitize (hb_sanitize_context_t *c) const 2340 { 2341 TRACE_SANITIZE (this); 2342 return_trace (version.sanitize (c) && 2343 likely (version.major == 1) && 2344 scriptList.sanitize (c, this) && 2345 featureList.sanitize (c, this) && 2346 lookupList.sanitize (c, this) && 2347 (version.to_int () < 0x00010001u || featureVars.sanitize (c, this))); 2348 } 2349 2350 protected: 2351 FixedVersion<>version; /* Version of the GSUB/GPOS table--initially set 2352 * to 0x00010000u */ 2353 OffsetTo<ScriptList> 2354 scriptList; /* ScriptList table */ 2355 OffsetTo<FeatureList> 2356 featureList; /* FeatureList table */ 2357 OffsetTo<LookupList> 2358 lookupList; /* LookupList table */ 2359 LOffsetTo<FeatureVariations> 2360 featureVars; /* Offset to Feature Variations 2361 table--from beginning of table 2362 * (may be NULL). Introduced 2363 * in version 0x00010001. */ 2364 public: 2365 DEFINE_SIZE_MIN (10); 2366 }; 2367 2368 2369 } /* namespace OT */ 2370 2371 2372 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */