1 /* 2 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. 3 * 4 * This code is free software; you can redistribute it and/or modify it 5 * under the terms of the GNU General Public License version 2 only, as 6 * published by the Free Software Foundation. Oracle designates this 7 * particular file as subject to the "Classpath" exception as provided 8 * by Oracle in the LICENSE file that accompanied this code. 9 * 10 * This code is distributed in the hope that it will be useful, but WITHOUT 11 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 12 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 13 * version 2 for more details (a copy is included in the LICENSE file that 14 * accompanied this code). 15 * 16 * You should have received a copy of the GNU General Public License version 17 * 2 along with this work; if not, write to the Free Software Foundation, 18 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. 19 * 20 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA 21 * or visit www.oracle.com if you need additional information or have any 22 * questions. 23 */ 24 25 // This file is available under and governed by the GNU General Public 26 // License version 2 only, as published by the Free Software Foundation. 27 // However, the following notice accompanied the original version of this 28 // file: 29 // 30 /* 31 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 32 * Copyright © 2010,2012 Google, Inc. 33 * 34 * This is part of HarfBuzz, a text shaping library. 35 * 36 * Permission is hereby granted, without written agreement and without 37 * license or royalty fees, to use, copy, modify, and distribute this 38 * software and its documentation for any purpose, provided that the 39 * above copyright notice and the following two paragraphs appear in 40 * all copies of this software. 
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH

#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"
#include "hb-set-private.hh"


namespace OT {


#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif

#define TRACE_CLOSURE(this) \
	hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

/* Dispatch context for glyph-closure computation: visiting a subtable calls
 * its closure() method, which adds to `glyphs` whatever glyphs the subtable
 * can produce.  recurse() guards against runaway lookup recursion via
 * nesting_level_left. */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t, hb_void_t, HB_DEBUG_CLOSURE>
{
  inline const char *get_name (void) { return "CLOSURE"; }
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    /* Bail out if recursion budget is exhausted or no recurse hook is set. */
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *glyphs;                     /* IN/OUT: the set being closed over. */
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;      /* Remaining recursion depth budget. */
  unsigned int debug_depth;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  recurse_func (NULL),
			  nesting_level_left (nesting_level_left_),
			  debug_depth (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif

#define TRACE_WOULD_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "%d glyphs", c->len);

/* Dispatch context asking "would this lookup apply to this exact glyph
 * sequence?"  Visiting a subtable calls its would_apply() method. */
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
{
  inline const char *get_name (void) { return "WOULD_APPLY"; }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value (void) { return false; }
  /* Stop iterating sublookups as soon as one says "yes". */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;         /* The candidate glyph sequence. */
  unsigned int len;                     /* Number of glyphs in `glyphs`. */
  bool zero_context;                    /* Only match with empty context? */
  unsigned int debug_depth;

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_),
			      debug_depth (0) {}
};



#ifndef HB_DEBUG_COLLECT_GLYPHS
#define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
#endif

#define TRACE_COLLECT_GLYPHS(this) \
	hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "");

struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t,
hb_void_t, HB_DEBUG_COLLECT_GLYPHS>
{
  inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
  static return_t default_return_value (void) { return HB_VOID; }
  bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    /* Note that GPOS sets recurse_func to NULL already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return HB_VOID;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups.has (lookup_index))
      return HB_VOID;

    /* Swap in empty sets so the recursed lookup only contributes to `output`,
     * then restore the caller's sets afterwards. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    /* Remember this lookup so we never recurse into it twice. */
    recursed_lookups.add (lookup_index);

    return HB_VOID;
  }

  hb_face_t *face;
  hb_set_t *before;                     /* Backtrack glyphs collected here. */
  hb_set_t *input;                      /* Input glyphs collected here. */
  hb_set_t *after;                      /* Lookahead glyphs collected here. */
  hb_set_t *output;                     /* Substitution outputs collected here. */
  recurse_func_t recurse_func;
  hb_set_t recursed_lookups;            /* Lookup indices already visited. */
  unsigned int nesting_level_left;
  unsigned int debug_depth;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t *glyphs_before, /* OUT. May be NULL */
			       hb_set_t *glyphs_input,  /* OUT. May be NULL */
			       hb_set_t *glyphs_after,  /* OUT. May be NULL */
			       hb_set_t *glyphs_output, /* OUT. May be NULL */
			       unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
			      face (face_),
			      /* NULL means "caller doesn't care"; route into the
			       * shared empty set so callees can add unconditionally. */
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
			      after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (NULL),
			      recursed_lookups (),
			      nesting_level_left (nesting_level_left_),
			      debug_depth (0)
  {
    recursed_lookups.init ();
  }
  ~hb_collect_glyphs_context_t (void)
  {
    recursed_lookups.fini ();
  }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



#ifndef HB_DEBUG_GET_COVERAGE
#define HB_DEBUG_GET_COVERAGE (HB_DEBUG+0)
#endif

/* XXX Can we remove this?
 */

/* Dispatch context that accumulates each visited subtable's Coverage into
 * `set` (via stop_sublookup_iteration, which never actually stops). */
template <typename set_t>
struct hb_add_coverage_context_t :
       hb_dispatch_context_t<hb_add_coverage_context_t<set_t>, const Coverage &, HB_DEBUG_GET_COVERAGE>
{
  inline const char *get_name (void) { return "GET_COVERAGE"; }
  typedef const Coverage &return_t;
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value (void) { return Null(Coverage); }
  bool stop_sublookup_iteration (return_t r) const
  {
    /* Side effect: fold this subtable's coverage into the accumulator. */
    r.add_coverage (set);
    return false;
  }

  hb_add_coverage_context_t (set_t *set_) :
			      set (set_),
			      debug_depth (0) {}

  set_t *set;
  unsigned int debug_depth;
};



#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif

#define TRACE_APPLY(this) \
	hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
	(&c->debug_depth, c->get_name (), this, HB_FUNC, \
	 "idx %d gid %u lookup %d", \
	 c->buffer->idx, c->buffer->cur().codepoint, (int) c->lookup_index);

/* Dispatch context for actually applying lookups to a buffer. */
struct hb_apply_context_t :
       hb_dispatch_context_t<hb_apply_context_t, bool, HB_DEBUG_APPLY>
{
  /* Decides, per glyph, whether it matches the expected value and/or whether
   * it may be skipped, based on mask, syllable, lookup flags, and an optional
   * match function. */
  struct matcher_t
  {
    inline matcher_t (void) :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
	     syllable arg1(0),
#undef arg1
	     match_func (NULL),
	     match_data (NULL) {};

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

    inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    inline void set_mask (hb_mask_t mask_) { mask = mask_; }
    inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    inline void set_match_func (match_func_t match_func_,
				const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

    /* MATCH_MAYBE means "no match function installed; defer to skip logic". */
    inline may_match_t may_match (const hb_glyph_info_t &info,
				  const USHORT          *glyph_data) const
    {
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info.codepoint, *glyph_data, match_data) ?
MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

    /* SKIP_YES: glyph is filtered out by lookup flags; SKIP_MAYBE: default-
     * ignorable (ZWNJ/ZWJ handling permitting) — skippable unless it matches. */
    inline may_skip_t
    may_skip (const hb_apply_context_t *c,
	      const hb_glyph_info_t    &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  /* Iterates forward (next) over buffer->info or backward (prev) over
   * buffer->out_info, skipping glyphs the matcher allows skipping, and
   * matching against an expected-value array advanced in lockstep. */
  struct skipping_iterator_t
  {
    inline void init (hb_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = NULL,
      matcher.set_match_func (NULL, NULL);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
      matcher.set_ignore_zwnj (context_match || c->table_index == 1);
      /* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
      matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
    inline void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    inline void set_match_func (matcher_t::match_func_t match_func,
				const void *match_data,
				const USHORT glyph_data[])
    {
      matcher.set_match_func (match_func, match_data);
      match_glyph_data = glyph_data;
    }

    inline void reset (unsigned int start_index_,
		       unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      /* Constrain matching to the current syllable only when starting at the
       * buffer cursor. */
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    /* Un-consume the last matched item. */
    inline void reject (void) { num_items++; match_glyph_data--; }

    /* Advance to the next matching glyph; false if not enough glyphs remain. */
    inline bool next (void)
    {
      assert (num_items > 0);
      while (idx + num_items < end)
      {
	idx++;
	const hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }
    /* Step backward over the out buffer; mirror image of next(). */
    inline bool prev (void)
    {
      assert (num_items > 0);
      while (idx >= num_items)
      {
	idx--;
	const hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }

    unsigned int idx;                   /* Current buffer position. */
    protected:
    hb_apply_context_t *c;
    matcher_t matcher;
    const USHORT *match_glyph_data;     /* Expected values, advanced on match. */

    unsigned int num_items;             /* Items still to be matched. */
    unsigned int end;                   /* One past last usable buffer index. */
  };


  inline const char *get_name (void) { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value (void) { return false; }
  /* Stop iterating sublookups as soon as one applied. */
  bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Apply a nested lookup, with depth limiting. */
  return_t recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, lookup_index);
    nesting_level_left++;
    return ret;
  }

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_direction_t direction;
  hb_mask_t lookup_mask;
  bool auto_zwj;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;
  unsigned int lookup_props;
  const GDEF &gdef;
  bool has_glyph_classes;
  skipping_iterator_t iter_input, iter_context;
  unsigned int lookup_index;
  unsigned int debug_depth;


  hb_apply_context_t (unsigned int table_index_,
		      hb_font_t *font_,
		      hb_buffer_t *buffer_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			direction (buffer_->props.direction),
			lookup_mask (1),
			auto_zwj (true),
			recurse_func (NULL),
			nesting_level_left (MAX_NESTING_LEVEL),
			lookup_props (0),
			gdef (*hb_ot_layout_from_face (face)->gdef),
			has_glyph_classes (gdef.has_glyph_classes ()),
			iter_input (),
			iter_context (),
			lookup_index ((unsigned int) -1),
			debug_depth (0) {}

  inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
  inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
  inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  inline void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  /* Changing lookup props re-initializes both iterators, since skipping
   * behavior depends on them. */
  inline void set_lookup_props (unsigned int lookup_props_)
  {
    lookup_props = lookup_props_;
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  /* Decide whether a mark glyph passes the lookup's mark-filtering rules. */
  inline bool
  match_properties_mark (hb_codepoint_t glyph,
			 unsigned int glyph_props,
			 unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  /* True if the glyph survives the lookup's IgnoreBase/Ligature/Marks and
   * mark-filtering flags. */
  inline bool
  check_glyph_property (const hb_glyph_info_t *info,
			unsigned int  match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }

  /* Update the current glyph's GDEF-class/substitution props after a
   * substitution; prefer real GDEF classes, else the caller's guess. */
  inline void _set_glyph_props (hb_codepoint_t glyph_index,
			  unsigned int class_guess = 0,
			  bool ligature = false,
			  bool component = false) const
  {
    unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
			  HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
    add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit.
       */
      add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
    else if (class_guess)
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
  }

  inline void replace_glyph (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->replace_glyph (glyph_index);
  }
  /* Overwrite the codepoint without advancing the buffer cursor. */
  inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
					   unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, true);
    buffer->replace_glyph (glyph_index);
  }
  inline void output_glyph_for_component (hb_codepoint_t glyph_index,
					  unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, false, true);
    buffer->output_glyph (glyph_index);
  }
};



/* Per-operation callback signatures shared by the contextual lookup code. */
typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};


/* Intersection tests for the three context classification schemes:
 * plain glyph id, class value, and coverage table. */
static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  return glyphs->has (value);
}
static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def =
*reinterpret_cast<const ClassDef *>(data);
  return class_def.intersects_class (glyphs, value);
}
static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).intersects (glyphs);
}

/* True iff every value in the array intersects the closure glyph set. */
static inline bool intersects_array (hb_closure_context_t *c,
				     unsigned int count,
				     const USHORT values[],
				     intersects_func_t intersects_func,
				     const void *intersects_data)
{
  for (unsigned int i = 0; i < count; i++)
    if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
      return false;
  return true;
}


/* Glyph collection for the three context classification schemes. */
static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.add_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  (data+coverage).add_coverage (glyphs);
}
/* Collect the glyphs denoted by every value in the array into `glyphs`. */
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
				  hb_set_t *glyphs,
				  unsigned int count,
				  const USHORT values[],
				  collect_glyphs_func_t collect_func,
				  const void *collect_data)
{
  for (unsigned int i = 0; i < count; i++)
    collect_func (glyphs, values[i], collect_data);
}


/* Single-glyph match predicates for the three classification schemes. */
static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
  return glyph_id == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return
class_def.get_class (glyph_id) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
  const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
  return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}

/* Check whether a standalone glyph sequence (no buffer) would satisfy the
 * input array of a context rule.  The first glyph is matched elsewhere. */
static inline bool would_match_input (hb_would_apply_context_t *c,
				      unsigned int count, /* Including the first glyph (not matched) */
				      const USHORT input[], /* Array of input values--start with second glyph */
				      match_func_t match_func,
				      const void *match_data)
{
  if (count != c->len)
    return false;

  for (unsigned int i = 1; i < count; i++)
    if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
      return false;

  return true;
}
/* Match the input array of a context rule against the buffer starting at the
 * cursor, recording matched positions and (for ligature substitution)
 * mark-ligature status and total ligature component count. */
static inline bool match_input (hb_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const USHORT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset,
				unsigned int match_positions[MAX_CONTEXT_LENGTH],
				bool *p_is_mark_ligature = NULL,
				unsigned int *p_total_component_count = NULL)
{
  TRACE_APPLY (NULL);

  if (unlikely (count > MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There is an exception to this: If a ligature tries ligating with marks that
   *   belong to it itself, go ahead, assuming that the font designer knows what
   *   they are doing (otherwise it can break Indic stuff when a matra wants to
   *   ligate with a conjunct...)
   */

  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp) {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them. */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
	return_trace (false);
    } else {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself!
       */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_is_mark_ligature)
    *p_is_mark_ligature = is_mark_ligature;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
/* Replace a matched input sequence with a single ligature glyph, preserving
 * and re-numbering ligature ids/components so marks keep attaching correctly
 * in later GPOS processing. */
static inline bool ligate_input (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int match_length,
				 hb_codepoint_t lig_glyph,
				 bool is_mark_ligature,
				 unsigned int total_component_count)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, buffer->idx + match_length);

  /*
   * - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
  unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (!is_mark_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
      _hb_glyph_info_set_modified_combining_class (&buffer->cur(), 0);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  for (unsigned int i = 1; i < count; i++)
  {
    /* Glyphs between matched positions are skipped marks; re-home them onto
     * the new ligature's component numbering. */
    while (buffer->idx < match_positions[i])
    {
      if (!is_mark_ligature) {
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->cur()), 1u), last_num_components);
	_hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur()); 887 components_so_far += last_num_components; 888 889 /* Skip the base glyph */ 890 buffer->idx++; 891 } 892 893 if (!is_mark_ligature && last_lig_id) { 894 /* Re-adjust components for any marks following. */ 895 for (unsigned int i = buffer->idx; i < buffer->len; i++) { 896 if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) { 897 unsigned int new_lig_comp = components_so_far - last_num_components + 898 MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->info[i]), 1u), last_num_components); 899 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp); 900 } else 901 break; 902 } 903 } 904 return_trace (true); 905 } 906 907 static inline bool match_backtrack (hb_apply_context_t *c, 908 unsigned int count, 909 const USHORT backtrack[], 910 match_func_t match_func, 911 const void *match_data) 912 { 913 TRACE_APPLY (NULL); 914 915 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; 916 skippy_iter.reset (c->buffer->backtrack_len (), count); 917 skippy_iter.set_match_func (match_func, match_data, backtrack); 918 919 for (unsigned int i = 0; i < count; i++) 920 if (!skippy_iter.prev ()) 921 return_trace (false); 922 923 return_trace (true); 924 } 925 926 static inline bool match_lookahead (hb_apply_context_t *c, 927 unsigned int count, 928 const USHORT lookahead[], 929 match_func_t match_func, 930 const void *match_data, 931 unsigned int offset) 932 { 933 TRACE_APPLY (NULL); 934 935 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context; 936 skippy_iter.reset (c->buffer->idx + offset - 1, count); 937 skippy_iter.set_match_func (match_func, match_data, lookahead); 938 939 for (unsigned int i = 0; i < count; i++) 940 if (!skippy_iter.next ()) 941 return_trace (false); 942 943 return_trace (true); 944 } 945 946 947 948 struct LookupRecord 949 { 950 inline bool sanitize (hb_sanitize_context_t *c) const 951 { 952 TRACE_SANITIZE (this); 
    return_trace (c->check_struct (this));
  }

  USHORT	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  USHORT	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};


/* Recurse the context into each referenced lookup, in design order. */
template <typename context_t>
static inline void recurse_lookups (context_t *c,
				    unsigned int lookupCount,
				    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  for (unsigned int i = 0; i < lookupCount; i++)
    c->recurse (lookupRecord[i].lookupListIndex);
}

/* Apply the lookups named in lookupRecord[] at their matched positions,
 * re-adjusting match_positions[] whenever a recursed lookup grows or
 * shrinks the buffer. */
static inline bool apply_lookup (hb_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_length)
{
  TRACE_APPLY (NULL);

  hb_buffer_t *buffer = c->buffer;
  unsigned int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    buffer->move_to (match_positions[idx]);

    /* Detect any buffer-length change caused by the recursed lookup. */
    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
        continue;

    /* Recursed lookup changed buffer len.  Adjust.
     */

    /* end can't go back past the current match position.
     * Note: this is only true because we do NOT allow MultipleSubst
     * with zero sequence len. */
    end = MAX ((int) match_positions[idx] + 1, int (end) + delta);

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      if (unlikely (delta + count > MAX_CONTEXT_LENGTH))
	break;
    }
    else
    {
      /* NOTE: delta is negative. */
      delta = MAX (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
	     (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  buffer->move_to (end);

  return_trace (true);
}



/* Contextual lookups */

/* Callback bundles threaded through the generic context_* helpers below;
 * the data pointer is handed back to the callback verbatim. */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data;
};

struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};

struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};

/* Glyph-closure for one context rule: recurse into the rule's lookups only
 * if every input slot can intersect the current glyph set. */
static inline void context_closure_lookup (hb_closure_context_t *c,
					   unsigned int inputCount, /* Including the first glyph (not matched) */
					   const USHORT input[], /* Array of input values--start with second glyph */
					   unsigned int lookupCount,
					   const LookupRecord lookupRecord[],
					   ContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
			inputCount ?
			inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data))
    recurse_lookups (c,
		     lookupCount, lookupRecord);
}

/* Collect-glyphs for one context rule: add all input values to the input
 * set, then recurse into the referenced lookups. */
static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
						  unsigned int inputCount, /* Including the first glyph (not matched) */
						  const USHORT input[], /* Array of input values--start with second glyph */
						  unsigned int lookupCount,
						  const LookupRecord lookupRecord[],
						  ContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}

/* Would-apply test only needs the input sequence; the lookup records are
 * deliberately unused. */
static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookupCount HB_UNUSED,
					       const LookupRecord lookupRecord[] HB_UNUSED,
					       ContextApplyLookupContext &lookup_context)
{
  return would_match_input (c,
			    inputCount, input,
			    lookup_context.funcs.match, lookup_context.match_data);
}

/* Match the input sequence against the buffer, then apply the nested
 * lookups at the recorded match positions. */
static inline bool context_apply_lookup (hb_apply_context_t *c,
					 unsigned int inputCount, /* Including the first glyph (not matched) */
					 const USHORT input[], /* Array of input values--start with second glyph */
					 unsigned int lookupCount,
					 const LookupRecord lookupRecord[],
					 ContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[MAX_CONTEXT_LENGTH];
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data,
		      &match_length, match_positions)
      && apply_lookup (c,
		       inputCount, match_positions,
		       lookupCount, lookupRecord,
		       match_length);
}

/* One context rule: inputCount-1 match values followed in memory by
 * lookupCount LookupRecords. */
struct Rule
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    /* lookupRecord[] starts right after the inputCount-1 input values. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_closure_lookup (c,
			    inputCount, inputZ,
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
				   inputCount, inputZ,
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
  }

  public:
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return inputCount.sanitize (c)
	&& lookupCount.sanitize (c)
	&& c->check_range (inputZ,
			   inputZ[0].static_size * inputCount
			   + lookupRecordX[0].static_size * lookupCount);
  }

  protected:
  USHORT	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  USHORT	lookupCount;		/* Number of LookupRecords */
  USHORT	inputZ[VAR];		/* Array of match inputs--start with
					 * second glyph */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (4, inputZ, lookupRecordX);
};

/* A preference-ordered set of Rules sharing the same first-glyph key. */
struct RuleSet
{
  inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  /* First rule that applies wins. */
  inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);
    }
    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};


/* Context substitution/positioning subtable, format 1: rule sets keyed by
 * individual first glyph via a Coverage table. */
struct ContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);

    const Coverage &cov = (this+coverage);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      NULL
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      NULL
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};


/* Context subtable, format 2: rule sets keyed by glyph class from a
 * ClassDef table. */
struct ContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (class_def.intersects_class (c->glyphs, i)) {
        const RuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i
 < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* The rule set is selected by the current glyph's class, not by its
     * coverage index. */
    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  OffsetArrayOf<RuleSet>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};


/* Context subtable, format 3: a single rule, with each input position
 * matched by its own Coverage table. */
struct ContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    /* lookupRecord[] starts right after the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      this
    };
    context_closure_lookup (c,
			    glyphCount, (const USHORT *) (coverageZ + 1),
			    lookupCount, lookupRecord,
			    lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverageZ[0]).add_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
				   glyphCount, (const USHORT *) (coverageZ + 1),
				   lookupCount, lookupRecord,
				   lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverageZ[0];
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
    return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 3 */
  USHORT	glyphCount;		/* Number of glyphs in the input glyph
					 * sequence */
  USHORT	lookupCount;		/* Number of LookupRecords */
  OffsetTo<Coverage>
		coverageZ[VAR];		/* Array of offsets to Coverage
					 * table in glyph sequence order */
  LookupRecord	lookupRecordX[VAR];	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY2 (6, coverageZ, lookupRecordX);
};

/* Dispatcher over the three Context subtable formats. */
struct Context
{
  template <typename context_t>
  inline typename context_t::return_t dispatch (context_t *c) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1));
    case 2: return_trace (c->dispatch (u.format2));
    case 3: return_trace (c->dispatch (u.format3));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  USHORT		format;		/* Format identifier
					 */
  ContextFormat1	format1;
  ContextFormat2	format2;
  ContextFormat3	format3;
  } u;
};


/* Chaining Contextual lookups */

/* Callback bundles for chain contexts: one data slot each for the
 * backtrack [0], input [1], and lookahead [2] sequences. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  const void *intersects_data[3];
};

struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};

struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};

/* Glyph-closure for one chain rule: recurse into its lookups only if the
 * backtrack, input, and lookahead sequences can all intersect the current
 * glyph set. */
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
						 unsigned int backtrackCount,
						 const USHORT backtrack[],
						 unsigned int inputCount, /* Including the first glyph (not matched) */
						 const USHORT input[], /* Array of input values--start with second glyph */
						 unsigned int lookaheadCount,
						 const USHORT lookahead[],
						 unsigned int lookupCount,
						 const LookupRecord lookupRecord[],
						 ChainContextClosureLookupContext &lookup_context)
{
  if (intersects_array (c,
			backtrackCount, backtrack,
			lookup_context.funcs.intersects, lookup_context.intersects_data[0])
   && intersects_array (c,
			inputCount ? inputCount - 1 : 0, input,
			lookup_context.funcs.intersects, lookup_context.intersects_data[1])
   && intersects_array (c,
			lookaheadCount, lookahead,
			lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
    recurse_lookups (c,
		     lookupCount, lookupRecord);
}

/* Collect-glyphs for one chain rule: backtrack values go into c->before,
 * input into c->input, lookahead into c->after; then recurse. */
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
							unsigned int backtrackCount,
							const USHORT backtrack[],
							unsigned int inputCount, /* Including the first glyph (not matched) */
							const USHORT input[], /* Array of input values--start with second glyph */
							unsigned int lookaheadCount,
							const USHORT lookahead[],
							unsigned int lookupCount,
							const LookupRecord lookupRecord[],
							ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
		 backtrackCount, backtrack,
		 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
		 lookaheadCount, lookahead,
		 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}

/* Would-apply: under zero_context, only rules with no backtrack and no
 * lookahead qualify; the input sequence itself is matched as usual. */
static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
						     unsigned int backtrackCount,
						     const USHORT backtrack[] HB_UNUSED,
						     unsigned int inputCount, /* Including the first glyph (not matched) */
						     const USHORT input[], /* Array of input values--start with second glyph */
						     unsigned int lookaheadCount,
						     const USHORT lookahead[] HB_UNUSED,
						     unsigned int lookupCount HB_UNUSED,
						     const LookupRecord lookupRecord[] HB_UNUSED,
						     ChainContextApplyLookupContext &lookup_context)
{
  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
      && would_match_input (c,
			    inputCount, input,
			    lookup_context.funcs.match, lookup_context.match_data[1]);
}

/* Match input, then backtrack, then lookahead; on success apply the nested
 * lookups at the recorded match positions. */
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
					       unsigned int backtrackCount,
					       const USHORT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const USHORT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const USHORT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       ChainContextApplyLookupContext &lookup_context)
{
  unsigned int match_length = 0;
  unsigned int match_positions[MAX_CONTEXT_LENGTH];
  return match_input (c,
		      inputCount, input,
		      lookup_context.funcs.match, lookup_context.match_data[1],
		      &match_length, match_positions)
      && match_backtrack (c,
			  backtrackCount, backtrack,
			  lookup_context.funcs.match, lookup_context.match_data[0])
      && match_lookahead (c,
			  lookaheadCount, lookahead,
			  lookup_context.funcs.match, lookup_context.match_data[2],
			  match_length)
      && apply_lookup (c,
		       inputCount, match_positions,
		       lookupCount, lookupRecord,
		       match_length);
}

/* One chain rule: four variable-length arrays laid out back-to-back in the
 * font data; the trailing three are located at runtime with StructAfter. */
struct ChainRule
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.array,
				  input.len, input.array,
				  lookahead.len, lookahead.array,
				  lookup.len, lookup.array,
				  lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t
 *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, backtrack.array,
					 input.len, input.array,
					 lookahead.len, lookahead.array,
					 lookup.len, lookup.array,
					 lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_would_apply_lookup (c,
						    backtrack.len, backtrack.array,
						    input.len, input.array,
						    lookahead.len, lookahead.array, lookup.len,
						    lookup.array, lookup_context));
  }

  inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, backtrack.array,
					      input.len, input.array,
					      lookahead.len, lookahead.array, lookup.len,
					      lookup.array, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Each trailing array can only be located once the array before it has
     * been sanitized (its length field determines the next offset). */
    if (!backtrack.sanitize (c)) return_trace (false);
    const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  ArrayOf<USHORT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<USHORT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  ArrayOf<USHORT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  ArrayOf<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};

/* A preference-ordered set of ChainRules sharing the same first-glyph key. */
struct ChainRuleSet
{
  inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
  {
    TRACE_CLOSURE (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).closure (c, lookup_context);
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    TRACE_COLLECT_GLYPHS (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      (this+rule[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_WOULD_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).would_apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  /* First rule that applies wins. */
  inline bool apply (hb_apply_context_t *c,
 ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    unsigned int num_rules = rule.len;
    for (unsigned int i = 0; i < num_rules; i++)
      if ((this+rule[i]).apply (c, lookup_context))
        return_trace (true);

    return_trace (false);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  OffsetArrayOf<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};

/* ChainContext subtable, format 1: chain rule sets keyed by individual
 * first glyph via a Coverage table. */
struct ChainContextFormat1
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const Coverage &cov = (this+coverage);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (cov.intersects_coverage (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {NULL, NULL, NULL}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {NULL, NULL, NULL}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 1 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};

/* ChainContext subtable, format 2: separate ClassDef tables classify the
 * backtrack, input, and lookahead sequences independently. */
struct ChainContextFormat2
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      if (input_class_def.intersects_class (c->glyphs, i)) {
        const ChainRuleSet &rule_set = this+ruleSet[i];
        rule_set.closure (c, lookup_context);
      }
  }

  inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    TRACE_COLLECT_GLYPHS (this);
    (this+coverage).add_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    unsigned int count = ruleSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+ruleSet[i]).collect_glyphs (c, lookup_context);
  }

  inline bool would_apply (hb_would_apply_context_t *c) const
  {
    TRACE_WOULD_APPLY (this);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.would_apply (c, lookup_context));
  }

  inline const Coverage &get_coverage (void) const
  {
    return this+coverage;
  }

  inline bool apply (hb_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* The rule set is selected by the current glyph's input class. */
    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
		  backtrackClassDef.sanitize (c, this) &&
		  inputClassDef.sanitize (c, this) &&
		  lookaheadClassDef.sanitize (c, this) &&
		  ruleSet.sanitize (c, this));
  }

  protected:
  USHORT	format;			/* Format identifier--format = 2 */
  OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  OffsetArrayOf<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

/* ChainContext subtable, format 3: one chain rule, with every backtrack,
 * input, and lookahead position matched by its own Coverage table. */
struct ChainContextFormat3
{
  inline void closure (hb_closure_context_t *c) const
  {
    TRACE_CLOSURE (this);
    const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
    const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage},
      {this, this, this}
    };
2038 chain_context_closure_lookup (c, 2039 backtrack.len, (const USHORT *) backtrack.array, 2040 input.len, (const USHORT *) input.array + 1, 2041 lookahead.len, (const USHORT *) lookahead.array, 2042 lookup.len, lookup.array, 2043 lookup_context); 2044 } 2045 2046 inline void collect_glyphs (hb_collect_glyphs_context_t *c) const 2047 { 2048 TRACE_COLLECT_GLYPHS (this); 2049 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2050 2051 (this+input[0]).add_coverage (c->input); 2052 2053 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2054 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2055 struct ChainContextCollectGlyphsLookupContext lookup_context = { 2056 {collect_coverage}, 2057 {this, this, this} 2058 }; 2059 chain_context_collect_glyphs_lookup (c, 2060 backtrack.len, (const USHORT *) backtrack.array, 2061 input.len, (const USHORT *) input.array + 1, 2062 lookahead.len, (const USHORT *) lookahead.array, 2063 lookup.len, lookup.array, 2064 lookup_context); 2065 } 2066 2067 inline bool would_apply (hb_would_apply_context_t *c) const 2068 { 2069 TRACE_WOULD_APPLY (this); 2070 2071 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2072 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2073 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2074 struct ChainContextApplyLookupContext lookup_context = { 2075 {match_coverage}, 2076 {this, this, this} 2077 }; 2078 return_trace (chain_context_would_apply_lookup (c, 2079 backtrack.len, (const USHORT *) backtrack.array, 2080 input.len, (const USHORT *) input.array + 1, 2081 lookahead.len, (const USHORT *) lookahead.array, 2082 lookup.len, lookup.array, lookup_context)); 2083 } 2084 2085 inline const Coverage &get_coverage (void) const 2086 { 2087 const OffsetArrayOf<Coverage> &input = 
StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2088 return this+input[0]; 2089 } 2090 2091 inline bool apply (hb_apply_context_t *c) const 2092 { 2093 TRACE_APPLY (this); 2094 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2095 2096 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint); 2097 if (likely (index == NOT_COVERED)) return_trace (false); 2098 2099 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2100 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2101 struct ChainContextApplyLookupContext lookup_context = { 2102 {match_coverage}, 2103 {this, this, this} 2104 }; 2105 return_trace (chain_context_apply_lookup (c, 2106 backtrack.len, (const USHORT *) backtrack.array, 2107 input.len, (const USHORT *) input.array + 1, 2108 lookahead.len, (const USHORT *) lookahead.array, 2109 lookup.len, lookup.array, lookup_context)); 2110 } 2111 2112 inline bool sanitize (hb_sanitize_context_t *c) const 2113 { 2114 TRACE_SANITIZE (this); 2115 if (!backtrack.sanitize (c, this)) return_trace (false); 2116 const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack); 2117 if (!input.sanitize (c, this)) return_trace (false); 2118 if (!input.len) return_trace (false); /* To be consistent with Context. 
*/ 2119 const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input); 2120 if (!lookahead.sanitize (c, this)) return_trace (false); 2121 const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead); 2122 return_trace (lookup.sanitize (c)); 2123 } 2124 2125 protected: 2126 USHORT format; /* Format identifier--format = 3 */ 2127 OffsetArrayOf<Coverage> 2128 backtrack; /* Array of coverage tables 2129 * in backtracking sequence, in glyph 2130 * sequence order */ 2131 OffsetArrayOf<Coverage> 2132 inputX ; /* Array of coverage 2133 * tables in input sequence, in glyph 2134 * sequence order */ 2135 OffsetArrayOf<Coverage> 2136 lookaheadX; /* Array of coverage tables 2137 * in lookahead sequence, in glyph 2138 * sequence order */ 2139 ArrayOf<LookupRecord> 2140 lookupX; /* Array of LookupRecords--in 2141 * design order) */ 2142 public: 2143 DEFINE_SIZE_MIN (10); 2144 }; 2145 2146 struct ChainContext 2147 { 2148 template <typename context_t> 2149 inline typename context_t::return_t dispatch (context_t *c) const 2150 { 2151 TRACE_DISPATCH (this, u.format); 2152 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 2153 switch (u.format) { 2154 case 1: return_trace (c->dispatch (u.format1)); 2155 case 2: return_trace (c->dispatch (u.format2)); 2156 case 3: return_trace (c->dispatch (u.format3)); 2157 default:return_trace (c->default_return_value ()); 2158 } 2159 } 2160 2161 protected: 2162 union { 2163 USHORT format; /* Format identifier */ 2164 ChainContextFormat1 format1; 2165 ChainContextFormat2 format2; 2166 ChainContextFormat3 format3; 2167 } u; 2168 }; 2169 2170 2171 template <typename T> 2172 struct ExtensionFormat1 2173 { 2174 inline unsigned int get_type (void) const { return extensionLookupType; } 2175 2176 template <typename X> 2177 inline const X& get_subtable (void) const 2178 { 2179 unsigned int offset = extensionOffset; 2180 if (unlikely (!offset)) return 
Null(typename T::LookupSubTable); 2181 return StructAtOffset<typename T::LookupSubTable> (this, offset); 2182 } 2183 2184 template <typename context_t> 2185 inline typename context_t::return_t dispatch (context_t *c) const 2186 { 2187 TRACE_DISPATCH (this, format); 2188 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ()); 2189 return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ())); 2190 } 2191 2192 /* This is called from may_dispatch() above with hb_sanitize_context_t. */ 2193 inline bool sanitize (hb_sanitize_context_t *c) const 2194 { 2195 TRACE_SANITIZE (this); 2196 return_trace (c->check_struct (this) && extensionOffset != 0); 2197 } 2198 2199 protected: 2200 USHORT format; /* Format identifier. Set to 1. */ 2201 USHORT extensionLookupType; /* Lookup type of subtable referenced 2202 * by ExtensionOffset (i.e. the 2203 * extension subtable). */ 2204 ULONG extensionOffset; /* Offset to the extension subtable, 2205 * of lookup type subtable. 
*/ 2206 public: 2207 DEFINE_SIZE_STATIC (8); 2208 }; 2209 2210 template <typename T> 2211 struct Extension 2212 { 2213 inline unsigned int get_type (void) const 2214 { 2215 switch (u.format) { 2216 case 1: return u.format1.get_type (); 2217 default:return 0; 2218 } 2219 } 2220 template <typename X> 2221 inline const X& get_subtable (void) const 2222 { 2223 switch (u.format) { 2224 case 1: return u.format1.template get_subtable<typename T::LookupSubTable> (); 2225 default:return Null(typename T::LookupSubTable); 2226 } 2227 } 2228 2229 template <typename context_t> 2230 inline typename context_t::return_t dispatch (context_t *c) const 2231 { 2232 TRACE_DISPATCH (this, u.format); 2233 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ()); 2234 switch (u.format) { 2235 case 1: return_trace (u.format1.dispatch (c)); 2236 default:return_trace (c->default_return_value ()); 2237 } 2238 } 2239 2240 protected: 2241 union { 2242 USHORT format; /* Format identifier */ 2243 ExtensionFormat1<T> format1; 2244 } u; 2245 }; 2246 2247 2248 /* 2249 * GSUB/GPOS Common 2250 */ 2251 2252 struct GSUBGPOS 2253 { 2254 static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB; 2255 static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS; 2256 2257 inline unsigned int get_script_count (void) const 2258 { return (this+scriptList).len; } 2259 inline const Tag& get_script_tag (unsigned int i) const 2260 { return (this+scriptList).get_tag (i); } 2261 inline unsigned int get_script_tags (unsigned int start_offset, 2262 unsigned int *script_count /* IN/OUT */, 2263 hb_tag_t *script_tags /* OUT */) const 2264 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); } 2265 inline const Script& get_script (unsigned int i) const 2266 { return (this+scriptList)[i]; } 2267 inline bool find_script_index (hb_tag_t tag, unsigned int *index) const 2268 { return (this+scriptList).find_index (tag, index); } 2269 2270 inline unsigned int get_feature_count (void) 
const 2271 { return (this+featureList).len; } 2272 inline hb_tag_t get_feature_tag (unsigned int i) const 2273 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); } 2274 inline unsigned int get_feature_tags (unsigned int start_offset, 2275 unsigned int *feature_count /* IN/OUT */, 2276 hb_tag_t *feature_tags /* OUT */) const 2277 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); } 2278 inline const Feature& get_feature (unsigned int i) const 2279 { return (this+featureList)[i]; } 2280 inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const 2281 { return (this+featureList).find_index (tag, index); } 2282 2283 inline unsigned int get_lookup_count (void) const 2284 { return (this+lookupList).len; } 2285 inline const Lookup& get_lookup (unsigned int i) const 2286 { return (this+lookupList)[i]; } 2287 2288 inline bool sanitize (hb_sanitize_context_t *c) const 2289 { 2290 TRACE_SANITIZE (this); 2291 return_trace (version.sanitize (c) && 2292 likely (version.major == 1) && 2293 scriptList.sanitize (c, this) && 2294 featureList.sanitize (c, this) && 2295 lookupList.sanitize (c, this)); 2296 } 2297 2298 protected: 2299 FixedVersion version; /* Version of the GSUB/GPOS table--initially set 2300 * to 0x00010000u */ 2301 OffsetTo<ScriptList> 2302 scriptList; /* ScriptList table */ 2303 OffsetTo<FeatureList> 2304 featureList; /* FeatureList table */ 2305 OffsetTo<LookupList> 2306 lookupList; /* LookupList table */ 2307 public: 2308 DEFINE_SIZE_STATIC (10); 2309 }; 2310 2311 2312 } /* namespace OT */ 2313 2314 2315 #endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */