415 {
416 inline bool apply (hb_apply_context_t *c,
417 unsigned int mark_index, unsigned int glyph_index,
418 const AnchorMatrix &anchors, unsigned int class_count,
419 unsigned int glyph_pos) const
420 {
421 TRACE_APPLY (this);
422 hb_buffer_t *buffer = c->buffer;
423 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
424 unsigned int mark_class = record.klass;
425
426 const Anchor& mark_anchor = this + record.markAnchor;
427 bool found;
428 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
429 /* If this subtable doesn't have an anchor for this base and this class,
430 * return false such that the subsequent subtables have a chance at it. */
431 if (unlikely (!found)) return_trace (false);
432
433 hb_position_t mark_x, mark_y, base_x, base_y;
434
435 mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
436 glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
437
438 hb_glyph_position_t &o = buffer->cur_pos();
439 o.x_offset = base_x - mark_x;
440 o.y_offset = base_y - mark_y;
441 o.attach_type() = ATTACH_TYPE_MARK;
442 o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
443 buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
444
445 buffer->idx++;
446 return_trace (true);
447 }
448
/* Validate the mark-record array (anchor offsets resolve relative to
 * `this`) against the sanitizer's memory bounds. */
449 inline bool sanitize (hb_sanitize_context_t *c) const
450 {
451 TRACE_SANITIZE (this);
452 return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
453 }
454 };
626
627 const PairValueRecord *record_array = CastP<PairValueRecord> (arrayZ);
628 unsigned int count = len;
629
630 /* Hand-coded bsearch. */
631 if (unlikely (!count))
632 return_trace (false);
633 hb_codepoint_t x = buffer->info[pos].codepoint;
634 int min = 0, max = (int) count - 1;
635 while (min <= max)
636 {
637 int mid = (min + max) / 2;
638 const PairValueRecord *record = &StructAtOffset<PairValueRecord> (record_array, record_size * mid);
639 hb_codepoint_t mid_x = record->secondGlyph;
640 if (x < mid_x)
641 max = mid - 1;
642 else if (x > mid_x)
643 min = mid + 1;
644 else
645 {
646 valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
647 valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
648 if (len2)
649 pos++;
650 buffer->idx = pos;
651 return_trace (true);
652 }
653 }
654
655 return_trace (false);
656 }
657
/* Precomputed parameters threaded through PairSet sanitization as a
 * single pointer, since the generic sanitize machinery passes one
 * closure argument. */
658 struct sanitize_closure_t {
659 const void *base;
660 const ValueFormat *valueFormats;
661 unsigned int len1; /* valueFormats[0].get_len() */
662 unsigned int stride; /* 1 + len1 + len2 */
663 };
664
665 inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
773
774 inline bool apply (hb_apply_context_t *c) const
775 {
776 TRACE_APPLY (this);
777 hb_buffer_t *buffer = c->buffer;
778 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
779 if (likely (index == NOT_COVERED)) return_trace (false);
780
781 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
782 skippy_iter.reset (buffer->idx, 1);
783 if (!skippy_iter.next ()) return_trace (false);
784
785 unsigned int len1 = valueFormat1.get_len ();
786 unsigned int len2 = valueFormat2.get_len ();
787 unsigned int record_len = len1 + len2;
788
789 unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
790 unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
791 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
792
793 const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
794 valueFormat1.apply_value (c, this, v, buffer->cur_pos());
795 valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
796
797 buffer->idx = skippy_iter.idx;
798 if (len2)
799 buffer->idx++;
800
801 return_trace (true);
802 }
803
804 inline bool sanitize (hb_sanitize_context_t *c) const
805 {
806 TRACE_SANITIZE (this);
807 if (!(c->check_struct (this)
808 && coverage.sanitize (c, this)
809 && classDef1.sanitize (c, this)
810 && classDef2.sanitize (c, this))) return_trace (false);
811
812 unsigned int len1 = valueFormat1.get_len ();
912 }
913
914 inline bool apply (hb_apply_context_t *c) const
915 {
916 TRACE_APPLY (this);
917 hb_buffer_t *buffer = c->buffer;
918
919 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
920 if (!this_record.exitAnchor) return_trace (false);
921
922 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
923 skippy_iter.reset (buffer->idx, 1);
924 if (!skippy_iter.next ()) return_trace (false);
925
926 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
927 if (!next_record.entryAnchor) return_trace (false);
928
929 unsigned int i = buffer->idx;
930 unsigned int j = skippy_iter.idx;
931
932 hb_position_t entry_x, entry_y, exit_x, exit_y;
933 (this+this_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
934 (this+next_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
935
936 hb_glyph_position_t *pos = buffer->pos;
937
938 hb_position_t d;
939 /* Main-direction adjustment */
940 switch (c->direction) {
941 case HB_DIRECTION_LTR:
942 pos[i].x_advance = exit_x + pos[i].x_offset;
943
944 d = entry_x + pos[j].x_offset;
945 pos[j].x_advance -= d;
946 pos[j].x_offset -= d;
947 break;
948 case HB_DIRECTION_RTL:
949 d = exit_x + pos[i].x_offset;
950 pos[i].x_advance -= d;
951 pos[i].x_offset -= d;
1066
1067 inline const Coverage &get_coverage (void) const
1068 {
1069 return this+markCoverage;
1070 }
1071
1072 inline bool apply (hb_apply_context_t *c) const
1073 {
1074 TRACE_APPLY (this);
1075 hb_buffer_t *buffer = c->buffer;
1076 unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
1077 if (likely (mark_index == NOT_COVERED)) return_trace (false);
1078
1079 /* Now we search backwards for a non-mark glyph */
1080 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1081 skippy_iter.reset (buffer->idx, 1);
1082 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1083 do {
1084 if (!skippy_iter.prev ()) return_trace (false);
1085 /* We only want to attach to the first of a MultipleSubst sequence. Reject others. */
1086 if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
1087 skippy_iter.reject ();
1088 } while (1);
1089
1090 /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
1091 //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1092
1093 unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
1094 if (base_index == NOT_COVERED) return_trace (false);
1095
1096 return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
1097 }
1098
1099 inline bool sanitize (hb_sanitize_context_t *c) const
1100 {
1101 TRACE_SANITIZE (this);
1102 return_trace (c->check_struct (this) &&
1103 markCoverage.sanitize (c, this) &&
1104 baseCoverage.sanitize (c, this) &&
1105 markArray.sanitize (c, this) &&
1106 baseArray.sanitize (c, this, (unsigned int) classCount));
|
415 {
/* Attach the mark glyph at the buffer cursor (record mark_index in this
 * MarkArray) to the glyph at glyph_pos, aligning anchors from `anchors`.
 * Returns false if no anchor exists for this base/class so later
 * subtables get a chance. */
416 inline bool apply (hb_apply_context_t *c,
417 unsigned int mark_index, unsigned int glyph_index,
418 const AnchorMatrix &anchors, unsigned int class_count,
419 unsigned int glyph_pos) const
420 {
421 TRACE_APPLY (this);
422 hb_buffer_t *buffer = c->buffer;
423 const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
424 unsigned int mark_class = record.klass;
425
426 const Anchor& mark_anchor = this + record.markAnchor;
427 bool found;
428 const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
429 /* If this subtable doesn't have an anchor for this base and this class,
430 * return false such that the subsequent subtables have a chance at it. */
431 if (unlikely (!found)) return_trace (false);
432
433 hb_position_t mark_x, mark_y, base_x, base_y;
434
/* Positioning now depends on both glyphs: forbid breaking between them. */
435 buffer->unsafe_to_break (glyph_pos, buffer->idx);
436 mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
437 glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
438
/* Offset the mark so the two anchor points coincide, and record the
 * attachment chain so later adjustments can cascade. */
439 hb_glyph_position_t &o = buffer->cur_pos();
440 o.x_offset = base_x - mark_x;
441 o.y_offset = base_y - mark_y;
442 o.attach_type() = ATTACH_TYPE_MARK;
443 o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
444 buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
445
446 buffer->idx++;
447 return_trace (true);
448 }
449
/* Validate the mark-record array (anchor offsets resolve relative to
 * `this`) against the sanitizer's memory bounds. */
450 inline bool sanitize (hb_sanitize_context_t *c) const
451 {
452 TRACE_SANITIZE (this);
453 return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
454 }
455 };
627
628 const PairValueRecord *record_array = CastP<PairValueRecord> (arrayZ);
629 unsigned int count = len;
630
631 /* Hand-coded bsearch. */
632 if (unlikely (!count))
633 return_trace (false);
634 hb_codepoint_t x = buffer->info[pos].codepoint;
635 int min = 0, max = (int) count - 1;
636 while (min <= max)
637 {
638 int mid = (min + max) / 2;
639 const PairValueRecord *record = &StructAtOffset<PairValueRecord> (record_array, record_size * mid);
640 hb_codepoint_t mid_x = record->secondGlyph;
641 if (x < mid_x)
642 max = mid - 1;
643 else if (x > mid_x)
644 min = mid + 1;
645 else
646 {
647 buffer->unsafe_to_break (buffer->idx, pos + 1);
648 valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
649 valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
650 if (len2)
651 pos++;
652 buffer->idx = pos;
653 return_trace (true);
654 }
655 }
656
657 return_trace (false);
658 }
659
/* Precomputed parameters threaded through PairSet sanitization as a
 * single pointer, since the generic sanitize machinery passes one
 * closure argument. */
660 struct sanitize_closure_t {
661 const void *base;
662 const ValueFormat *valueFormats;
663 unsigned int len1; /* valueFormats[0].get_len() */
664 unsigned int stride; /* 1 + len1 + len2 */
665 };
666
667 inline bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
775
/* Class-based pair positioning (PairPos format 2): adjusts the current
 * glyph and the next non-ignored glyph according to their class pair. */
776 inline bool apply (hb_apply_context_t *c) const
777 {
778 TRACE_APPLY (this);
779 hb_buffer_t *buffer = c->buffer;
780 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
781 if (likely (index == NOT_COVERED)) return_trace (false);
782
/* Locate the second glyph of the pair, skipping ignorable glyphs. */
783 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
784 skippy_iter.reset (buffer->idx, 1);
785 if (!skippy_iter.next ()) return_trace (false);
786
787 unsigned int len1 = valueFormat1.get_len ();
788 unsigned int len2 = valueFormat2.get_len ();
789 unsigned int record_len = len1 + len2;
790
791 unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
792 unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
793 if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
794
/* Result depends on both glyphs of the pair; the span (inclusive of the
 * second glyph) must not be split by later substring re-shaping. */
795 buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
796 const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
797 valueFormat1.apply_value (c, this, v, buffer->cur_pos());
798 valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
799
/* If the second value record is non-empty, the second glyph is consumed too. */
800 buffer->idx = skippy_iter.idx;
801 if (len2)
802 buffer->idx++;
803
804 return_trace (true);
805 }
806
807 inline bool sanitize (hb_sanitize_context_t *c) const
808 {
809 TRACE_SANITIZE (this);
810 if (!(c->check_struct (this)
811 && coverage.sanitize (c, this)
812 && classDef1.sanitize (c, this)
813 && classDef2.sanitize (c, this))) return_trace (false);
814
815 unsigned int len1 = valueFormat1.get_len ();
915 }
916
917 inline bool apply (hb_apply_context_t *c) const
918 {
919 TRACE_APPLY (this);
920 hb_buffer_t *buffer = c->buffer;
921
922 const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
923 if (!this_record.exitAnchor) return_trace (false);
924
925 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
926 skippy_iter.reset (buffer->idx, 1);
927 if (!skippy_iter.next ()) return_trace (false);
928
929 const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
930 if (!next_record.entryAnchor) return_trace (false);
931
932 unsigned int i = buffer->idx;
933 unsigned int j = skippy_iter.idx;
934
935 buffer->unsafe_to_break (i, j);
936 hb_position_t entry_x, entry_y, exit_x, exit_y;
937 (this+this_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
938 (this+next_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
939
940 hb_glyph_position_t *pos = buffer->pos;
941
942 hb_position_t d;
943 /* Main-direction adjustment */
944 switch (c->direction) {
945 case HB_DIRECTION_LTR:
946 pos[i].x_advance = exit_x + pos[i].x_offset;
947
948 d = entry_x + pos[j].x_offset;
949 pos[j].x_advance -= d;
950 pos[j].x_offset -= d;
951 break;
952 case HB_DIRECTION_RTL:
953 d = exit_x + pos[i].x_offset;
954 pos[i].x_advance -= d;
955 pos[i].x_offset -= d;
1070
1071 inline const Coverage &get_coverage (void) const
1072 {
1073 return this+markCoverage;
1074 }
1075
/* MarkBasePos: attach the current mark glyph to the nearest preceding
 * base glyph covered by baseCoverage. */
1076 inline bool apply (hb_apply_context_t *c) const
1077 {
1078 TRACE_APPLY (this);
1079 hb_buffer_t *buffer = c->buffer;
1080 unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
1081 if (likely (mark_index == NOT_COVERED)) return_trace (false);
1082
1083 /* Now we search backwards for a non-mark glyph */
1084 hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1085 skippy_iter.reset (buffer->idx, 1);
1086 skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1087 do {
1088 if (!skippy_iter.prev ()) return_trace (false);
1089 /* We only want to attach to the first of a MultipleSubst sequence. Reject others. */
/* Note: only glyphs produced by MultipleSubst are filtered; ligature
 * components also carry a non-zero lig-comp and remain eligible bases. */
1090 if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
1091 0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]))
1092 break;
1093 skippy_iter.reject ();
1094 } while (1);
1095
1096 /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
1097 //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1098
1099 unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
1100 if (base_index == NOT_COVERED) return_trace (false);
1101
1102 return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
1103 }
1104
1105 inline bool sanitize (hb_sanitize_context_t *c) const
1106 {
1107 TRACE_SANITIZE (this);
1108 return_trace (c->check_struct (this) &&
1109 markCoverage.sanitize (c, this) &&
1110 baseCoverage.sanitize (c, this) &&
1111 markArray.sanitize (c, this) &&
1112 baseArray.sanitize (c, this, (unsigned int) classCount));
|