< prev index next >

src/java.desktop/share/native/libfontmanager/harfbuzz/hb-open-type-private.hh

Print this page




  13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17  * DAMAGE.
  18  *
  19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
  22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24  *
  25  * Red Hat Author(s): Behdad Esfahbod
  26  * Google Author(s): Behdad Esfahbod
  27  */
  28 
  29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
  30 #define HB_OPEN_TYPE_PRIVATE_HH
  31 
  32 #include "hb-private.hh"


  33 
  34 
  35 namespace OT {
  36 
  37 
  38 
  39 /*
  40  * Casts
  41  */
  42 
  43 /* Cast to struct T, reference to reference */
  44 template<typename Type, typename TObject>
  45 static inline const Type& CastR(const TObject &X)
  46 { return reinterpret_cast<const Type&> (X); }
  47 template<typename Type, typename TObject>
  48 static inline Type& CastR(TObject &X)
  49 { return reinterpret_cast<Type&> (X); }
  50 
  51 /* Cast to struct T, pointer to pointer */
  52 template<typename Type, typename TObject>


  67 
  68 /* StructAfter<T>(X) returns the struct T& that is placed after X.
  69  * Works with X of variable size also.  X must implement get_size() */
  70 template<typename Type, typename TObject>
  71 static inline const Type& StructAfter(const TObject &X)
  72 { return StructAtOffset<Type>(&X, X.get_size()); }
  73 template<typename Type, typename TObject>
  74 static inline Type& StructAfter(TObject &X)
  75 { return StructAtOffset<Type>(&X, X.get_size()); }
  76 
  77 
  78 
  79 /*
  80  * Size checking
  81  */
  82 
/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
/* Extra indirection so that the caller's __LINE__ is expanded before the
 * token paste, yielding a unique method name per use site. */
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


/* Fixed-size struct: asserts sizeof(*this) matches, and provides the
 * static_size / min_size constants plus a trivial get_size(). */
#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size); \
  inline unsigned int get_size (void) const { return (size); }
 106 
 107 #define DEFINE_SIZE_UNION(size, _member) \


 112   DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
 113   static const unsigned int min_size = (size)
 114 
/* Struct ending in a variable-length array: the fixed-size prefix is `size`
 * and sizeof(*this) must count exactly one array element (the array[VAR]
 * placeholder).  Also checks the element type has a static_size. */
#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

/* Same, for structs ending in two variable-length arrays. */
#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
 124 
 125 
 126 
 127 /*
 128  * Null objects
 129  */
 130 
/* Global nul-content Null pool.  Enlarge as necessary. */
/* TODO This really should be a extern HB_INTERNAL and defined somewhere... */
/* NOTE(review): being `static` in a header, every translation unit gets its
 * own copy of this pool; harmless (all-zero, read-only) but wasteful. */
static const void *_NullPool[(256+8) / sizeof (void *)];

/* Generic nul-content Null objects: any Type small enough to fit in the
 * pool can be viewed as an all-zero-bytes instance of itself. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()
 153 
 154 
 155 /*
 156  * Dispatch
 157  */
 158 
/* Shared base for dispatch contexts (e.g. the sanitize context below).
 * Context is the CRTP-derived context type; Return is the per-dispatch
 * return type; MaxDebugDepth caps the tracing machinery's depth. */
template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  /* Default filter: dispatch everything; derived contexts override this
   * to pre-validate the format selector first. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  /* Value used when dispatch is skipped; defers to the derived context. */
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};
 168 
 169 
 170 /*
 171  * Sanitize
 172  */
 173 
 174 #ifndef HB_DEBUG_SANITIZE
 175 #define HB_DEBUG_SANITIZE (HB_DEBUG+0)
 176 #endif
 177 
 178 
 179 #define TRACE_SANITIZE(this) \
 180         hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
 181         (&c->debug_depth, c->get_name (), this, HB_FUNC, \
 182          "");
 183 
 184 /* This limits sanitizing time on really broken fonts. */
 185 #ifndef HB_SANITIZE_MAX_EDITS
 186 #define HB_SANITIZE_MAX_EDITS 32
 187 #endif
 188 
 189 struct hb_sanitize_context_t :
 190        hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
 191 {
  /* Construct an empty context; init() / start_processing() set it up. */
  inline hb_sanitize_context_t (void) :
        debug_depth (0),
        start (NULL), end (NULL),
        writable (false), edit_count (0),
        blob (NULL) {}
 197 
  inline const char *get_name (void) { return "SANITIZE"; }
  /* Sanitize the format selector itself before dispatching on it. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  static return_t no_dispatch_return_value (void) { return false; }
  /* Stop iterating sublookups as soon as one fails to sanitize. */
  bool stop_sublookup_iteration (const return_t r) const { return !r; }
 207 
  /* Take a reference on the blob to be sanitized; assume read-only access
   * until an edit is actually required. */
  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }
 213 
  /* Snapshot the blob's data range and reset per-run counters. */
  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, NULL);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));
  }
 227 
  /* Drop the blob reference and clear the data range. */
  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
                     "end [%p..%p] %u edit requests",
                     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = NULL;
    this->start = this->end = NULL;
  }
 238 
  /* Returns true iff [base, base+len) lies entirely inside the blob.
   * Written overflow-safely: tests (end - p) >= len rather than computing
   * p + len, which could wrap for hostile inputs. */
  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }
 252 
 253   inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
 254   {
 255     const char *p = (const char *) base;
 256     bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);


 331     }
 332 
 333     Type *t = CastP<Type> (const_cast<char *> (c->start));
 334 
 335     sane = t->sanitize (c);
 336     if (sane) {
 337       if (c->edit_count) {
 338         DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);
 339 
 340         /* sanitize again to ensure no toe-stepping */
 341         c->edit_count = 0;
 342         sane = t->sanitize (c);
 343         if (c->edit_count) {
 344           DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILLING", c->edit_count);
 345           sane = false;
 346         }
 347       }
 348     } else {
 349       unsigned int edit_count = c->edit_count;
 350       if (edit_count && !c->writable) {
 351         c->start = hb_blob_get_data_writable (blob, NULL);
 352         c->end = c->start + hb_blob_get_length (blob);
 353 
 354         if (c->start) {
 355           c->writable = true;
 356           /* ok, we made it writable by relocating.  try again */
 357           DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
 358           goto retry;
 359         }
 360       }
 361     }
 362 
 363     c->end_processing ();
 364 
 365     DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
 366     if (sane)
 367       return blob;
 368     else {
 369       hb_blob_destroy (blob);
 370       return hb_blob_get_empty ();
 371     }
 372   }
 373 
 374   static const Type* lock_instance (hb_blob_t *blob) {
 375     hb_blob_make_immutable (blob);
 376     const char *base = hb_blob_get_data (blob, NULL);
 377     return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
 378   }
 379 };
 380 
 381 
 382 
 383 /*
 384  * Serialize
 385  */
 386 
 387 #ifndef HB_DEBUG_SERIALIZE
 388 #define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
 389 #endif
 390 
 391 
 392 #define TRACE_SERIALIZE(this) \
 393         hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
 394         (&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
 395          "");
 396 
 397 
 398 struct hb_serialize_context_t
 399 {
  /* Initialize the context to serialize into the caller-owned buffer
   * [start_, start_ + size); head tracks the current write position. */
  inline hb_serialize_context_t (void *start_, unsigned int size)
  {
    this->start = (char *) start_;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }
 409 
 410   template <typename Type>
 411   inline Type *start_serialize (void)
 412   {
 413     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
 414                      "start [%p..%p] (%lu bytes)",
 415                      this->start, this->end,
 416                      (unsigned long) (this->end - this->start));


 427                      this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not ran out of room");
 428 
 429   }
 430 
  /* Heap-allocate a compact copy of everything serialized so far
   * ([start, head)).  Returns NULL if malloc fails; the caller owns the
   * returned memory (release with free()). */
  template <typename Type>
  inline Type *copy (void)
  {
    assert (!this->ran_out_of_room);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }
 441 
  /* Claim `size` zero-initialized bytes at the head of the buffer, or NULL
   * when exhausted.  Once out of room the context stays failed (sticky flag),
   * so callers may chain allocations and check only the final result. */
  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return NULL;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }
 454 
  /* Allocate just the fixed-size (min_size) portion of Type. */
  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }
 460 
  /* View the object that would start at the current head, without
   * allocating; pair with extend()/extend_min() once its size is known. */
  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }
 467 
 468   template <typename Type>
 469   inline Type *embed (const Type &obj)
 470   {
 471     unsigned int size = obj.get_size ();
 472     Type *ret = this->allocate_size<Type> (size);
 473     if (unlikely (!ret)) return NULL;
 474     memcpy (ret, obj, size);
 475     return ret;
 476   }
 477 
  /* Grow the buffer so that obj — already sitting at the tail of the
   * serialized data — occupies at least its fixed min_size.  The assert
   * checks obj overlaps the current head (it must be the object being
   * written, not some earlier one). */
  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }
 486 
  /* Like extend_min(), but grows to obj's full dynamic get_size(); used
   * after variable-length fields (e.g. an array's len) have been set. */
  template <typename Type>
  inline Type *extend (Type &obj)
  {
    unsigned int size = obj.get_size ();
    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }
 495 
  /* Roll the write head back to new_head, discarding bytes after it. */
  inline void truncate (void *new_head)
  {
    assert (this->start < new_head && new_head <= this->head);
    this->head = (char *) new_head;
  }
 501 
 502   unsigned int debug_depth;
 503   char *start, *end, *head;
 504   bool ran_out_of_room;
 505 };
 506 
 507 template <typename Type>
 508 struct Supplier
 509 {
 510   inline Supplier (const Type *array, unsigned int len_)
 511   {
 512     head = array;


 614   }
 615   inline operator Type (void) const
 616   {
 617     return (v[0] << 24)
 618          + (v[1] << 16)
 619          + (v[2] <<  8)
 620          + (v[3]      );
 621   }
 622   private: uint8_t v[4];
 623 };
 624 
 625 /* Integer types in big-endian order and no alignment requirement */
/* Integer types in big-endian order and no alignment requirement.
 * Type is the native C integer type; Size is the on-disk byte count.
 * Storage is a BEInt byte array, so instances are safe to overlay on
 * unaligned font data. */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  /* Note the swapped operands: compares *b* against *a*, matching the
   * ordering the array search helpers below expect. */
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  /* Three-way compare of a against this value (negative: a sorts first). */
  inline int cmp (Type a) const
  {
    Type b = v;
    /* Narrow types: plain int subtraction cannot overflow.  Wider types:
     * explicit comparisons to avoid overflow in the subtraction. */
    if (sizeof (Type) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};
 652 
/* The OpenType integer aliases; all stored big-endian on disk. */
typedef IntType<int8_t  , 1> CHAR;      /* 8-bit signed integer. */
typedef IntType<uint8_t , 1> BYTE;      /* 8-bit unsigned integer. */
typedef IntType<int8_t  , 1> INT8;      /* 8-bit signed integer. */
typedef IntType<uint16_t, 2> USHORT;    /* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;     /* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;     /* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;      /* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;    /* 24-bit unsigned integer. */
/* NOTE(review): UINT24 stores 3 bytes but exposes uint32_t — relies on a
 * 3-byte BEInt<uint32_t,3> specialization defined elsewhere; confirm. */

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;
 667 
/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : SHORT
{
  /* Float conversion helpers intentionally left unimplemented upstream: */
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (2);
};


 695   protected:
 696   LONG major;
 697   ULONG minor;
 698   public:
 699   DEFINE_SIZE_STATIC (8);
 700 };
 701 
/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
/* The Null Tag is four spaces. */
DEFINE_NULL_DATA (Tag, "    ");
 713 
/* Glyph index number, same as uint16 (length = 16 bits) */
struct GlyphID : USHORT {
  /* Pointer form for array-search callbacks; note the swapped operands. */
  static inline int cmp (const GlyphID *a, const GlyphID *b) { return b->USHORT::cmp (*a); }
  /* Returns a minus this glyph id: negative when a sorts first. */
  inline int cmp (hb_codepoint_t a) const { return (int) a - (int) *this; }
};
 719 
/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
/* The Null Index is 0xFFFF, i.e. NOT_FOUND_INDEX. */
DEFINE_NULL_DATA (Index, "\xff\xff");
 725 
/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  /* Zero means "no target"; callers must check before resolving. */
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};
 734 
 735 
 736 /* CheckSum */
 737 struct CheckSum : ULONG
 738 {


 812     return_trace (likely (obj.sanitize (c)) || neuter (c));
 813   }
 814   template <typename T>
 815   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 816   {
 817     TRACE_SANITIZE (this);
 818     if (unlikely (!c->check_struct (this))) return_trace (false);
 819     unsigned int offset = *this;
 820     if (unlikely (!offset)) return_trace (true);
 821     if (unlikely (!c->check_range (base, offset))) return_trace (false);
 822     const Type &obj = StructAtOffset<Type> (base, offset);
 823     return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
 824   }
 825 
 826   /* Set the offset to Null */
 827   inline bool neuter (hb_sanitize_context_t *c) const {
 828     return c->try_set (this, 0);
 829   }
 830   DEFINE_SIZE_STATIC (sizeof(OffsetType));
 831 };

/* Convenience: `base + offset` resolves an OffsetTo<> relative to base. */
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
 836 
 837 
 838 /*
 839  * Array Types
 840  */
 841 
 842 /* An array with a number of elements. */
 843 template <typename Type, typename LenType=USHORT>
 844 struct ArrayOf
 845 {
 846   const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
 847   {
 848     unsigned int count = len;
 849     if (unlikely (start_offset > count))
 850       count = 0;
 851     else


 924     for (unsigned int i = 0; i < count; i++)
 925       if (unlikely (!array[i].sanitize (c, base, user_data)))
 926         return_trace (false);
 927     return_trace (true);
 928   }
 929 
 930   template <typename SearchType>
 931   inline int lsearch (const SearchType &x) const
 932   {
 933     unsigned int count = len;
 934     for (unsigned int i = 0; i < count; i++)
 935       if (!this->array[i].cmp (x))
 936         return i;
 937     return -1;
 938   }
 939 
  private:
  /* Bounds-check the header and the raw array storage only; per-element
   * (deep) sanitization is done by the sanitize() overloads above. */
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_array (array, Type::static_size, len));
  }

  public:
  LenType len;       /* Number of elements. */
  Type array[VAR];   /* Variable-length payload (VAR marks a flexible array). */
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};

 953 
/* Array of Offset's */
template <typename Type, typename OffsetType=USHORT>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
 957 
 958 /* Array of offsets relative to the beginning of the array itself. */
 959 template <typename Type>
 960 struct OffsetListOf : OffsetArrayOf<Type>
 961 {
 962   inline const Type& operator [] (unsigned int i) const
 963   {
 964     if (unlikely (i >= this->len)) return Null(Type);
 965     return this+this->array[i];
 966   }
 967 
 968   inline bool sanitize (hb_sanitize_context_t *c) const
 969   {
 970     TRACE_SANITIZE (this);
 971     return_trace (OffsetArrayOf<Type>::sanitize (c, this));
 972   }


 989     return array[i-1];
 990   }
 991   inline unsigned int get_size (void) const
 992   { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }
 993 
  /* Serialize items[0..items_len): the first element is implicit (headless
   * array), so only items 1..items_len-1 are written to array[], while len
   * records the full logical count.  Consumes items_len-1 from `items`. */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return_trace (true);
    if (unlikely (!c->extend (*this))) return_trace (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return_trace (true);
  }
1008 
  /* Bounds-check the header plus the raw array bytes (no element recursion). */
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    return c->check_struct (this)
        && c->check_array (this, Type::static_size, len);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }
1031 









1032   LenType len;
1033   Type array[VAR];
1034   public:
1035   DEFINE_SIZE_ARRAY (sizeof (LenType), array);
1036 };
1037 
1038 
1039 /* An array with sorted elements.  Supports binary searching. */


1040 template <typename Type, typename LenType=USHORT>
1041 struct SortedArrayOf : ArrayOf<Type, LenType>
1042 {
  /* Binary search over the sorted array: returns the index of an element
   * comparing equal to x, or -1 if absent. */
  template <typename SearchType>
  inline int bsearch (const SearchType &x) const
  {
    /* Hand-coded bsearch here since this is in the hot inner loop. */
    int min = 0, max = (int) this->len - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      int c = this->array[mid].cmp (x);
      /* cmp(x) is x-relative-to-element (see IntType::cmp / GlyphID::cmp):
       * negative means x precedes array[mid]. */
      if (c < 0)
        max = mid - 1;
      else if (c > 0)
        min = mid + 1;
      else
        return mid;
    }
    return -1;
  }





























































































































1061 };
1062 
1063 
1064 } /* namespace OT */
1065 
1066 
1067 #endif /* HB_OPEN_TYPE_PRIVATE_HH */


  13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17  * DAMAGE.
  18  *
  19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
  22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24  *
  25  * Red Hat Author(s): Behdad Esfahbod
  26  * Google Author(s): Behdad Esfahbod
  27  */
  28 
  29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
  30 #define HB_OPEN_TYPE_PRIVATE_HH
  31 
  32 #include "hb-private.hh"
  33 #include "hb-debug.hh"
  34 #include "hb-face-private.hh"
  35 
  36 
  37 namespace OT {
  38 
  39 
  40 
  41 /*
  42  * Casts
  43  */
  44 
  45 /* Cast to struct T, reference to reference */
  46 template<typename Type, typename TObject>
  47 static inline const Type& CastR(const TObject &X)
  48 { return reinterpret_cast<const Type&> (X); }
  49 template<typename Type, typename TObject>
  50 static inline Type& CastR(TObject &X)
  51 { return reinterpret_cast<Type&> (X); }
  52 
  53 /* Cast to struct T, pointer to pointer */
  54 template<typename Type, typename TObject>


  69 
  70 /* StructAfter<T>(X) returns the struct T& that is placed after X.
  71  * Works with X of variable size also.  X must implement get_size() */
  72 template<typename Type, typename TObject>
  73 static inline const Type& StructAfter(const TObject &X)
  74 { return StructAtOffset<Type>(&X, X.get_size()); }
  75 template<typename Type, typename TObject>
  76 static inline Type& StructAfter(TObject &X)
  77 { return StructAtOffset<Type>(&X, X.get_size()); }
  78 
  79 
  80 
  81 /*
  82  * Size checking
  83  */
  84 
  85 /* Check _assertion in a method environment */
  86 #define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  87   inline void _instance_assertion_on_line_##_line (void) const \
  88   { \
  89     static_assert ((_assertion), ""); \
  90     ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  91   }
  92 # define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
  93 # define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)
  94 
  95 /* Check that _code compiles in a method environment */
  96 #define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  97   inline void _compiles_assertion_on_line_##_line (void) const \
  98   { _code; }
  99 # define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
 100 # define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
 101 
 102 
 103 #define DEFINE_SIZE_STATIC(size) \
 104   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
 105   static const unsigned int static_size = (size); \
 106   static const unsigned int min_size = (size); \
 107   inline unsigned int get_size (void) const { return (size); }
 108 
 109 #define DEFINE_SIZE_UNION(size, _member) \


 114   DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
 115   static const unsigned int min_size = (size)
 116 
 117 #define DEFINE_SIZE_ARRAY(size, array) \
 118   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
 119   DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
 120   static const unsigned int min_size = (size)
 121 
 122 #define DEFINE_SIZE_ARRAY2(size, array1, array2) \
 123   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
 124   DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
 125   static const unsigned int min_size = (size)
 126 
 127 
 128 
 129 /*
 130  * Null objects
 131  */
 132 
/* Global nul-content Null pool.  Enlarge as necessary. */

#define HB_NULL_POOL_SIZE 264
static_assert (HB_NULL_POOL_SIZE % sizeof (void *) == 0, "Align HB_NULL_POOL_SIZE.");
/* Declared extern here; the single definition lives in a source file, so all
 * translation units share one pool (unlike the older static-in-header pool). */
extern HB_INTERNAL const void * const _hb_NullPool[HB_NULL_POOL_SIZE / sizeof (void *)];

/* Generic nul-content Null objects: any Type that fits in the pool can be
 * viewed as an all-zero-bytes instance of itself. */
template <typename Type>
static inline const Type& Null (void) {
  static_assert (sizeof (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE.");
  return *CastP<Type> (_hb_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
static_assert (Type::min_size + 1 <= sizeof (_Null##Type), "Null pool too small.  Enlarge.")

/* Accessor macro. */
#define Null(Type) Null<Type>()
 157 
 158 
 159 /*
 160  * Dispatch
 161  */
 162 
/* Shared base for dispatch contexts (e.g. the sanitize context below).
 * Context is the CRTP-derived context type; Return is the per-dispatch
 * return type; MaxDebugDepth caps the tracing machinery's depth. */
template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  /* Default filter: dispatch everything; derived contexts override this
   * to pre-validate the format selector first. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  /* Value used when dispatch is skipped; defers to the derived context. */
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};
 172 
 173 
 174 /*
 175  * Sanitize
 176  */
 177 










 178 /* This limits sanitizing time on really broken fonts. */
 179 #ifndef HB_SANITIZE_MAX_EDITS
 180 #define HB_SANITIZE_MAX_EDITS 32
 181 #endif
 182 
 183 struct hb_sanitize_context_t :
 184        hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
 185 {
 186   inline hb_sanitize_context_t (void) :
 187         debug_depth (0),
 188         start (nullptr), end (nullptr),
 189         writable (false), edit_count (0),
 190         blob (nullptr) {}
 191 
 192   inline const char *get_name (void) { return "SANITIZE"; }
 193   template <typename T, typename F>
 194   inline bool may_dispatch (const T *obj, const F *format)
 195   { return format->sanitize (this); }
 196   template <typename T>
 197   inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
 198   static return_t default_return_value (void) { return true; }
 199   static return_t no_dispatch_return_value (void) { return false; }
 200   bool stop_sublookup_iteration (const return_t r) const { return !r; }
 201 
 202   inline void init (hb_blob_t *b)
 203   {
 204     this->blob = hb_blob_reference (b);
 205     this->writable = false;
 206   }
 207 
 208   inline void start_processing (void)
 209   {
 210     this->start = hb_blob_get_data (this->blob, nullptr);
 211     this->end = this->start + hb_blob_get_length (this->blob);
 212     assert (this->start <= this->end); /* Must not overflow. */
 213     this->edit_count = 0;
 214     this->debug_depth = 0;
 215 
 216     DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
 217                      "start [%p..%p] (%lu bytes)",
 218                      this->start, this->end,
 219                      (unsigned long) (this->end - this->start));
 220   }
 221 
 222   inline void end_processing (void)
 223   {
 224     DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
 225                      "end [%p..%p] %u edit requests",
 226                      this->start, this->end, this->edit_count);
 227 
 228     hb_blob_destroy (this->blob);
 229     this->blob = nullptr;
 230     this->start = this->end = nullptr;
 231   }
 232 
 233   inline bool check_range (const void *base, unsigned int len) const
 234   {
 235     const char *p = (const char *) base;
 236     bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;
 237 
 238     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
 239        "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
 240        p, p + len, len,
 241        this->start, this->end,
 242        ok ? "OK" : "OUT-OF-RANGE");
 243 
 244     return likely (ok);
 245   }
 246 
 247   inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
 248   {
 249     const char *p = (const char *) base;
 250     bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);


 325     }
 326 
 327     Type *t = CastP<Type> (const_cast<char *> (c->start));
 328 
 329     sane = t->sanitize (c);
 330     if (sane) {
 331       if (c->edit_count) {
 332         DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);
 333 
 334         /* sanitize again to ensure no toe-stepping */
 335         c->edit_count = 0;
 336         sane = t->sanitize (c);
 337         if (c->edit_count) {
 338           DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILLING", c->edit_count);
 339           sane = false;
 340         }
 341       }
 342     } else {
 343       unsigned int edit_count = c->edit_count;
 344       if (edit_count && !c->writable) {
 345         c->start = hb_blob_get_data_writable (blob, nullptr);
 346         c->end = c->start + hb_blob_get_length (blob);
 347 
 348         if (c->start) {
 349           c->writable = true;
 350           /* ok, we made it writable by relocating.  try again */
 351           DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
 352           goto retry;
 353         }
 354       }
 355     }
 356 
 357     c->end_processing ();
 358 
 359     DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
 360     if (sane)
 361       return blob;
 362     else {
 363       hb_blob_destroy (blob);
 364       return hb_blob_get_empty ();
 365     }
 366   }
 367 
 368   static const Type* lock_instance (hb_blob_t *blob) {
 369     hb_blob_make_immutable (blob);
 370     const char *base = hb_blob_get_data (blob, nullptr);
 371     return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
 372   }
 373 };
 374 
 375 
 376 
 377 /*
 378  * Serialize
 379  */
 380 










 381 
 382 struct hb_serialize_context_t
 383 {
  /* Serialize into the caller-owned buffer [start_, start_+size).
   * `head' tracks the next free byte; nothing is written until the
   * allocate/embed/extend helpers are called. */
  inline hb_serialize_context_t (void *start_, unsigned int size)
  {
    this->start = (char *) start_;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }
 393 
 394   template <typename Type>
 395   inline Type *start_serialize (void)
 396   {
 397     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
 398                      "start [%p..%p] (%lu bytes)",
 399                      this->start, this->end,
 400                      (unsigned long) (this->end - this->start));


 411                      this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not ran out of room");
 412 
 413   }
 414 
 415   template <typename Type>
 416   inline Type *copy (void)
 417   {
 418     assert (!this->ran_out_of_room);
 419     unsigned int len = this->head - this->start;
 420     void *p = malloc (len);
 421     if (p)
 422       memcpy (p, this->start, len);
 423     return reinterpret_cast<Type *> (p);
 424   }
 425 
  /* Reserve `size' zero-initialized bytes at head, or return nullptr
   * (latching ran_out_of_room) if they do not fit.  Comparing against
   * ptrdiff_t(size) keeps a huge unsigned `size' from being silently
   * accepted via wraparound. */
  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return nullptr;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }
 438 
  /* Reserve just Type's fixed-header (minimum) size. */
  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  /* View head as a Type WITHOUT reserving any space; pair with
   * extend_min()/extend() once the object's real size is known. */
  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }
 451 
 452   template <typename Type>
 453   inline Type *embed (const Type &obj)
 454   {
 455     unsigned int size = obj.get_size ();
 456     Type *ret = this->allocate_size<Type> (size);
 457     if (unlikely (!ret)) return nullptr;
 458     memcpy (ret, obj, size);
 459     return ret;
 460   }
 461 
  /* Grow the buffer so that `obj' -- which must already lie at the tail
   * of the serialized region -- covers at least its min_size bytes. */
  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
    return reinterpret_cast<Type *> (&obj);
  }
 470 
 471   template <typename Type>
 472   inline Type *extend (Type &obj)
 473   {
 474     unsigned int size = obj.get_size ();
 475     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
 476     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
 477     return reinterpret_cast<Type *> (&obj);
 478   }
 479 
  /* Roll head back to an earlier position, discarding everything
   * serialized after it. */
  inline void truncate (void *new_head)
  {
    assert (this->start < new_head && new_head <= this->head);
    this->head = (char *) new_head;
  }

  unsigned int debug_depth;  /* Nesting depth for SERIALIZE tracing. */
  char *start, *end, *head;  /* Buffer bounds and next free byte. */
  bool ran_out_of_room;      /* Latched on the first failed allocation. */
};
 490 
 491 template <typename Type>
 492 struct Supplier
 493 {
 494   inline Supplier (const Type *array, unsigned int len_)
 495   {
 496     head = array;


 598   }
 599   inline operator Type (void) const
 600   {
 601     return (v[0] << 24)
 602          + (v[1] << 16)
 603          + (v[2] <<  8)
 604          + (v[3]      );
 605   }
 606   private: uint8_t v[4];
 607 };
 608 
/* Integer types in big-endian order and no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  /* qsort/bsearch-style comparator; routed through the member cmp on b,
   * which yields the usual ascending (*a - *b) ordering. */
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  /* Three-way compare of the stored value against a.  When both types
   * are narrower than int the plain subtraction cannot overflow, so it
   * is used as a cheap exact difference; otherwise fall back to the
   * branchy -1/0/+1 form. */
  template <typename Type2>
  inline int cmp (Type2 a) const
  {
    Type b = v;
    if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* A raw integer has no internal structure: a bounds check suffices. */
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};
 637 
/* OpenType wire-format integer names mapped onto big-endian IntType
 * instantiations of the matching width. */
typedef IntType<int8_t,   1> CHAR;      /* 8-bit signed integer. */
typedef IntType<uint8_t,  1> BYTE;      /* 8-bit unsigned integer. */
typedef IntType<int8_t,   1> INT8;      /* 8-bit signed integer. */
typedef IntType<uint16_t, 2> USHORT;    /* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;     /* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;     /* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;      /* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;    /* 24-bit unsigned integer, stored in 3 bytes. */

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;
 652 
/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : SHORT
{
  /* Float conversions are not implemented yet; the raw 2.14 value is
   * accessible through the inherited SHORT interface. */
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (2);
};


 680   protected:
 681   LONG major;
 682   ULONG minor;
 683   public:
 684   DEFINE_SIZE_STATIC (8);
 685 };
 686 
/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
/* The null Tag is four ASCII spaces. */
DEFINE_NULL_DATA (Tag, "    ");
 698 
/* Glyph index number, same as uint16 (length = 16 bits) */
typedef USHORT GlyphID;



/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
/* The null Index is 0xFFFF, i.e. NOT_FOUND_INDEX. */
DEFINE_NULL_DATA (Index, "\xff\xff");
 707 
/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  /* Zero is the conventional "no target" sentinel for offsets. */
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};
 716 
 717 
 718 /* CheckSum */
 719 struct CheckSum : ULONG
 720 {


 794     return_trace (likely (obj.sanitize (c)) || neuter (c));
 795   }
  /* Sanitize the offset and the object it points to, passing user_data
   * through to the target's sanitize().  A zero offset means "absent"
   * and is valid.  If the target itself fails, attempt to neuter (zero)
   * the offset rather than failing the whole table. */
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    /* Ensures base..base+offset is in range, so the target start is valid. */
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) const {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
/* 32-bit-offset variant of OffsetTo. */
template <typename Type> struct LOffsetTo : OffsetTo<Type, ULONG> {};
/* `base + offset' syntax: dereference an OffsetTo relative to base. */
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
 819 
 820 
 821 /*
 822  * Array Types
 823  */
 824 
 825 /* An array with a number of elements. */
 826 template <typename Type, typename LenType=USHORT>
 827 struct ArrayOf
 828 {
 829   const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
 830   {
 831     unsigned int count = len;
 832     if (unlikely (start_offset > count))
 833       count = 0;
 834     else


 907     for (unsigned int i = 0; i < count; i++)
 908       if (unlikely (!array[i].sanitize (c, base, user_data)))
 909         return_trace (false);
 910     return_trace (true);
 911   }
 912 
 913   template <typename SearchType>
 914   inline int lsearch (const SearchType &x) const
 915   {
 916     unsigned int count = len;
 917     for (unsigned int i = 0; i < count; i++)
 918       if (!this->array[i].cmp (x))
 919         return i;
 920     return -1;
 921   }
 922 
  private:
  /* Check only the length field and the aggregate array bounds; the
   * elements' contents are validated by the public sanitize() variants. */
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (len.sanitize (c) && c->check_array (array, Type::static_size, len));
  }

  public:
  LenType len;      /* Number of elements that follow. */
  Type array[VAR];  /* Variable-sized element storage. */
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
/* ArrayOf with a 32-bit length field. */
template <typename Type> struct LArrayOf : ArrayOf<Type, ULONG> {};

/* Array of Offset's */
template <typename Type, typename OffsetType=USHORT>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
 941 
 942 /* Array of offsets relative to the beginning of the array itself. */
 943 template <typename Type>
 944 struct OffsetListOf : OffsetArrayOf<Type>
 945 {
  /* Dereference offset i relative to the start of the array itself;
   * out-of-range indices yield the shared Null object. */
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Offsets are relative to `this', hence the base passed down. */
    return_trace (OffsetArrayOf<Type>::sanitize (c, this));
  }


 973     return array[i-1];
 974   }
  /* `len' counts the implicit first element too, so only len-1 elements
   * occupy storage. */
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  /* Serialize the headless array: len is set to items_len, but since
   * the first element is implicit only items_len-1 elements are written
   * out and consumed from the supplier (the loop copies
   * items[0..items_len-2] -- presumably the supplier holds just the
   * stored elements; verify against callers). */
  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return_trace (true);
    if (unlikely (!c->extend (*this))) return_trace (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return_trace (true);
  }
 992 






  /* Elements here are plain (offset-free) structs, so the shallow
   * aggregate bounds check is sufficient. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }
1009 
  private:
  /* Bounds-check the length field plus the len-1 stored elements (the
   * first element is implicit and occupies no storage). */
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (len.sanitize (c) &&
                  (!len || c->check_array (array, Type::static_size, len - 1)));
  }

  public:
  LenType len;      /* Element count, including the implicit first element. */
  Type array[VAR];  /* Storage for elements 1..len-1. */
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
1024 
1025 
1026 /*
1027  * An array with sorted elements.  Supports binary searching.
1028  */
1029 template <typename Type, typename LenType=USHORT>
1030 struct SortedArrayOf : ArrayOf<Type, LenType>
1031 {
1032   template <typename SearchType>
1033   inline int bsearch (const SearchType &x) const
1034   {
1035     /* Hand-coded bsearch here since this is in the hot inner loop. */
1036     const Type *array = this->array;
1037     int min = 0, max = (int) this->len - 1;
1038     while (min <= max)
1039     {
1040       int mid = (min + max) / 2;
1041       int c = array[mid].cmp (x);
1042       if (c < 0)
1043         max = mid - 1;
1044       else if (c > 0)
1045         min = mid + 1;
1046       else
1047         return mid;
1048     }
1049     return -1;
1050   }
1051 };
1052 
1053 /*
1054  * Binary-search arrays
1055  */
1056 
struct BinSearchHeader
{
  /* The header reads as its element count. */
  inline operator uint32_t (void) const { return len; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  USHORT        len;
  /* The trailing-Z fields are part of the wire format but are never
   * read here; presumably derivable from len -- not validated. */
  USHORT        searchRangeZ;
  USHORT        entrySelectorZ;
  USHORT        rangeShiftZ;

  public:
  DEFINE_SIZE_STATIC (8);
};

/* SortedArrayOf whose length field is a full BinSearchHeader. */
template <typename Type>
struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader> {};
1079 
1080 
1081 /* Lazy struct and blob loaders. */
1082 
1083 /* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
1084 template <typename T>
1085 struct hb_lazy_loader_t
1086 {
1087   inline void init (hb_face_t *face_)
1088   {
1089     face = face_;
1090     instance = nullptr;
1091   }
1092 
1093   inline void fini (void)
1094   {
1095     if (instance && instance != &OT::Null(T))
1096     {
1097       instance->fini();
1098       free (instance);
1099     }
1100   }
1101 
1102   inline const T* get (void) const
1103   {
1104   retry:
1105     T *p = (T *) hb_atomic_ptr_get (&instance);
1106     if (unlikely (!p))
1107     {
1108       p = (T *) calloc (1, sizeof (T));
1109       if (unlikely (!p))
1110         p = const_cast<T *> (&OT::Null(T));
1111       else
1112         p->init (face);
1113       if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p)))
1114       {
1115         if (p != &OT::Null(T))
1116           p->fini ();
1117         goto retry;
1118       }
1119     }
1120     return p;
1121   }
1122 
1123   inline const T* operator-> (void) const
1124   {
1125     return get ();
1126   }
1127 
1128   private:
1129   hb_face_t *face;
1130   T *instance;
1131 };
1132 
1133 /* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
template <typename T>
struct hb_lazy_table_loader_t
{
  inline void init (hb_face_t *face_)
  {
    face = face_;
    instance = nullptr;
    blob = nullptr;
  }

  /* Dropping the blob releases the table data; the instance points
   * into the blob and is not separately owned. */
  inline void fini (void)
  {
    hb_blob_destroy (blob);
  }

  /* Lazily fetch and sanitize the face's T::tableTag table, publishing
   * the locked instance with a compare-and-swap; losers destroy their
   * blob and re-read the winner's pointer. */
  inline const T* get (void) const
  {
  retry:
    T *p = (T *) hb_atomic_ptr_get (&instance);
    if (unlikely (!p))
    {
      hb_blob_t *blob_ = OT::Sanitizer<T>::sanitize (face->reference_table (T::tableTag));
      p = const_cast<T *>(OT::Sanitizer<T>::lock_instance (blob_));
      if (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p))
      {
        hb_blob_destroy (blob_);
        goto retry;
      }
      /* NOTE(review): `blob' is stored after `instance' is already
       * published; only the CAS winner writes it, but a concurrent
       * fini() could observe instance set with blob still null --
       * presumably callers never race get() against fini(); verify. */
      blob = blob_;
    }
    return p;
  }

  inline const T* operator-> (void) const
  {
    return get();
  }

  private:
  hb_face_t *face;
  T *instance;              /* Points into `blob's data once published. */
  mutable hb_blob_t *blob;  /* mutable: assigned from the const get(). */
};
1177 
1178 
1179 } /* namespace OT */
1180 
1181 
1182 #endif /* HB_OPEN_TYPE_PRIVATE_HH */
< prev index next >