1 /*
   2  * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
   3  * Copyright © 2012  Google, Inc.
   4  *
   5  *  This is part of HarfBuzz, a text shaping library.
   6  *
   7  * Permission is hereby granted, without written agreement and without
   8  * license or royalty fees, to use, copy, modify, and distribute this
   9  * software and its documentation for any purpose, provided that the
  10  * above copyright notice and the following two paragraphs appear in
  11  * all copies of this software.
  12  *
  13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17  * DAMAGE.
  18  *
  19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
  22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24  *
  25  * Red Hat Author(s): Behdad Esfahbod
  26  * Google Author(s): Behdad Esfahbod
  27  */
  28 
  29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
  30 #define HB_OPEN_TYPE_PRIVATE_HH
  31 
  32 #include "hb-private.hh"
  33 
  34 
  35 namespace OT {
  36 
  37 
  38 
  39 /*
  40  * Casts
  41  */
  42 
  43 /* Cast to struct T, reference to reference */
  44 template<typename Type, typename TObject>
  45 static inline const Type& CastR(const TObject &X)
  46 { return reinterpret_cast<const Type&> (X); }
  47 template<typename Type, typename TObject>
  48 static inline Type& CastR(TObject &X)
  49 { return reinterpret_cast<Type&> (X); }
  50 
  51 /* Cast to struct T, pointer to pointer */
  52 template<typename Type, typename TObject>
  53 static inline const Type* CastP(const TObject *X)
  54 { return reinterpret_cast<const Type*> (X); }
  55 template<typename Type, typename TObject>
  56 static inline Type* CastP(TObject *X)
  57 { return reinterpret_cast<Type*> (X); }
  58 
  59 /* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
  60  * location pointed to by P plus Ofs bytes. */
  61 template<typename Type>
  62 static inline const Type& StructAtOffset(const void *P, unsigned int offset)
  63 { return * reinterpret_cast<const Type*> ((const char *) P + offset); }
  64 template<typename Type>
  65 static inline Type& StructAtOffset(void *P, unsigned int offset)
  66 { return * reinterpret_cast<Type*> ((char *) P + offset); }
  67 
  68 /* StructAfter<T>(X) returns the struct T& that is placed after X.
  69  * Works with X of variable size also.  X must implement get_size() */
  70 template<typename Type, typename TObject>
  71 static inline const Type& StructAfter(const TObject &X)
  72 { return StructAtOffset<Type>(&X, X.get_size()); }
  73 template<typename Type, typename TObject>
  74 static inline Type& StructAfter(TObject &X)
  75 { return StructAtOffset<Type>(&X, X.get_size()); }
  76 
  77 
  78 
  79 /*
  80  * Size checking
  81  */
  82 
/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
/* The extra _DEFINE_*0 level forces __LINE__ to expand before token-pasting,
 * so each use generates a uniquely-named member function. */
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
  99 
 100 
/* For structs of fixed, known size: defines static_size/min_size and get_size(),
 * and asserts at compile time that sizeof matches. */
#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size); \
  inline unsigned int get_size (void) const { return (size); }

/* For union types: asserts that the named arm (which must itself declare
 * static_size) fills the union exactly. */
#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (0*sizeof(this->u._member.static_size) + sizeof(this->u._member) == (size)); \
  static const unsigned int min_size = (size)

/* For variable-sized structs with a known minimum size. */
#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

/* For structs ending in a variable-length array; `size` is the header size.
 * The COMPILES assertion also checks array elements have a static_size. */
#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

/* Like DEFINE_SIZE_ARRAY, for structs ending in two variable-length arrays. */
#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
 124 
 125 
 126 
 127 /*
 128  * Null objects
 129  */
 130 
/* Global nul-content Null pool.  Enlarge as necessary. */
/* TODO This really should be a extern HB_INTERNAL and defined somewhere... */
static const void *_NullPool[(256+8) / sizeof (void *)];

/* Generic nul-content Null objects: any Type that fits is served a shared,
 * all-zeroes instance out of the pool above.  Compile-time checked. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects.
 * `data` is a string literal supplying the Null object's bytes. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()
 153 
 154 
 155 /*
 156  * Dispatch
 157  */
 158 
/* CRTP-style base for dispatch contexts (e.g. sanitize below).
 * `Context` is the deriving class, `Return` its result type, and
 * MaxDebugDepth bounds the nesting level used for debug tracing. */
template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  /* Deriving contexts may override this to inspect `format` before dispatch. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  /* Value to yield when dispatch is skipped; defers to the derived context. */
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};
 168 
 169 
 170 /*
 171  * Sanitize
 172  */
 173 
/* Debug level for sanitize tracing; override before including this header. */
#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif


/* Opens a scoped trace for a sanitize method; expects a sanitize context
 * named `c` in scope, and pairs with return_trace() for the exit message. */
#define TRACE_SANITIZE(this) \
        hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
        (&c->debug_depth, c->get_name (), this, HB_FUNC, \
         "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif
 188 
/* Context for validating (sanitizing) font data in a blob.  Tracks the
 * valid byte range [start,end), whether in-place edits are permitted
 * (`writable`), and how many edits have been requested (`edit_count`,
 * capped at HB_SANITIZE_MAX_EDITS). */
struct hb_sanitize_context_t :
       hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
{
  inline hb_sanitize_context_t (void) :
        debug_depth (0),
        start (NULL), end (NULL),
        writable (false), edit_count (0),
        blob (NULL) {}

  inline const char *get_name (void) { return "SANITIZE"; }
  /* Dispatch is gated on the format itself sanitizing successfully. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  static return_t no_dispatch_return_value (void) { return false; }
  bool stop_sublookup_iteration (const return_t r) const { return !r; }

  /* Take a reference on the blob to sanitize; pair with end_processing(). */
  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  /* (Re)load the blob's data range and reset edit/debug counters. */
  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, NULL);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));
  }

  /* Drop the blob reference and clear the range pointers. */
  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
                     "end [%p..%p] %u edit requests",
                     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = NULL;
    this->start = this->end = NULL;
  }

  /* True iff [base, base+len) lies entirely inside [start, end).
   * The two-part test avoids pointer-arithmetic overflow. */
  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  /* Range-check an array of len records, guarding the len*record_size
   * multiplication against unsigned overflow. */
  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    unsigned int array_size = record_size * len;
    bool ok = !overflows && this->check_range (base, array_size);

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
       p, p + (record_size * len), record_size, len, (unsigned int) array_size,
       this->start, this->end,
       overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  /* Range-check an object's fixed (minimum) size. */
  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  /* Ask permission to edit [base, base+len).  Counts the request (even if
   * denied) and grants it only when the blob is writable and the edit
   * budget is not yet exhausted. */
  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  /* Set *obj to v if editing is permitted; returns whether the set happened. */
  template <typename Type, typename ValueType>
  inline bool try_set (const Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      const_cast<Type *> (obj)->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth; /* mutable: bumped by tracing in const methods */
  const char *start, *end;
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
};
 308 
 309 
 310 
/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  /* Sanitize `blob` as a Type.  Ownership of the caller's reference is
   * taken: on success the (possibly edited) blob is returned; on failure
   * it is destroyed and the empty blob is returned instead. */
  static hb_blob_t *sanitize (hb_blob_t *blob) {
    hb_sanitize_context_t c[1];
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, c->start, "start");

    c->start_processing ();

    /* Empty/inaccessible blob data: nothing to check; hand it back as-is. */
    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      /* The first pass made edits; run a second pass to confirm the data
       * is now stable.  Any further edit requests mean failure. */
      if (c->edit_count) {
        DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);

        /* sanitize again to ensure no toe-stepping */
        c->edit_count = 0;
        sane = t->sanitize (c);
        if (c->edit_count) {
          DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILLING", c->edit_count);
          sane = false;
        }
      }
    } else {
      /* Failed while read-only but edits were requested: obtain writable
       * data (may relocate) and redo the whole pass with edits allowed. */
      unsigned int edit_count = c->edit_count;
      if (edit_count && !c->writable) {
        c->start = hb_blob_get_data_writable (blob, NULL);
        c->end = c->start + hb_blob_get_length (blob);

        if (c->start) {
          c->writable = true;
          /* ok, we made it writable by relocating.  try again */
          DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
          goto retry;
        }
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  /* Make the blob immutable and return its data as a Type*; the shared
   * Null object is returned if the data is inaccessible. */
  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, NULL);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }
};
 380 
 381 
 382 
 383 /*
 384  * Serialize
 385  */
 386 
/* Debug level for serialize tracing; override before including this header. */
#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif


/* Opens a scoped trace for a serialize method; expects a serialize context
 * named `c` in scope, and pairs with return_trace() for the exit message. */
#define TRACE_SERIALIZE(this) \
        hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
        (&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
         "");
 396 
 397 
 398 struct hb_serialize_context_t
 399 {
 400   inline hb_serialize_context_t (void *start_, unsigned int size)
 401   {
 402     this->start = (char *) start_;
 403     this->end = this->start + size;
 404 
 405     this->ran_out_of_room = false;
 406     this->head = this->start;
 407     this->debug_depth = 0;
 408   }
 409 
 410   template <typename Type>
 411   inline Type *start_serialize (void)
 412   {
 413     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
 414                      "start [%p..%p] (%lu bytes)",
 415                      this->start, this->end,
 416                      (unsigned long) (this->end - this->start));
 417 
 418     return start_embed<Type> ();
 419   }
 420 
 421   inline void end_serialize (void)
 422   {
 423     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
 424                      "end [%p..%p] serialized %d bytes; %s",
 425                      this->start, this->end,
 426                      (int) (this->head - this->start),
 427                      this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not ran out of room");
 428 
 429   }
 430 
 431   template <typename Type>
 432   inline Type *copy (void)
 433   {
 434     assert (!this->ran_out_of_room);
 435     unsigned int len = this->head - this->start;
 436     void *p = malloc (len);
 437     if (p)
 438       memcpy (p, this->start, len);
 439     return reinterpret_cast<Type *> (p);
 440   }
 441 
 442   template <typename Type>
 443   inline Type *allocate_size (unsigned int size)
 444   {
 445     if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
 446       this->ran_out_of_room = true;
 447       return NULL;
 448     }
 449     memset (this->head, 0, size);
 450     char *ret = this->head;
 451     this->head += size;
 452     return reinterpret_cast<Type *> (ret);
 453   }
 454 
 455   template <typename Type>
 456   inline Type *allocate_min (void)
 457   {
 458     return this->allocate_size<Type> (Type::min_size);
 459   }
 460 
 461   template <typename Type>
 462   inline Type *start_embed (void)
 463   {
 464     Type *ret = reinterpret_cast<Type *> (this->head);
 465     return ret;
 466   }
 467 
 468   template <typename Type>
 469   inline Type *embed (const Type &obj)
 470   {
 471     unsigned int size = obj.get_size ();
 472     Type *ret = this->allocate_size<Type> (size);
 473     if (unlikely (!ret)) return NULL;
 474     memcpy (ret, obj, size);
 475     return ret;
 476   }
 477 
 478   template <typename Type>
 479   inline Type *extend_min (Type &obj)
 480   {
 481     unsigned int size = obj.min_size;
 482     assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
 483     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
 484     return reinterpret_cast<Type *> (&obj);
 485   }
 486 
 487   template <typename Type>
 488   inline Type *extend (Type &obj)
 489   {
 490     unsigned int size = obj.get_size ();
 491     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
 492     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
 493     return reinterpret_cast<Type *> (&obj);
 494   }
 495 
 496   inline void truncate (void *new_head)
 497   {
 498     assert (this->start < new_head && new_head <= this->head);
 499     this->head = (char *) new_head;
 500   }
 501 
 502   unsigned int debug_depth;
 503   char *start, *end, *head;
 504   bool ran_out_of_room;
 505 };
 506 
 507 template <typename Type>
 508 struct Supplier
 509 {
 510   inline Supplier (const Type *array, unsigned int len_)
 511   {
 512     head = array;
 513     len = len_;
 514   }
 515   inline const Type operator [] (unsigned int i) const
 516   {
 517     if (unlikely (i >= len)) return Type ();
 518     return head[i];
 519   }
 520 
 521   inline void advance (unsigned int count)
 522   {
 523     if (unlikely (count > len))
 524       count = len;
 525     len -= count;
 526     head += count;
 527   }
 528 
 529   private:
 530   inline Supplier (const Supplier<Type> &); /* Disallow copy */
 531   inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */
 532 
 533   unsigned int len;
 534   const Type *head;
 535 };
 536 
 537 
 538 
 539 
 540 /*
 541  *
 542  * The OpenType Font File: Data Types
 543  */
 544 
 545 
 546 /* "The following data types are used in the OpenType font file.
 547  *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
 548 
 549 /*
 550  * Int types
 551  */
 552 
 553 
 554 template <typename Type, int Bytes> struct BEInt;
 555 
 556 template <typename Type>
 557 struct BEInt<Type, 1>
 558 {
 559   public:
 560   inline void set (Type V)
 561   {
 562     v = V;
 563   }
 564   inline operator Type (void) const
 565   {
 566     return v;
 567   }
 568   private: uint8_t v;
 569 };
 570 template <typename Type>
 571 struct BEInt<Type, 2>
 572 {
 573   public:
 574   inline void set (Type V)
 575   {
 576     v[0] = (V >>  8) & 0xFF;
 577     v[1] = (V      ) & 0xFF;
 578   }
 579   inline operator Type (void) const
 580   {
 581     return (v[0] <<  8)
 582          + (v[1]      );
 583   }
 584   private: uint8_t v[2];
 585 };
 586 template <typename Type>
 587 struct BEInt<Type, 3>
 588 {
 589   public:
 590   inline void set (Type V)
 591   {
 592     v[0] = (V >> 16) & 0xFF;
 593     v[1] = (V >>  8) & 0xFF;
 594     v[2] = (V      ) & 0xFF;
 595   }
 596   inline operator Type (void) const
 597   {
 598     return (v[0] << 16)
 599          + (v[1] <<  8)
 600          + (v[2]      );
 601   }
 602   private: uint8_t v[3];
 603 };
 604 template <typename Type>
 605 struct BEInt<Type, 4>
 606 {
 607   public:
 608   inline void set (Type V)
 609   {
 610     v[0] = (V >> 24) & 0xFF;
 611     v[1] = (V >> 16) & 0xFF;
 612     v[2] = (V >>  8) & 0xFF;
 613     v[3] = (V      ) & 0xFF;
 614   }
 615   inline operator Type (void) const
 616   {
 617     return (v[0] << 24)
 618          + (v[1] << 16)
 619          + (v[2] <<  8)
 620          + (v[3]      );
 621   }
 622   private: uint8_t v[4];
 623 };
 624 
/* Integer types in big-endian order and no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  /* qsort/bsearch-style comparator; note the operands are swapped into cmp(). */
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  inline int cmp (Type a) const
  {
    Type b = v;
    /* Subtraction cannot overflow when Type is narrower than int; otherwise
     * compare explicitly. */
    if (sizeof (Type) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};
 652 
/* The basic big-endian integer aliases, named per the OpenType spec. */
typedef IntType<int8_t  , 1> CHAR;      /* 8-bit signed integer. */
typedef IntType<uint8_t , 1> BYTE;      /* 8-bit unsigned integer. */
typedef IntType<int8_t  , 1> INT8;      /* 8-bit signed integer. */
typedef IntType<uint16_t, 2> USHORT;    /* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;     /* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;     /* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;      /* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;    /* 24-bit unsigned integer. */ /* Note: stored in 3 bytes, widened to uint32_t. */

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;
 667 
/* 16-bit signed fixed number with the low 14 bits of fraction (2.14).
 * Float conversion helpers are not implemented yet. */
struct F2DOT14 : SHORT
{
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (2);
};
 676 
/* 32-bit signed fixed-point number (16.16).
 * Float conversion helpers are not implemented yet. */
struct Fixed: LONG
{
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (4);
};
 685 
/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer,
 * stored here as two big-endian 32-bit halves. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  LONG major;   /* High (sign-carrying) 32 bits. */
  ULONG minor;  /* Low 32 bits. */
  public:
  DEFINE_SIZE_STATIC (8);
};
 701 
/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
/* The Null Tag is four spaces. */
DEFINE_NULL_DATA (Tag, "    ");
 713 
/* Glyph index number, same as uint16 (length = 16 bits) */
struct GlyphID : USHORT {
  /* qsort/bsearch-style comparator (operands swapped into the member cmp). */
  static inline int cmp (const GlyphID *a, const GlyphID *b) { return b->USHORT::cmp (*a); }
  /* Compare against a codepoint: <0, 0, >0 as in strcmp. */
  inline int cmp (hb_codepoint_t a) const { return (int) a - (int) *this; }
};
 719 
/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
/* The Null Index is NOT_FOUND_INDEX (0xFFFF). */
DEFINE_NULL_DATA (Index, "\xff\xff");
 725 
/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  /* A zero offset means "no object". */
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};
 734 
 735 
/* CheckSum */
struct CheckSum : ULONG
{
  /* This is reference implementation from the spec. */
  static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    /* Sum whole 32-bit words over Length rounded up to a 4-byte multiple. */
    const ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4byte aligned and have 4byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const ULONG *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};
 757 
 758 
 759 /*
 760  * Version Numbers
 761  */
 762 
/* A major.minor version number, each half stored as a FixedType
 * (e.g. USHORT gives the common 16.16-style table version). */
template <typename FixedType=USHORT>
struct FixedVersion
{
  /* Pack major into the high half and minor into the low half. */
  inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  FixedType major;
  FixedType minor;
  public:
  DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
};
 779 
 780 
 781 
 782 /*
 783  * Template subclasses of Offset that do the dereferencing.
 784  * Use: (base+offset)
 785  */
 786 
/* An offset that knows the Type it points at; dereferenced relative to a
 * caller-supplied base (usually the struct the offset lives in). */
template <typename Type, typename OffsetType=USHORT>
struct OffsetTo : Offset<OffsetType>
{
  /* Dereference against `base`; a zero offset yields the shared Null object. */
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  /* Point this offset at the current serialization head, relative to base,
   * and return the (not yet allocated) target. */
  inline Type& serialize (hb_serialize_context_t *c, const void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  /* Validate the offset and, recursively, the object it points at.  If the
   * target fails, attempt to neuter (zero) the offset instead of failing. */
  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c)) || neuter (c));
  }
  /* Same as above, for targets whose sanitize() takes extra user data. */
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) const {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
/* Syntactic sugar: (base+offset) dereferences an OffsetTo relative to base. */
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
 836 
 837 
 838 /*
 839  * Array Types
 840  */
 841 
 842 /* An array with a number of elements. */
 843 template <typename Type, typename LenType=USHORT>
 844 struct ArrayOf
 845 {
 846   const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
 847   {
 848     unsigned int count = len;
 849     if (unlikely (start_offset > count))
 850       count = 0;
 851     else
 852       count -= start_offset;
 853     count = MIN (count, *pcount);
 854     *pcount = count;
 855     return array + start_offset;
 856   }
 857 
 858   inline const Type& operator [] (unsigned int i) const
 859   {
 860     if (unlikely (i >= len)) return Null(Type);
 861     return array[i];
 862   }
 863   inline Type& operator [] (unsigned int i)
 864   {
 865     return array[i];
 866   }
 867   inline unsigned int get_size (void) const
 868   { return len.static_size + len * Type::static_size; }
 869 
 870   inline bool serialize (hb_serialize_context_t *c,
 871                          unsigned int items_len)
 872   {
 873     TRACE_SERIALIZE (this);
 874     if (unlikely (!c->extend_min (*this))) return_trace (false);
 875     len.set (items_len); /* TODO(serialize) Overflow? */
 876     if (unlikely (!c->extend (*this))) return_trace (false);
 877     return_trace (true);
 878   }
 879 
 880   inline bool serialize (hb_serialize_context_t *c,
 881                          Supplier<Type> &items,
 882                          unsigned int items_len)
 883   {
 884     TRACE_SERIALIZE (this);
 885     if (unlikely (!serialize (c, items_len))) return_trace (false);
 886     for (unsigned int i = 0; i < items_len; i++)
 887       array[i] = items[i];
 888     items.advance (items_len);
 889     return_trace (true);
 890   }
 891 
 892   inline bool sanitize (hb_sanitize_context_t *c) const
 893   {
 894     TRACE_SANITIZE (this);
 895     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 896 
 897     /* Note: for structs that do not reference other structs,
 898      * we do not need to call their sanitize() as we already did
 899      * a bound check on the aggregate array size.  We just include
 900      * a small unreachable expression to make sure the structs
 901      * pointed to do have a simple sanitize(), ie. they do not
 902      * reference other structs via offsets.
 903      */
 904     (void) (false && array[0].sanitize (c));
 905 
 906     return_trace (true);
 907   }
 908   inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
 909   {
 910     TRACE_SANITIZE (this);
 911     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 912     unsigned int count = len;
 913     for (unsigned int i = 0; i < count; i++)
 914       if (unlikely (!array[i].sanitize (c, base)))
 915         return_trace (false);
 916     return_trace (true);
 917   }
 918   template <typename T>
 919   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 920   {
 921     TRACE_SANITIZE (this);
 922     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 923     unsigned int count = len;
 924     for (unsigned int i = 0; i < count; i++)
 925       if (unlikely (!array[i].sanitize (c, base, user_data)))
 926         return_trace (false);
 927     return_trace (true);
 928   }
 929 
 930   template <typename SearchType>
 931   inline int lsearch (const SearchType &x) const
 932   {
 933     unsigned int count = len;
 934     for (unsigned int i = 0; i < count; i++)
 935       if (!this->array[i].cmp (x))
 936         return i;
 937     return -1;
 938   }
 939 
 940   private:
 941   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
 942   {
 943     TRACE_SANITIZE (this);
 944     return_trace (c->check_struct (this) && c->check_array (array, Type::static_size, len));
 945   }
 946 
  public:
  LenType len;		/* Number of elements stored in the array. */
  Type array[VAR];	/* Variable-length storage for the `len` elements. */
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
 953 
/* Array of Offset's: an ArrayOf whose elements are offsets (of
 * OffsetType width) to instances of Type. */
template <typename Type, typename OffsetType=USHORT>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
 957 
 958 /* Array of offsets relative to the beginning of the array itself. */
 959 template <typename Type>
 960 struct OffsetListOf : OffsetArrayOf<Type>
 961 {
 962   inline const Type& operator [] (unsigned int i) const
 963   {
 964     if (unlikely (i >= this->len)) return Null(Type);
 965     return this+this->array[i];
 966   }
 967 
 968   inline bool sanitize (hb_sanitize_context_t *c) const
 969   {
 970     TRACE_SANITIZE (this);
 971     return_trace (OffsetArrayOf<Type>::sanitize (c, this));
 972   }
 973   template <typename T>
 974   inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
 975   {
 976     TRACE_SANITIZE (this);
 977     return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
 978   }
 979 };
 980 
 981 
 982 /* An array starting at second element. */
 983 template <typename Type, typename LenType=USHORT>
 984 struct HeadlessArrayOf
 985 {
 986   inline const Type& operator [] (unsigned int i) const
 987   {
 988     if (unlikely (i >= len || !i)) return Null(Type);
 989     return array[i-1];
 990   }
 991   inline unsigned int get_size (void) const
 992   { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }
 993 
 994   inline bool serialize (hb_serialize_context_t *c,
 995                          Supplier<Type> &items,
 996                          unsigned int items_len)
 997   {
 998     TRACE_SERIALIZE (this);
 999     if (unlikely (!c->extend_min (*this))) return_trace (false);
1000     len.set (items_len); /* TODO(serialize) Overflow? */
1001     if (unlikely (!items_len)) return_trace (true);
1002     if (unlikely (!c->extend (*this))) return_trace (false);
1003     for (unsigned int i = 0; i < items_len - 1; i++)
1004       array[i] = items[i];
1005     items.advance (items_len - 1);
1006     return_trace (true);
1007   }
1008 
1009   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
1010   {
1011     return c->check_struct (this)
1012         && c->check_array (this, Type::static_size, len);
1013   }
1014 
1015   inline bool sanitize (hb_sanitize_context_t *c) const
1016   {
1017     TRACE_SANITIZE (this);
1018     if (unlikely (!sanitize_shallow (c))) return_trace (false);
1019 
1020     /* Note: for structs that do not reference other structs,
1021      * we do not need to call their sanitize() as we already did
1022      * a bound check on the aggregate array size.  We just include
1023      * a small unreachable expression to make sure the structs
1024      * pointed to do have a simple sanitize(), ie. they do not
1025      * reference other structs via offsets.
1026      */
1027     (void) (false && array[0].sanitize (c));
1028 
1029     return_trace (true);
1030   }
1031 
1032   LenType len;
1033   Type array[VAR];
1034   public:
1035   DEFINE_SIZE_ARRAY (sizeof (LenType), array);
1036 };
1037 
1038 
1039 /* An array with sorted elements.  Supports binary searching. */
1040 template <typename Type, typename LenType=USHORT>
1041 struct SortedArrayOf : ArrayOf<Type, LenType>
1042 {
1043   template <typename SearchType>
1044   inline int bsearch (const SearchType &x) const
1045   {
1046     /* Hand-coded bsearch here since this is in the hot inner loop. */
1047     int min = 0, max = (int) this->len - 1;
1048     while (min <= max)
1049     {
1050       int mid = (min + max) / 2;
1051       int c = this->array[mid].cmp (x);
1052       if (c < 0)
1053         max = mid - 1;
1054       else if (c > 0)
1055         min = mid + 1;
1056       else
1057         return mid;
1058     }
1059     return -1;
1060   }
1061 };
1062 
1063 
1064 } /* namespace OT */
1065 
1066 
1067 #endif /* HB_OPEN_TYPE_PRIVATE_HH */