
src/java.desktop/share/native/libfontmanager/harfbuzz/hb-open-type.hh

   9  * software and its documentation for any purpose, provided that the
  10  * above copyright notice and the following two paragraphs appear in
  11  * all copies of this software.
  12  *
  13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17  * DAMAGE.
  18  *
  19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
  22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24  *
  25  * Red Hat Author(s): Behdad Esfahbod
  26  * Google Author(s): Behdad Esfahbod
  27  */
  28 
  29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
  30 #define HB_OPEN_TYPE_PRIVATE_HH
  31 
  32 #include "hb-private.hh"
  33 #include "hb-debug.hh"
  34 #include "hb-blob-private.hh"
  35 #include "hb-face-private.hh"

  36 
  37 
  38 namespace OT {
  39 
  40 
  41 
  42 /*
  43  * Casts
  44  */
  45 
  46 /* Cast to struct T, reference to reference */
  47 template<typename Type, typename TObject>
  48 static inline const Type& CastR(const TObject &X)
  49 { return reinterpret_cast<const Type&> (X); }
  50 template<typename Type, typename TObject>
  51 static inline Type& CastR(TObject &X)
  52 { return reinterpret_cast<Type&> (X); }
  53 
  54 /* Cast to struct T, pointer to pointer */
  55 template<typename Type, typename TObject>
  56 static inline const Type* CastP(const TObject *X)
  57 { return reinterpret_cast<const Type*> (X); }
  58 template<typename Type, typename TObject>
  59 static inline Type* CastP(TObject *X)
  60 { return reinterpret_cast<Type*> (X); }
  61 
  62 /* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
  63  * location pointed to by P plus Ofs bytes. */
  64 template<typename Type>
  65 static inline const Type& StructAtOffset(const void *P, unsigned int offset)
  66 { return * reinterpret_cast<const Type*> ((const char *) P + offset); }
  67 template<typename Type>
  68 static inline Type& StructAtOffset(void *P, unsigned int offset)
  69 { return * reinterpret_cast<Type*> ((char *) P + offset); }
  70 
  71 /* StructAfter<T>(X) returns the struct T& that is placed after X.
  72  * Works with X of variable size also.  X must implement get_size() */
  73 template<typename Type, typename TObject>
  74 static inline const Type& StructAfter(const TObject &X)
  75 { return StructAtOffset<Type>(&X, X.get_size()); }
  76 template<typename Type, typename TObject>
  77 static inline Type& StructAfter(TObject &X)
  78 { return StructAtOffset<Type>(&X, X.get_size()); }
  79 
  80 
  81 
  82 /*
  83  * Size checking
  84  */
  85 
  86 /* Check _assertion in a method environment */
  87 #define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  88   inline void _instance_assertion_on_line_##_line (void) const \
  89   { \
  90     static_assert ((_assertion), ""); \
  91     ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  92   }
  93 # define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
  94 # define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)
  95 
  96 /* Check that _code compiles in a method environment */
  97 #define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  98   inline void _compiles_assertion_on_line_##_line (void) const \
  99   { _code; }
 100 # define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
 101 # define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)
 102 
 103 
 104 #define DEFINE_SIZE_STATIC(size) \
 105   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
 106   static const unsigned int static_size = (size); \
 107   static const unsigned int min_size = (size); \
 108   inline unsigned int get_size (void) const { return (size); }
 109 
 110 #define DEFINE_SIZE_UNION(size, _member) \
 111   DEFINE_INSTANCE_ASSERTION (0*sizeof(this->u._member.static_size) + sizeof(this->u._member) == (size)); \
 112   static const unsigned int min_size = (size)
 113 
 114 #define DEFINE_SIZE_MIN(size) \
 115   DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
 116   static const unsigned int min_size = (size)
 117 
 118 #define DEFINE_SIZE_ARRAY(size, array) \
 119   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
 120   DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
 121   static const unsigned int min_size = (size)
 122 
 123 #define DEFINE_SIZE_ARRAY2(size, array1, array2) \
 124   DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
 125   DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
 126   static const unsigned int min_size = (size)
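/* Illustrative sketch (hypothetical structs, mirroring how these macros are
 * used throughout this header): a fixed-size struct ends with
 * DEFINE_SIZE_STATIC, an open-ended one with DEFINE_SIZE_ARRAY:
 *
 *   struct ExamplePair { HBUINT16 first, second;               DEFINE_SIZE_STATIC (4); };
 *   struct ExampleList { HBUINT16 count; HBUINT16 values[VAR]; DEFINE_SIZE_ARRAY (2, values); };
 */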
 127 
 128 
 129 
 130 /*
 131  * Dispatch
 132  */
 133 
 134 template <typename Context, typename Return, unsigned int MaxDebugDepth>
 135 struct hb_dispatch_context_t
 136 {
 137   static const unsigned int max_debug_depth = MaxDebugDepth;
 138   typedef Return return_t;
 139   template <typename T, typename F>
 140   inline bool may_dispatch (const T *obj, const F *format) { return true; }
 141   static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
 142 };
 143 
 144 
 145 /*
 146  * Sanitize
 147  */
 148 
 149 /* This limits sanitizing time on really broken fonts. */
 150 #ifndef HB_SANITIZE_MAX_EDITS
 151 #define HB_SANITIZE_MAX_EDITS 32
 152 #endif
 153 #ifndef HB_SANITIZE_MAX_OPS_FACTOR
 154 #define HB_SANITIZE_MAX_OPS_FACTOR 8
 155 #endif
 156 #ifndef HB_SANITIZE_MAX_OPS_MIN
 157 #define HB_SANITIZE_MAX_OPS_MIN 16384
 158 #endif
 159 
 160 struct hb_sanitize_context_t :
 161        hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
 162 {
 163   inline hb_sanitize_context_t (void) :
 164         debug_depth (0),
 165         start (nullptr), end (nullptr),
 166         writable (false), edit_count (0), max_ops (0),
 167         blob (nullptr),
 168         num_glyphs (0) {}
 169 
 170   inline const char *get_name (void) { return "SANITIZE"; }
 171   template <typename T, typename F>
 172   inline bool may_dispatch (const T *obj, const F *format)
 173   { return format->sanitize (this); }
 174   template <typename T>
 175   inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
 176   static return_t default_return_value (void) { return true; }
 177   static return_t no_dispatch_return_value (void) { return false; }
 178   bool stop_sublookup_iteration (const return_t r) const { return !r; }
 179 
 180   inline void init (hb_blob_t *b)
 181   {
 182     this->blob = hb_blob_reference (b);
 183     this->writable = false;
 184   }
 185 
 186   inline void start_processing (void)
 187   {
 188     this->start = hb_blob_get_data (this->blob, nullptr);
 189     this->end = this->start + this->blob->length;
 190     assert (this->start <= this->end); /* Must not overflow. */
 191     this->max_ops = MAX ((unsigned int) (this->end - this->start) * HB_SANITIZE_MAX_OPS_FACTOR,
 192                          (unsigned) HB_SANITIZE_MAX_OPS_MIN);
 193     this->edit_count = 0;
 194     this->debug_depth = 0;
 195 
 196     DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
 197                      "start [%p..%p] (%lu bytes)",
 198                      this->start, this->end,
 199                      (unsigned long) (this->end - this->start));
 200   }
 201 
 202   inline void end_processing (void)
 203   {
 204     DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
 205                      "end [%p..%p] %u edit requests",
 206                      this->start, this->end, this->edit_count);
 207 
 208     hb_blob_destroy (this->blob);
 209     this->blob = nullptr;
 210     this->start = this->end = nullptr;
 211   }
 212 
 213   inline bool check_range (const void *base, unsigned int len) const
 214   {
 215     const char *p = (const char *) base;
 216     bool ok = this->max_ops-- > 0 &&
 217               this->start <= p &&
 218               p <= this->end &&
 219               (unsigned int) (this->end - p) >= len;
 220 
 221     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
 222        "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
 223        p, p + len, len,
 224        this->start, this->end,
 225        ok ? "OK" : "OUT-OF-RANGE");
 226 
 227     return likely (ok);
 228   }
 229 
 230   inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
 231   {
 232     const char *p = (const char *) base;
 233     bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
 234     unsigned int array_size = record_size * len;
 235     bool ok = !overflows && this->check_range (base, array_size);
 236 
 237     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
 238        "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
 239        p, p + (record_size * len), record_size, len, (unsigned int) array_size,
 240        this->start, this->end,
 241        overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");
 242 
 243     return likely (ok);
 244   }
 245 
 246   template <typename Type>
 247   inline bool check_struct (const Type *obj) const
 248   {
 249     return likely (this->check_range (obj, obj->min_size));
 250   }
 251 
 252   inline bool may_edit (const void *base, unsigned int len)
 253   {
 254     if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
 255       return false;
 256 
 257     const char *p = (const char *) base;
 258     this->edit_count++;
 259 
 260     DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
 261        "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
 262        this->edit_count,
 263        p, p + len, len,
 264        this->start, this->end,
 265        this->writable ? "GRANTED" : "DENIED");
 266 
 267     return this->writable;
 268   }
 269 
 270   template <typename Type, typename ValueType>
 271   inline bool try_set (const Type *obj, const ValueType &v) {
 272     if (this->may_edit (obj, obj->static_size)) {
 273       const_cast<Type *> (obj)->set (v);
 274       return true;
 275     }
 276     return false;
 277   }
 278 
 279   mutable unsigned int debug_depth;
 280   const char *start, *end;
 281   bool writable;
 282   unsigned int edit_count;
 283   mutable int max_ops;
 284   hb_blob_t *blob;
 285   unsigned int num_glyphs;
 286 };
 287 
 288 
 289 
 290 /* Template to sanitize an object. */
 291 template <typename Type>
 292 struct Sanitizer
 293 {
 294   inline Sanitizer (void) {}
 295 
 296   inline hb_blob_t *sanitize (hb_blob_t *blob) {
 297     bool sane;
 298 
 299     /* TODO is_sane() stuff */
 300 
 301     c->init (blob);
 302 
 303   retry:
 304     DEBUG_MSG_FUNC (SANITIZE, c->start, "start");
 305 
 306     c->start_processing ();
 307 
 308     if (unlikely (!c->start)) {
 309       c->end_processing ();
 310       return blob;
 311     }
 312 
 313     Type *t = CastP<Type> (const_cast<char *> (c->start));
 314 
 315     sane = t->sanitize (c);
 316     if (sane) {
 317       if (c->edit_count) {
 318         DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);
 319 
 320         /* sanitize again to ensure no toe-stepping */
 321         c->edit_count = 0;
 322         sane = t->sanitize (c);
 323         if (c->edit_count) {
  324           DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILING", c->edit_count);
 325           sane = false;
 326         }
 327       }
 328     } else {
 329       unsigned int edit_count = c->edit_count;
 330       if (edit_count && !c->writable) {
 331         c->start = hb_blob_get_data_writable (blob, nullptr);
 332         c->end = c->start + blob->length;
 333 
 334         if (c->start) {
 335           c->writable = true;
 336           /* ok, we made it writable by relocating.  try again */
 337           DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
 338           goto retry;
 339         }
 340       }
 341     }
 342 
 343     c->end_processing ();
 344 
 345     DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
 346     if (sane)
 347     {
 348       blob->lock ();
 349       return blob;
 350     }
 351     else
 352     {
 353       hb_blob_destroy (blob);
 354       return hb_blob_get_empty ();
 355     }
 356   }
 357 
 358   inline void set_num_glyphs (unsigned int num_glyphs) { c->num_glyphs = num_glyphs; }
 359 
 360   private:
 361   hb_sanitize_context_t c[1];
 362 };
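/* Illustrative use (SomeTable is a hypothetical table type), following the
 * same pattern as hb_table_lazy_loader_t further below; the result is either
 * the (possibly edited) source blob or the empty blob on failure:
 *
 *   hb_blob_t *sane = OT::Sanitizer<SomeTable> ().sanitize (face->reference_table (SomeTable::tableTag));
 *   const SomeTable *table = OT::CastP<SomeTable> (hb_blob_get_data (sane, nullptr));
 */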
 363 
 364 
 365 
 366 /*
 367  * Serialize
 368  */
 369 
 370 
 371 struct hb_serialize_context_t
 372 {
 373   inline hb_serialize_context_t (void *start_, unsigned int size)
 374   {
 375     this->start = (char *) start_;
 376     this->end = this->start + size;
 377 
 378     this->ran_out_of_room = false;
 379     this->head = this->start;
 380     this->debug_depth = 0;
 381   }
 382 
 383   template <typename Type>
 384   inline Type *start_serialize (void)
 385   {
 386     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
 387                      "start [%p..%p] (%lu bytes)",
 388                      this->start, this->end,
 389                      (unsigned long) (this->end - this->start));
 390 
 391     return start_embed<Type> ();
 392   }
 393 
 394   inline void end_serialize (void)
 395   {
 396     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
 397                      "end [%p..%p] serialized %d bytes; %s",
 398                      this->start, this->end,
 399                      (int) (this->head - this->start),
  400                      this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");
 401 
 402   }
 403 
 404   template <typename Type>
 405   inline Type *copy (void)
 406   {
 407     assert (!this->ran_out_of_room);
 408     unsigned int len = this->head - this->start;
 409     void *p = malloc (len);
 410     if (p)
 411       memcpy (p, this->start, len);
 412     return reinterpret_cast<Type *> (p);
 413   }
 414 
 415   template <typename Type>
 416   inline Type *allocate_size (unsigned int size)
 417   {
 418     if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
 419       this->ran_out_of_room = true;
 420       return nullptr;
 421     }
 422     memset (this->head, 0, size);
 423     char *ret = this->head;
 424     this->head += size;
 425     return reinterpret_cast<Type *> (ret);
 426   }
 427 
 428   template <typename Type>
 429   inline Type *allocate_min (void)
 430   {
 431     return this->allocate_size<Type> (Type::min_size);
 432   }
 433 
 434   template <typename Type>
 435   inline Type *start_embed (void)
 436   {
 437     Type *ret = reinterpret_cast<Type *> (this->head);
 438     return ret;
 439   }
 440 
 441   template <typename Type>
 442   inline Type *embed (const Type &obj)
 443   {
 444     unsigned int size = obj.get_size ();
 445     Type *ret = this->allocate_size<Type> (size);
 446     if (unlikely (!ret)) return nullptr;
 447     memcpy (ret, obj, size);
 448     return ret;
 449   }
 450 
 451   template <typename Type>
 452   inline Type *extend_min (Type &obj)
 453   {
 454     unsigned int size = obj.min_size;
 455     assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
 456     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
 457     return reinterpret_cast<Type *> (&obj);
 458   }
 459 
 460   template <typename Type>
 461   inline Type *extend (Type &obj)
 462   {
 463     unsigned int size = obj.get_size ();
 464     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
 465     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
 466     return reinterpret_cast<Type *> (&obj);
 467   }
 468 
 469   inline void truncate (void *new_head)
 470   {
 471     assert (this->start < new_head && new_head <= this->head);
 472     this->head = (char *) new_head;
 473   }
 474 
 475   unsigned int debug_depth;
 476   char *start, *end, *head;
 477   bool ran_out_of_room;
 478 };
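/* Illustrative sketch (SomeTable is hypothetical, with serialize() helpers
 * that call extend()/allocate_size() on the context):
 *
 *   char buf[4096];
 *   hb_serialize_context_t c (buf, sizeof (buf));
 *   SomeTable *t = c.start_serialize<SomeTable> ();
 *   ... fill *t via its serialize() methods ...
 *   c.end_serialize ();
 *   SomeTable *result = c.ran_out_of_room ? nullptr : c.copy<SomeTable> (); // malloc'ed copy; free() when done
 */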
 479 
 480 template <typename Type>
 481 struct Supplier
 482 {
 483   inline Supplier (const Type *array, unsigned int len_, unsigned int stride_=sizeof(Type))
 484   {
 485     head = array;
 486     len = len_;
 487     stride = stride_;
 488   }
 489   inline const Type operator [] (unsigned int i) const
 490   {
 491     if (unlikely (i >= len)) return Type ();
 492     return * (const Type *) (const void *) ((const char *) head + stride * i);
 493   }
 494 
 495   inline Supplier<Type> & operator += (unsigned int count)
 496   {
 497     if (unlikely (count > len))
 498       count = len;
 499     len -= count;
 500     head = (const Type *) (const void *) ((const char *) head + stride * count);
 501     return *this;
 502   }
 503 
 504   private:
 505   inline Supplier (const Supplier<Type> &); /* Disallow copy */
 506   inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */
 507 
 508   unsigned int len;
 509   unsigned int stride;
 510   const Type *head;
 511 };
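/* Illustrative sketch: Supplier is the input side of the serialize() methods
 * below (e.g. ArrayOf::serialize); `glyphs` and `count` are hypothetical
 * caller-provided data:
 *
 *   const GlyphID *glyphs = ...;                 // `count` elements
 *   Supplier<GlyphID> supplier (glyphs, count);
 *   glyph_array.serialize (&c, supplier, count); // consumes `count` items from the supplier
 */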
 512 
 513 
 514 /*
 515  *
 516  * The OpenType Font File: Data Types
 517  */
 518 
 519 
 520 /* "The following data types are used in the OpenType font file.
 521  *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
 522 
 523 /*
 524  * Int types
 525  */
 526 
 527 
 528 template <typename Type, int Bytes> struct BEInt;
 529 
 530 template <typename Type>
 531 struct BEInt<Type, 1>
 532 {
 533   public:
 534   inline void set (Type V)
 535   {
 536     v = V;
 537   }
 538   inline operator Type (void) const
 539   {
 540     return v;
 541   }
 542   private: uint8_t v;
 543 };
 544 template <typename Type>
 545 struct BEInt<Type, 2>
 546 {
 547   public:
 548   inline void set (Type V)
 549   {
 550     v[0] = (V >>  8) & 0xFF;
 551     v[1] = (V      ) & 0xFF;
 552   }
 553   inline operator Type (void) const
 554   {
 555     return (v[0] <<  8)
 556          + (v[1]      );
 557   }
 558   private: uint8_t v[2];
 559 };
 560 template <typename Type>
 561 struct BEInt<Type, 3>
 562 {
 563   public:
 564   inline void set (Type V)
 565   {
 566     v[0] = (V >> 16) & 0xFF;
 567     v[1] = (V >>  8) & 0xFF;
 568     v[2] = (V      ) & 0xFF;
 569   }
 570   inline operator Type (void) const
 571   {
 572     return (v[0] << 16)
 573          + (v[1] <<  8)
 574          + (v[2]      );
 575   }
 576   private: uint8_t v[3];
 577 };
 578 template <typename Type>
 579 struct BEInt<Type, 4>
 580 {
 581   public:
 582   inline void set (Type V)
 583   {
 584     v[0] = (V >> 24) & 0xFF;
 585     v[1] = (V >> 16) & 0xFF;
 586     v[2] = (V >>  8) & 0xFF;
 587     v[3] = (V      ) & 0xFF;
 588   }
 589   inline operator Type (void) const
 590   {
 591     return (v[0] << 24)
 592          + (v[1] << 16)
 593          + (v[2] <<  8)
 594          + (v[3]      );
 595   }
 596   private: uint8_t v[4];
 597 };
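/* Worked example: after `BEInt<uint32_t, 4> be; be.set (0x12345678u);` the
 * stored bytes are {0x12, 0x34, 0x56, 0x78}, and `(uint32_t) be` reads back
 * 0x12345678 on any host byte order. */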
 598 
 599 /* Integer types in big-endian order and no alignment requirement */
 600 template <typename Type, unsigned int Size>
 601 struct IntType
 602 {
 603   inline void set (Type i) { v.set (i); }
 604   inline operator Type(void) const { return v; }
 605   inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
 606   inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
 607   static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }



 608   template <typename Type2>
 609   inline int cmp (Type2 a) const
 610   {
 611     Type b = v;
 612     if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int))
 613       return (int) a - (int) b;
 614     else
 615       return a < b ? -1 : a == b ? 0 : +1;
 616   }
 617   inline bool sanitize (hb_sanitize_context_t *c) const
 618   {
 619     TRACE_SANITIZE (this);
 620     return_trace (likely (c->check_struct (this)));
 621   }
 622   protected:
 623   BEInt<Type, Size> v;
 624   public:
 625   DEFINE_SIZE_STATIC (Size);
 626 };
 627 
 628 typedef IntType<uint8_t,  1> HBUINT8;   /* 8-bit unsigned integer. */
 629 typedef IntType<int8_t,   1> HBINT8;    /* 8-bit signed integer. */
 630 typedef IntType<uint16_t, 2> HBUINT16;  /* 16-bit unsigned integer. */
 631 typedef IntType<int16_t,  2> HBINT16;   /* 16-bit signed integer. */
 632 typedef IntType<uint32_t, 4> HBUINT32;  /* 32-bit unsigned integer. */
 633 typedef IntType<int32_t,  4> HBINT32;   /* 32-bit signed integer. */


 634 typedef IntType<uint32_t, 3> HBUINT24;  /* 24-bit unsigned integer. */
 635 
 636 /* 16-bit signed integer (HBINT16) that describes a quantity in FUnits. */
 637 typedef HBINT16 FWORD;
 638 



 639 /* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */
 640 typedef HBUINT16 UFWORD;
 641 
 642 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
 643 struct F2DOT14 : HBINT16
 644 {
 645   // 16384 means 1<<14
 646   inline float to_float (void) const { return ((int32_t) v) / 16384.f; }
 647   inline void set_float (float f) { v.set (round (f * 16384.f)); }
 648   public:
 649   DEFINE_SIZE_STATIC (2);
 650 };
 651 
 652 /* 32-bit signed fixed-point number (16.16). */
 653 struct Fixed : HBINT32
 654 {
 655   // 65536 means 1<<16
 656   inline float to_float (void) const { return ((int32_t) v) / 65536.f; }
 657   inline void set_float (float f) { v.set (round (f * 65536.f)); }
 658   public:
 659   DEFINE_SIZE_STATIC (4);
 660 };
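/* Worked examples: an F2DOT14 whose raw value is 0x6000 (24576) converts to
 * 24576 / 16384.f = 1.5f; a Fixed whose raw value is 0x00018000 (98304)
 * converts to 98304 / 65536.f = 1.5f. */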
 661 
 662 /* Date represented in number of seconds since 12:00 midnight, January 1,
 663  * 1904. The value is represented as a signed 64-bit integer. */
 664 struct LONGDATETIME
 665 {
 666   inline bool sanitize (hb_sanitize_context_t *c) const
 667   {
 668     TRACE_SANITIZE (this);
 669     return_trace (likely (c->check_struct (this)));
 670   }
 671   protected:
 672   HBINT32 major;
 673   HBUINT32 minor;
 674   public:
 675   DEFINE_SIZE_STATIC (8);
 676 };
 677 
 678 /* Array of four uint8s (length = 32 bits) used to identify a script, language
 679  * system, feature, or baseline */
 680 struct Tag : HBUINT32
 681 {
 682   /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
 683   inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
 684   inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
 685   public:
 686   DEFINE_SIZE_STATIC (4);
 687 };
 688 DEFINE_NULL_DATA (OT, Tag, "    ");
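/* Illustrative use (`table_tag` is a hypothetical Tag value): since the
 * conversion is not nul-terminated, print with a bounded width:
 *
 *   printf ("%.4s", (const char *) table_tag);
 */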
 689 
 690 /* Glyph index number, same as uint16 (length = 16 bits) */
 691 typedef HBUINT16 GlyphID;
 692 
 693 /* Name-table index, same as uint16 (length = 16 bits) */
 694 typedef HBUINT16 NameID;
 695 
 696 /* Script/language-system/feature index */
 697 struct Index : HBUINT16 {
 698   static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
 699 };
 700 DEFINE_NULL_DATA (OT, Index, "\xff\xff");


 701 
 702 /* Offset, Null offset = 0 */
 703 template <typename Type>
 704 struct Offset : Type
 705 {
 706   inline bool is_null (void) const { return 0 == *this; }


 707 
 708   inline void *serialize (hb_serialize_context_t *c, const void *base)
 709   {
 710     void *t = c->start_embed<void> ();
 711     this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
 712     return t;
 713   }
 714 
 715   public:
 716   DEFINE_SIZE_STATIC (sizeof(Type));
 717 };
 718 
 719 typedef Offset<HBUINT16> Offset16;
 720 typedef Offset<HBUINT32> Offset32;
 721 
 722 
 723 /* CheckSum */
 724 struct CheckSum : HBUINT32
 725 {
  726   /* This is the reference implementation from the spec. */
 727   static inline uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
 728   {
 729     uint32_t Sum = 0L;
 730     assert (0 == (Length & 3));
 731     const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size;
 732 
 733     while (Table < EndPtr)
 734       Sum += *Table++;
 735     return Sum;
 736   }
 737 
  738   /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
 739   inline void set_for_data (const void *data, unsigned int length)
 740   { set (CalcTableChecksum ((const HBUINT32 *) data, length)); }
 741 
 742   public:
 743   DEFINE_SIZE_STATIC (4);
 744 };
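/* Illustrative use (`table_data` / `padded_length` are hypothetical, with
 * padded_length a multiple of four as the assert above requires):
 *
 *   CheckSum checksum;
 *   checksum.set_for_data (table_data, padded_length);
 */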
 745 
 746 
 747 /*
 748  * Version Numbers
 749  */
 750 
 751 template <typename FixedType=HBUINT16>
 752 struct FixedVersion
 753 {
 754   inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }
 755 
 756   inline bool sanitize (hb_sanitize_context_t *c) const
 757   {
 758     TRACE_SANITIZE (this);
 759     return_trace (c->check_struct (this));
 760   }
 761 
 762   FixedType major;
 763   FixedType minor;
 764   public:
 765   DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
 766 };
 767 
 768 
 769 
 770 /*
 771  * Template subclasses of Offset that do the dereferencing.
 772  * Use: (base+offset)
 773  */
 774 
 775 template <typename Type, typename OffsetType=HBUINT16>
 776 struct OffsetTo : Offset<OffsetType>
 777 {
 778   inline const Type& operator () (const void *base) const
 779   {
 780     unsigned int offset = *this;
 781     if (unlikely (!offset)) return Null(Type);
 782     return StructAtOffset<const Type> (base, offset);
 783   }
 784   inline Type& operator () (void *base) const
 785   {
 786     unsigned int offset = *this;
 787     if (unlikely (!offset)) return Crap(Type);
 788     return StructAtOffset<Type> (base, offset);
 789   }
 790 
 791   inline Type& serialize (hb_serialize_context_t *c, const void *base)
 792   {
 793     return * (Type *) Offset<OffsetType>::serialize (c, base);
 794   }
 795 
 796   inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
 797   {
 798     TRACE_SANITIZE (this);
 799     if (unlikely (!c->check_struct (this))) return_trace (false);
 800     unsigned int offset = *this;
 801     if (unlikely (!offset)) return_trace (true);
 802     if (unlikely (!c->check_range (base, offset))) return_trace (false);
 803     const Type &obj = StructAtOffset<Type> (base, offset);
 804     return_trace (likely (obj.sanitize (c)) || neuter (c));
 805   }
 806   template <typename T>
 807   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 808   {
 809     TRACE_SANITIZE (this);
 810     if (unlikely (!c->check_struct (this))) return_trace (false);
 811     unsigned int offset = *this;
 812     if (unlikely (!offset)) return_trace (true);
 813     if (unlikely (!c->check_range (base, offset))) return_trace (false);
 814     const Type &obj = StructAtOffset<Type> (base, offset);
 815     return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
 816   }
 817 
 818   /* Set the offset to Null */
 819   inline bool neuter (hb_sanitize_context_t *c) const {


 820     return c->try_set (this, 0);
 821   }
 822   DEFINE_SIZE_STATIC (sizeof(OffsetType));
 823 };
 824 template <typename Type> struct LOffsetTo : OffsetTo<Type, HBUINT32> {};
 825 template <typename Base, typename OffsetType, typename Type>
 826 static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
 827 template <typename Base, typename OffsetType, typename Type>
 828 static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
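/* Illustrative sketch (Container and SomeTable are hypothetical): an offset
 * field is declared as OffsetTo<> and dereferenced with the operator+ above,
 * relative to the struct that contains it:
 *
 *   struct Container
 *   {
 *     inline const SomeTable& get_some_table (void) const { return this+someTable; }
 *     OffsetTo<SomeTable> someTable; // offset from the beginning of Container
 *     DEFINE_SIZE_STATIC (2);
 *   };
 */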
 829 
 830 
 831 /*
 832  * Array Types
 833  */
 834 
 835 
 836 /* TODO Use it in ArrayOf, HeadlessArrayOf, and other places around the code base?? */
 837 template <typename Type>
 838 struct UnsizedArrayOf
 839 {
 840   inline const Type& operator [] (unsigned int i) const { return arrayZ[i]; }
 841   inline Type& operator [] (unsigned int i) { return arrayZ[i]; }
 842 
 843   inline bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
 844   {
 845     TRACE_SANITIZE (this);
 846     if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 847 
 848     /* Note: for structs that do not reference other structs,
 849      * we do not need to call their sanitize() as we already did
 850      * a bound check on the aggregate array size.  We just include
 851      * a small unreachable expression to make sure the structs
  852      * pointed to do have a simple sanitize(), i.e. they do not
 853      * reference other structs via offsets.
 854      */
 855     (void) (false && arrayZ[0].sanitize (c));
 856 
 857     return_trace (true);
 858   }
 859   inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const
 860   {
 861     TRACE_SANITIZE (this);
 862     if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 863     for (unsigned int i = 0; i < count; i++)
 864       if (unlikely (!arrayZ[i].sanitize (c, base)))
 865         return_trace (false);
 866     return_trace (true);
 867   }
 868   template <typename T>
 869   inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const
 870   {
 871     TRACE_SANITIZE (this);
 872     if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 873     for (unsigned int i = 0; i < count; i++)
 874       if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
 875         return_trace (false);
 876     return_trace (true);
 877   }
 878 
 879   inline bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const
 880   {
 881     TRACE_SANITIZE (this);
 882     return_trace (c->check_array (arrayZ, arrayZ[0].static_size, count));
 883   }
 884 
 885   public:
 886   Type  arrayZ[VAR];
 887   public:
 888   DEFINE_SIZE_ARRAY (0, arrayZ);
 889 };
 890 
  891 /* Unsized array of offsets */
 892 template <typename Type, typename OffsetType>
 893 struct UnsizedOffsetArrayOf : UnsizedArrayOf<OffsetTo<Type, OffsetType> > {};
 894 
 895 /* Unsized array of offsets relative to the beginning of the array itself. */
 896 template <typename Type, typename OffsetType>
 897 struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType>
 898 {
 899   inline const Type& operator [] (unsigned int i) const
 900   {
 901     return this+this->arrayZ[i];
 902   }
 903 
 904   inline bool sanitize (hb_sanitize_context_t *c, unsigned int count) const

 905   {
 906     TRACE_SANITIZE (this);
 907     return_trace ((UnsizedOffsetArrayOf<Type, OffsetType>::sanitize (c, count, this)));
 908   }
 909   template <typename T>
 910   inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const
 911   {
 912     TRACE_SANITIZE (this);
 913     return_trace ((UnsizedOffsetArrayOf<Type, OffsetType>::sanitize (c, count, this, user_data)));
 914   }
 915 };
 916 
 917 
  918 /* An array preceded by its length (number of elements). */
 919 template <typename Type, typename LenType=HBUINT16>
 920 struct ArrayOf
 921 {
 922   const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
 923   {
 924     unsigned int count = len;
 925     if (unlikely (start_offset > count))
 926       count = 0;
 927     else
 928       count -= start_offset;
 929     count = MIN (count, *pcount);
 930     *pcount = count;
 931     return arrayZ + start_offset;
 932   }
 933 
 934   inline const Type& operator [] (unsigned int i) const
 935   {
 936     if (unlikely (i >= len)) return Null(Type);

 937     return arrayZ[i];
 938   }
 939   inline Type& operator [] (unsigned int i)
 940   {
 941     if (unlikely (i >= len)) return Crap(Type);

 942     return arrayZ[i];
 943   }
 944   inline unsigned int get_size (void) const

 945   { return len.static_size + len * Type::static_size; }
 946 
 947   inline bool serialize (hb_serialize_context_t *c,
 948                          unsigned int items_len)
 949   {
 950     TRACE_SERIALIZE (this);
 951     if (unlikely (!c->extend_min (*this))) return_trace (false);
 952     len.set (items_len); /* TODO(serialize) Overflow? */
 953     if (unlikely (!c->extend (*this))) return_trace (false);
 954     return_trace (true);
 955   }
 956 
 957   inline bool serialize (hb_serialize_context_t *c,
 958                          Supplier<Type> &items,
 959                          unsigned int items_len)
 960   {
 961     TRACE_SERIALIZE (this);
 962     if (unlikely (!serialize (c, items_len))) return_trace (false);
 963     for (unsigned int i = 0; i < items_len; i++)
 964       arrayZ[i] = items[i];
 965     items += items_len;
 966     return_trace (true);
 967   }
 968 
 969   inline bool sanitize (hb_sanitize_context_t *c) const
 970   {
 971     TRACE_SANITIZE (this);
 972     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 973 
 974     /* Note: for structs that do not reference other structs,
 975      * we do not need to call their sanitize() as we already did
 976      * a bound check on the aggregate array size.  We just include
 977      * a small unreachable expression to make sure the structs
  978      * pointed to do have a simple sanitize(), i.e. they do not
 979      * reference other structs via offsets.
 980      */
 981     (void) (false && arrayZ[0].sanitize (c));
 982 
 983     return_trace (true);
 984   }
 985   inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
 986   {
 987     TRACE_SANITIZE (this);
 988     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 989     unsigned int count = len;
 990     for (unsigned int i = 0; i < count; i++)
 991       if (unlikely (!arrayZ[i].sanitize (c, base)))
 992         return_trace (false);
 993     return_trace (true);
 994   }
 995   template <typename T>
 996   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 997   {
 998     TRACE_SANITIZE (this);
 999     if (unlikely (!sanitize_shallow (c))) return_trace (false);
1000     unsigned int count = len;
1001     for (unsigned int i = 0; i < count; i++)
1002       if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
1003         return_trace (false);
1004     return_trace (true);
1005   }
1006 
1007   template <typename SearchType>
1008   inline int lsearch (const SearchType &x) const
1009   {
1010     unsigned int count = len;
1011     for (unsigned int i = 0; i < count; i++)
1012       if (!this->arrayZ[i].cmp (x))
1013         return i;
1014     return -1;
1015   }
1016 
1017   inline void qsort (void)
1018   {
1019     ::qsort (arrayZ, len, sizeof (Type), Type::cmp);
1020   }
1021 
1022   private:
1023   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
1024   {
1025     TRACE_SANITIZE (this);
1026     return_trace (len.sanitize (c) && c->check_array (arrayZ, Type::static_size, len));
1027   }
1028 
1029   public:
1030   LenType len;
1031   Type arrayZ[VAR];
1032   public:
1033   DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
1034 };
1035 template <typename Type> struct LArrayOf : ArrayOf<Type, HBUINT32> {};
1036 typedef ArrayOf<HBUINT8, HBUINT8> PString;
1037 
1038 /* Array of Offsets */
1039 template <typename Type, typename OffsetType=HBUINT16>
1040 struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
1041 
1042 /* Array of offsets relative to the beginning of the array itself. */
1043 template <typename Type>
1044 struct OffsetListOf : OffsetArrayOf<Type>
1045 {
1046   inline const Type& operator [] (unsigned int i) const
1047   {
1048     if (unlikely (i >= this->len)) return Null(Type);

1049     return this+this->arrayZ[i];
1050   }
1051   inline const Type& operator [] (unsigned int i)
1052   {
1053     if (unlikely (i >= this->len)) return Crap(Type);

1054     return this+this->arrayZ[i];
1055   }
1056 
1057   inline bool sanitize (hb_sanitize_context_t *c) const
1058   {
1059     TRACE_SANITIZE (this);
1060     return_trace (OffsetArrayOf<Type>::sanitize (c, this));
1061   }
1062   template <typename T>
1063   inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
1064   {
1065     TRACE_SANITIZE (this);
1066     return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
1067   }
1068 };
1069 
1070 
1071 /* An array starting at the second element. */
1072 template <typename Type, typename LenType=HBUINT16>
1073 struct HeadlessArrayOf
1074 {
1075   inline const Type& operator [] (unsigned int i) const
1076   {
1077     if (unlikely (i >= len || !i)) return Null(Type);

1078     return arrayZ[i-1];
1079   }
1080   inline Type& operator [] (unsigned int i)
1081   {
1082     if (unlikely (i >= len || !i)) return Crap(Type);

1083     return arrayZ[i-1];
1084   }
1085   inline unsigned int get_size (void) const
1086   { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }
1087 
1088   inline bool serialize (hb_serialize_context_t *c,
1089                          Supplier<Type> &items,
1090                          unsigned int items_len)
1091   {
1092     TRACE_SERIALIZE (this);
1093     if (unlikely (!c->extend_min (*this))) return_trace (false);
1094     len.set (items_len); /* TODO(serialize) Overflow? */
1095     if (unlikely (!items_len)) return_trace (true);
1096     if (unlikely (!c->extend (*this))) return_trace (false);
1097     for (unsigned int i = 0; i < items_len - 1; i++)
1098       arrayZ[i] = items[i];
1099     items += items_len - 1;
1100     return_trace (true);
1101   }
1102 
1103   inline bool sanitize (hb_sanitize_context_t *c) const
1104   {
1105     TRACE_SANITIZE (this);
1106     if (unlikely (!sanitize_shallow (c))) return_trace (false);
1107 
1108     /* Note: for structs that do not reference other structs,
1109      * we do not need to call their sanitize() as we already did
1110      * a bound check on the aggregate array size.  We just include
1111      * a small unreachable expression to make sure the structs
1112      * pointed to do have a simple sanitize(), i.e. they do not
1113      * reference other structs via offsets.
1114      */
1115     (void) (false && arrayZ[0].sanitize (c));
1116 
1117     return_trace (true);
1118   }
1119 
1120   private:
1121   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
1122   {
1123     TRACE_SANITIZE (this);
1124     return_trace (len.sanitize (c) &&
1125                   (!len || c->check_array (arrayZ, Type::static_size, len - 1)));
1126   }
1127 
1128   public:
1129   LenType len;
1130   Type arrayZ[VAR];
1131   public:
1132   DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
1133 };
1134 
1135 
1136 /*
1137  * An array with sorted elements.  Supports binary searching.
1138  */
1139 template <typename Type, typename LenType=HBUINT16>
1140 struct SortedArrayOf : ArrayOf<Type, LenType>
1141 {
1142   template <typename SearchType>
1143   inline int bsearch (const SearchType &x) const

1144   {
1145     /* Hand-coded bsearch here since this is in the hot inner loop. */
1146     const Type *arr = this->arrayZ;
1147     int min = 0, max = (int) this->len - 1;
1148     while (min <= max)

1149     {
1150       int mid = (min + max) / 2;
1151       int c = arr[mid].cmp (x);
1152       if (c < 0)
1153         max = mid - 1;
1154       else if (c > 0)
1155         min = mid + 1;
1156       else
1157         return mid;
1158     }
1159     return -1;
1160   }
1161 };
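/* Illustrative use (a hypothetical sorted array of glyph ids): bsearch()
 * returns the index of the matching element, or -1 when not found:
 *
 *   int i = glyphs.bsearch (glyph_id);  // glyphs is a SortedArrayOf<GlyphID>
 *   if (i != -1)
 *     use (glyphs[i]);                  // found
 */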
1162 
1163 /*
1164  * Binary-search arrays
1165  */
1166 

1167 struct BinSearchHeader
1168 {
1169   inline operator uint32_t (void) const { return len; }
1170 
1171   inline bool sanitize (hb_sanitize_context_t *c) const
1172   {
1173     TRACE_SANITIZE (this);
1174     return_trace (c->check_struct (this));
1175   }
1176 
1177   inline void set (unsigned int v)
1178   {
1179     len.set (v);
1180     assert (len == v);
1181     entrySelector.set (MAX (1u, _hb_bit_storage (v)) - 1);
1182     searchRange.set (16 * (1u << entrySelector));
1183     rangeShift.set (v * 16 > searchRange
1184                     ? 16 * v - searchRange
1185                     : 0);
1186   }
1187 
1188   protected:
1189   HBUINT16      len;
1190   HBUINT16      searchRange;
1191   HBUINT16      entrySelector;
1192   HBUINT16      rangeShift;
1193 
1194   public:
1195   DEFINE_SIZE_STATIC (8);
1196 };
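/* Worked example: set (39) stores len = 39, entrySelector = 5
 * (floor(log2(39))), searchRange = 16 * 2^5 = 512, and
 * rangeShift = 39 * 16 - 512 = 112, matching the binary-search header
 * fields defined by the OpenType spec. */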
1197 
1198 template <typename Type>
1199 struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader> {};
1200 
1201 
1202 /* Lazy struct and blob loaders. */
1203 
1204 /* Logic is shared between hb_lazy_loader_t and hb_table_lazy_loader_t */
1205 template <typename T>
1206 struct hb_lazy_loader_t
1207 {
1208   inline void init (hb_face_t *face_)

1209   {
1210     face = face_;
1211     instance = nullptr;
1212   }
1213 
1214   inline void fini (void)
1215   {
1216     if (instance && instance != &Null(T))
1217     {
1218       instance->fini();
1219       free (instance);
1220     }
1221   }
1222 
1223   inline const T* get (void) const
1224   {
1225   retry:
1226     T *p = (T *) hb_atomic_ptr_get (&instance);
1227     if (unlikely (!p))
1228     {
1229       p = (T *) calloc (1, sizeof (T));
1230       if (unlikely (!p))
1231         p = const_cast<T *> (&Null(T));
1232       else
1233         p->init (face);
1234       if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p)))
1235       {
1236         if (p != &Null(T))
1237           p->fini ();
1238         goto retry;
1239       }
1240     }
1241     return p;
1242   }
1243 
1244   inline const T* operator-> (void) const
1245   {
1246     return get ();
1247   }
1248 
1249   private:
1250   hb_face_t *face;
1251   T *instance;
1252 };
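/* Illustrative use (SomeStruct is hypothetical; it must provide
 * init(hb_face_t*) and fini(), since get() above calls them); the atomic
 * compare-and-exchange makes first use thread-safe:
 *
 *   hb_lazy_loader_t<SomeStruct> loader;
 *   loader.init (face);
 *   const SomeStruct *s = loader.get (); // created lazily on first call
 *   ...
 *   loader.fini ();
 */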
1253 
1254 /* Logic is shared between hb_lazy_loader_t and hb_table_lazy_loader_t */
1255 template <typename T>
1256 struct hb_table_lazy_loader_t
1257 {
1258   inline void init (hb_face_t *face_)
1259   {
1260     face = face_;
1261     blob = nullptr;
1262   }
1263 
1264   inline void fini (void)
1265   {
1266     hb_blob_destroy (blob);
1267   }
1268 
1269   inline const T* get (void) const
1270   {
1271   retry:
1272     hb_blob_t *blob_ = (hb_blob_t *) hb_atomic_ptr_get (&blob);
1273     if (unlikely (!blob_))
1274     {
1275       blob_ = OT::Sanitizer<T>().sanitize (face->reference_table (T::tableTag));
1276       if (!hb_atomic_ptr_cmpexch (&blob, nullptr, blob_))

1277       {
1278         hb_blob_destroy (blob_);
1279         goto retry;
1280       }
1281       blob = blob_;


1282     }
1283     return blob_->as<T> ();
1284   }
1285 
1286   inline const T* operator-> (void) const

1287   {
1288     return get();
1289   }
1290 
1291   private:
1292   hb_face_t *face;
1293   mutable hb_blob_t *blob;


1294 };
1295 
1296 
1297 } /* namespace OT */
1298 
1299 
1300 #endif /* HB_OPEN_TYPE_PRIVATE_HH */


   9  * software and its documentation for any purpose, provided that the
  10  * above copyright notice and the following two paragraphs appear in
  11  * all copies of this software.
  12  *
  13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
  14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
  15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
  16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
  17  * DAMAGE.
  18  *
  19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
  20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
  21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
  22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
  23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
  24  *
  25  * Red Hat Author(s): Behdad Esfahbod
  26  * Google Author(s): Behdad Esfahbod
  27  */
  28 
  29 #ifndef HB_OPEN_TYPE_HH
  30 #define HB_OPEN_TYPE_HH
  31 
  32 #include "hb.hh"
  33 #include "hb-blob.hh"
  34 #include "hb-face.hh"
  35 #include "hb-machinery.hh"
  36 #include "hb-subset.hh"
  37 
  38 
  39 namespace OT {
  40 
  41 
  42 /*
  43  *
  44  * The OpenType Font File: Data Types
  45  */
  46 
  47 
  48 /* "The following data types are used in the OpenType font file.
  49  *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
  50 
  51 /*
  52  * Int types
  53  */
  54 
  55 template <bool is_signed> struct hb_signedness_int;
  56 template <> struct hb_signedness_int<false> { typedef unsigned int value; };
  57 template <> struct hb_signedness_int<true>  { typedef   signed int value; };
  58 
  59 /* Integer types in big-endian order and no alignment requirement */
  60 template <typename Type, unsigned int Size>
  61 struct IntType
  62 {
  63   typedef Type type;
  64   typedef typename hb_signedness_int<hb_is_signed<Type>::value>::value wide_type;
  65 
  66   void set (wide_type i) { v.set (i); }
  67   operator wide_type () const { return v; }
  68   bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  69   bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  70   static int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  71   template <typename Type2>
  72   int cmp (Type2 a) const
  73   {
  74     Type b = v;
  75     if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int))
  76       return (int) a - (int) b;
  77     else
  78       return a < b ? -1 : a == b ? 0 : +1;
  79   }
  80   bool sanitize (hb_sanitize_context_t *c) const
  81   {
  82     TRACE_SANITIZE (this);
  83     return_trace (likely (c->check_struct (this)));
  84   }
  85   protected:
  86   BEInt<Type, Size> v;
  87   public:
  88   DEFINE_SIZE_STATIC (Size);
  89 };
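/* Note on wide_type: reads and writes go through a full-width int of the
 * matching signedness, so e.g. an HBUINT16 that was set to 0xFFFFu reads
 * back as the unsigned int value 65535, while a signed HBINT16 reads back
 * through a plain (signed) int. */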
  90 
  91 typedef IntType<uint8_t,  1> HBUINT8;   /* 8-bit unsigned integer. */
  92 typedef IntType<int8_t,   1> HBINT8;    /* 8-bit signed integer. */
  93 typedef IntType<uint16_t, 2> HBUINT16;  /* 16-bit unsigned integer. */
  94 typedef IntType<int16_t,  2> HBINT16;   /* 16-bit signed integer. */
  95 typedef IntType<uint32_t, 4> HBUINT32;  /* 32-bit unsigned integer. */
  96 typedef IntType<int32_t,  4> HBINT32;   /* 32-bit signed integer. */
   97 /* Note: we cannot define a signed HBINT24 because there's no corresponding C type.
   98  * This works for unsigned, but not signed, since we rely on the compiler for sign-extension. */
  99 typedef IntType<uint32_t, 3> HBUINT24;  /* 24-bit unsigned integer. */
 100 
 101 /* 16-bit signed integer (HBINT16) that describes a quantity in FUnits. */
 102 typedef HBINT16 FWORD;
 103 
 104 /* 32-bit signed integer (HBINT32) that describes a quantity in FUnits. */
 105 typedef HBINT32 FWORD32;
 106 
 107 /* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */
 108 typedef HBUINT16 UFWORD;
 109 
 110 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
 111 struct F2DOT14 : HBINT16
 112 {
 113   // 16384 means 1<<14
 114   float to_float () const  { return ((int32_t) v) / 16384.f; }
 115   void set_float (float f) { v.set (round (f * 16384.f)); }
 116   public:
 117   DEFINE_SIZE_STATIC (2);
 118 };
 119 
 120 /* 32-bit signed fixed-point number (16.16). */
 121 struct Fixed : HBINT32
 122 {
 123   // 65536 means 1<<16
 124   float to_float () const  { return ((int32_t) v) / 65536.f; }
 125   void set_float (float f) { v.set (round (f * 65536.f)); }
 126   public:
 127   DEFINE_SIZE_STATIC (4);
 128 };
 129 
 130 /* Date represented in number of seconds since 12:00 midnight, January 1,
 131  * 1904. The value is represented as a signed 64-bit integer. */
 132 struct LONGDATETIME
 133 {
 134   bool sanitize (hb_sanitize_context_t *c) const
 135   {
 136     TRACE_SANITIZE (this);
 137     return_trace (likely (c->check_struct (this)));
 138   }
 139   protected:
 140   HBINT32 major;
 141   HBUINT32 minor;
 142   public:
 143   DEFINE_SIZE_STATIC (8);
 144 };
 145 
 146 /* Array of four uint8s (length = 32 bits) used to identify a script, language
 147  * system, feature, or baseline */
 148 struct Tag : HBUINT32
 149 {
 150   /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
 151   operator const char* () const { return reinterpret_cast<const char *> (&this->v); }
 152   operator char* ()             { return reinterpret_cast<char *> (&this->v); }
 153   public:
 154   DEFINE_SIZE_STATIC (4);
 155 };

 156 
 157 /* Glyph index number, same as uint16 (length = 16 bits) */
 158 typedef HBUINT16 GlyphID;
 159 



 160 /* Script/language-system/feature index */
 161 struct Index : HBUINT16 {
 162   static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFu;
 163 };
 164 DECLARE_NULL_NAMESPACE_BYTES (OT, Index);
 165 
 166 typedef Index NameID;
 167 
 168 /* Offset, Null offset = 0 */
 169 template <typename Type, bool has_null=true>
 170 struct Offset : Type
 171 {
 172   typedef Type type;
 173 
 174   bool is_null () const { return has_null && 0 == *this; }
 175 
 176   void *serialize (hb_serialize_context_t *c, const void *base)
 177   {
 178     void *t = c->start_embed<void> ();
 179     this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
 180     return t;
 181   }
 182 
 183   public:
 184   DEFINE_SIZE_STATIC (sizeof (Type));
 185 };
 186 
 187 typedef Offset<HBUINT16> Offset16;
 188 typedef Offset<HBUINT32> Offset32;
 189 
 190 
 191 /* CheckSum */
 192 struct CheckSum : HBUINT32
 193 {
  194   /* This is the reference implementation from the spec. */
 195   static uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
 196   {
 197     uint32_t Sum = 0L;
 198     assert (0 == (Length & 3));
 199     const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size;
 200 
 201     while (Table < EndPtr)
 202       Sum += *Table++;
 203     return Sum;
 204   }
 205 
  206   /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
 207   void set_for_data (const void *data, unsigned int length)
 208   { set (CalcTableChecksum ((const HBUINT32 *) data, length)); }
 209 
 210   public:
 211   DEFINE_SIZE_STATIC (4);
 212 };
 213 
 214 
 215 /*
 216  * Version Numbers
 217  */
 218 
 219 template <typename FixedType=HBUINT16>
 220 struct FixedVersion
 221 {
 222   uint32_t to_int () const { return (major << (sizeof (FixedType) * 8)) + minor; }
 223 
 224   bool sanitize (hb_sanitize_context_t *c) const
 225   {
 226     TRACE_SANITIZE (this);
 227     return_trace (c->check_struct (this));
 228   }
 229 
 230   FixedType major;
 231   FixedType minor;
 232   public:
 233   DEFINE_SIZE_STATIC (2 * sizeof (FixedType));
 234 };
 235 
 236 

 237 /*
 238  * Template subclasses of Offset that do the dereferencing.
 239  * Use: (base+offset)
 240  */
 241 
 242 template <typename Type, bool has_null>
 243 struct _hb_has_null
 244 {
 245   static const Type *get_null () { return nullptr; }
 246   static Type *get_crap ()       { return nullptr; }
 247 };
 248 template <typename Type>
 249 struct _hb_has_null<Type, true>
 250 {
 251   static const Type *get_null () { return &Null(Type); }
 252   static Type *get_crap ()       { return &Crap(Type); }
 253 };
 254 
 255 template <typename Type, typename OffsetType=HBUINT16, bool has_null=true>
 256 struct OffsetTo : Offset<OffsetType, has_null>
 257 {
 258   const Type& operator () (const void *base) const
 259   {
 260     if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_null ();
 261     return StructAtOffset<const Type> (base, *this);

 262   }
 263   Type& operator () (void *base) const
 264   {
 265     if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_crap ();
 266     return StructAtOffset<Type> (base, *this);

 267   }
 268 
 269   Type& serialize (hb_serialize_context_t *c, const void *base)
 270   {
 271     return * (Type *) Offset<OffsetType>::serialize (c, base);
 272   }
 273 
 274   template <typename T>
 275   void serialize_subset (hb_subset_context_t *c, const T &src, const void *base)
 276   {
 277     if (&src == &Null (T))
 278     {
 279       this->set (0);
 280       return;
 281     }
 282     serialize (c->serializer, base);
 283     if (!src.subset (c))
 284       this->set (0);
 285   }
 286 
 287   bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const
 288   {
 289     TRACE_SANITIZE (this);
 290     if (unlikely (!c->check_struct (this))) return_trace (false);
 291     if (unlikely (this->is_null ())) return_trace (true);
 292     if (unlikely (!c->check_range (base, *this))) return_trace (false);
 293     return_trace (true);


 294   }
 295 
 296   bool sanitize (hb_sanitize_context_t *c, const void *base) const
 297   {
 298     TRACE_SANITIZE (this);
 299     return_trace (sanitize_shallow (c, base) &&
 300                   (this->is_null () ||
 301                    StructAtOffset<Type> (base, *this).sanitize (c) ||
 302                    neuter (c)));
 303   }
 304   template <typename T1>
 305   bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1) const
 306   {
 307     TRACE_SANITIZE (this);
 308     return_trace (sanitize_shallow (c, base) &&
 309                   (this->is_null () ||
 310                    StructAtOffset<Type> (base, *this).sanitize (c, d1) ||
 311                    neuter (c)));
 312   }
 313   template <typename T1, typename T2>
 314   bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2) const
 315   {
 316     TRACE_SANITIZE (this);
 317     return_trace (sanitize_shallow (c, base) &&
 318                   (this->is_null () ||
 319                    StructAtOffset<Type> (base, *this).sanitize (c, d1, d2) ||
 320                    neuter (c)));
 321   }
 322   template <typename T1, typename T2, typename T3>
 323   bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2, T3 d3) const
 324   {
 325     TRACE_SANITIZE (this);
 326     return_trace (sanitize_shallow (c, base) &&
 327                   (this->is_null () ||
 328                    StructAtOffset<Type> (base, *this).sanitize (c, d1, d2, d3) ||
 329                    neuter (c)));
 330   }
 331 
 332   /* Set the offset to Null */
 333   bool neuter (hb_sanitize_context_t *c) const
 334   {
 335     if (!has_null) return false;
 336     return c->try_set (this, 0);
 337   }
 338   DEFINE_SIZE_STATIC (sizeof (OffsetType));
 339 };
 340 /* Partial specializations. */
 341 template <typename Type,                               bool has_null=true> struct   LOffsetTo : OffsetTo<Type, HBUINT32,   has_null> {};
 342 template <typename Type, typename OffsetType=HBUINT16                    > struct  NNOffsetTo : OffsetTo<Type, OffsetType, false> {};
 343 template <typename Type                                                  > struct LNNOffsetTo : OffsetTo<Type, HBUINT32,   false> {};
 344 
 345 template <typename Base, typename OffsetType, bool has_null, typename Type>
 346 static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType, has_null> &offset) { return offset (base); }
 347 template <typename Base, typename OffsetType, bool has_null, typename Type>
 348 static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType, has_null> &offset) { return offset (base); }
 349 
 350 
 351 /*
 352  * Array Types
 353  */
 354 


 355 template <typename Type>
 356 struct UnsizedArrayOf
 357 {
 358   typedef Type item_t;
 359   static constexpr unsigned item_size = hb_static_size (Type);
 360 
 361   HB_NO_CREATE_COPY_ASSIGN_TEMPLATE (UnsizedArrayOf, Type);
 362 
 363   const Type& operator [] (int i_) const
 364   {
 365     unsigned int i = (unsigned int) i_;
 366     const Type *p = &arrayZ[i];
 367     if (unlikely (p < arrayZ)) return Null (Type); /* Overflowed. */
 368     return *p;
 369   }
 370   Type& operator [] (int i_)
 371   {
 372     unsigned int i = (unsigned int) i_;
 373     Type *p = &arrayZ[i];
 374     if (unlikely (p < arrayZ)) return Crap (Type); /* Overflowed. */
 375     return *p;
 376   }
 377 
 378   unsigned int get_size (unsigned int len) const
 379   { return len * Type::static_size; }
 380 
 381   template <typename T> operator T * () { return arrayZ; }
 382   template <typename T> operator const T * () const { return arrayZ; }
 383   hb_array_t<Type> as_array (unsigned int len)
 384   { return hb_array (arrayZ, len); }
 385   hb_array_t<const Type> as_array (unsigned int len) const
 386   { return hb_array (arrayZ, len); }
 387   operator hb_array_t<Type> ()             { return as_array (); }
 388   operator hb_array_t<const Type> () const { return as_array (); }
 389 
 390   template <typename T>
 391   Type &lsearch (unsigned int len, const T &x, Type &not_found = Crap (Type))
 392   { return *as_array (len).lsearch (x, &not_found); }
 393   template <typename T>
 394   const Type &lsearch (unsigned int len, const T &x, const Type &not_found = Null (Type)) const
 395   { return *as_array (len).lsearch (x, &not_found); }
 396 
 397   void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1)
 398   { as_array (len).qsort (start, end); }
 399 
 400   bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
 401   {
 402     TRACE_SANITIZE (this);
 403     if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 404 
 405     /* Note: for structs that do not reference other structs,
 406      * we do not need to call their sanitize() as we already did
 407      * a bound check on the aggregate array size.  We just include
 408      * a small unreachable expression to make sure the structs
 409      * pointed to do have a simple sanitize(), i.e. they do not
 410      * reference other structs via offsets.
 411      */
 412     (void) (false && arrayZ[0].sanitize (c));
 413 
 414     return_trace (true);
 415   }
 416   bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const
 417   {
 418     TRACE_SANITIZE (this);
 419     if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 420     for (unsigned int i = 0; i < count; i++)
 421       if (unlikely (!arrayZ[i].sanitize (c, base)))
 422         return_trace (false);
 423     return_trace (true);
 424   }
 425   template <typename T>
 426   bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const
 427   {
 428     TRACE_SANITIZE (this);
 429     if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 430     for (unsigned int i = 0; i < count; i++)
 431       if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
 432         return_trace (false);
 433     return_trace (true);
 434   }
 435 
 436   bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const
 437   {
 438     TRACE_SANITIZE (this);
 439     return_trace (c->check_array (arrayZ, count));
 440   }
 441 
 442   public:
 443   Type          arrayZ[VAR];
 444   public:
 445   DEFINE_SIZE_UNBOUNDED (0);
 446 };
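/* Example (illustrative sketch): an UnsizedArrayOf carries no length of its
 * own; the enclosing struct supplies the element count.  `GlyphList' and its
 * members are hypothetical.
 *
 *   struct GlyphList
 *   {
 *     bool sanitize (hb_sanitize_context_t *c) const
 *     {
 *       TRACE_SANITIZE (this);
 *       return_trace (c->check_struct (this) &&
 *                     glyphsZ.sanitize (c, glyphCount));
 *     }
 *
 *     HBUINT16                 glyphCount;
 *     UnsizedArrayOf<HBUINT16> glyphsZ;    // glyphCount elements follow
 *     DEFINE_SIZE_ARRAY (2, glyphsZ);
 *   };
 *
 * Element access then goes through an explicit length, e.g.
 * list.glyphsZ.as_array (list.glyphCount)[i]. */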
 447 
 448 /* Unsized array of offsets */
 449 template <typename Type, typename OffsetType, bool has_null=true>
 450 struct UnsizedOffsetArrayOf : UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null> > {};
 451 
 452 /* Unsized array of offsets relative to the beginning of the array itself. */
 453 template <typename Type, typename OffsetType, bool has_null=true>
 454 struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType, has_null>
 455 {
 456   const Type& operator [] (int i_) const
 457   {
 458     unsigned int i = (unsigned int) i_;
 459     const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i];
 460     if (unlikely (p < this->arrayZ)) return Null (Type); /* Overflowed. */
 461     return this+*p;
 462   }
 463   Type& operator [] (int i_)
 464   {
 465     unsigned int i = (unsigned int) i_;
 466     const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i];
 467     if (unlikely (p < this->arrayZ)) return Crap (Type); /* Overflowed. */
 468     return this+*p;
 469   }
 470 
 471 
 472   bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
 473   {
 474     TRACE_SANITIZE (this);
 475     return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>::sanitize (c, count, this)));
 476   }
 477   template <typename T>
 478   bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const
 479   {
 480     TRACE_SANITIZE (this);
 481     return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>::sanitize (c, count, this, user_data)));
 482   }
 483 };
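/* Note (illustrative): in an UnsizedOffsetListOf the stored offsets are
 * measured from the start of the offset array itself, so element i resolves as
 *
 *   StructAtOffset<Type> (&list, list.arrayZ[i])
 *
 * (with the usual Null handling when the offset is zero), which is what the
 * `this+*p' in the operator[] overloads above computes. */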
 484 
 485 /* An array with sorted elements.  Supports binary searching. */
 486 template <typename Type>
 487 struct SortedUnsizedArrayOf : UnsizedArrayOf<Type>
 488 {
 489   hb_sorted_array_t<Type> as_array (unsigned int len)
 490   { return hb_sorted_array (this->arrayZ, len); }
 491   hb_sorted_array_t<const Type> as_array (unsigned int len) const
 492   { return hb_sorted_array (this->arrayZ, len); }
 493   operator hb_sorted_array_t<Type> ()             { return as_array (); }
 494   operator hb_sorted_array_t<const Type> () const { return as_array (); }
 495 
 496   template <typename T>
 497   Type &bsearch (unsigned int len, const T &x, Type &not_found = Crap (Type))
 498   { return *as_array (len).bsearch (x, &not_found); }
 499   template <typename T>
 500   const Type &bsearch (unsigned int len, const T &x, const Type &not_found = Null (Type)) const
 501   { return *as_array (len).bsearch (x, &not_found); }
 502   template <typename T>
 503   bool bfind (unsigned int len, const T &x, unsigned int *i = nullptr,
 504                      hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE,
 505                      unsigned int to_store = (unsigned int) -1) const
 506   { return as_array (len).bfind (x, i, not_found, to_store); }
 507 };
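/* Example (illustrative sketch): binary-searching a sorted unsized array.  The
 * caller still supplies the element count; `values', `count' and `target' are
 * hypothetical.
 *
 *   unsigned int pos;
 *   if (values.bfind (count, target, &pos))
 *     { ...values[pos] is the matching element... }
 */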
 508 
 509 
 510 /* An array with a number of elements. */
 511 template <typename Type, typename LenType=HBUINT16>
 512 struct ArrayOf
 513 {
 514   typedef Type item_t;
 515   static constexpr unsigned item_size = hb_static_size (Type);
 516 
 517   HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (ArrayOf, Type, LenType);
 518 
 519   const Type& operator [] (int i_) const
 520   {
 521     unsigned int i = (unsigned int) i_;
 522     if (unlikely (i >= len)) return Null (Type);
 523     return arrayZ[i];
 524   }
 525   Type& operator [] (int i_)
 526   {
 527     unsigned int i = (unsigned int) i_;
 528     if (unlikely (i >= len)) return Crap (Type);
 529     return arrayZ[i];
 530   }
 531 
 532   unsigned int get_size () const
 533   { return len.static_size + len * Type::static_size; }
 534 
 535   hb_array_t<Type> as_array ()
 536   { return hb_array (arrayZ, len); }
 537   hb_array_t<const Type> as_array () const
 538   { return hb_array (arrayZ, len); }
 539   operator hb_array_t<Type> (void)             { return as_array (); }
 540   operator hb_array_t<const Type> (void) const { return as_array (); }
 541 
 542   hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
 543   { return as_array ().sub_array (start_offset, count);}
 544   hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
 545   { return as_array ().sub_array (start_offset, count);}
 546   hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
 547   { return as_array ().sub_array (start_offset, count);}
 548   hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
 549   { return as_array ().sub_array (start_offset, count);}
 550 
 551   bool serialize (hb_serialize_context_t *c, unsigned int items_len)
 552   {
 553     TRACE_SERIALIZE (this);
 554     if (unlikely (!c->extend_min (*this))) return_trace (false);
 555     len.set (items_len); /* TODO(serialize) Overflow? */
 556     if (unlikely (!c->extend (*this))) return_trace (false);
 557     return_trace (true);
 558   }
 559   template <typename T>
 560   bool serialize (hb_serialize_context_t *c, hb_array_t<const T> items)
 561   {
 562     TRACE_SERIALIZE (this);
 563     if (unlikely (!serialize (c, items.length))) return_trace (false);
 564     for (unsigned int i = 0; i < items.length; i++)
 565       hb_assign (arrayZ[i], items[i]);
 566     return_trace (true);
 567   }
 568 
 569   bool sanitize (hb_sanitize_context_t *c) const
 570   {
 571     TRACE_SANITIZE (this);
 572     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 573 
 574     /* Note: for structs that do not reference other structs,
 575      * we do not need to call their sanitize() as we already did
 576      * a bound check on the aggregate array size.  We just include
 577      * a small unreachable expression to make sure the structs
 578      * pointed to do have a simple sanitize(), ie. they do not
 579      * reference other structs via offsets.
 580      */
 581     (void) (false && arrayZ[0].sanitize (c));
 582 
 583     return_trace (true);
 584   }
 585   bool sanitize (hb_sanitize_context_t *c, const void *base) const
 586   {
 587     TRACE_SANITIZE (this);
 588     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 589     unsigned int count = len;
 590     for (unsigned int i = 0; i < count; i++)
 591       if (unlikely (!arrayZ[i].sanitize (c, base)))
 592         return_trace (false);
 593     return_trace (true);
 594   }
 595   template <typename T>
 596   bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 597   {
 598     TRACE_SANITIZE (this);
 599     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 600     unsigned int count = len;
 601     for (unsigned int i = 0; i < count; i++)
 602       if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
 603         return_trace (false);
 604     return_trace (true);
 605   }
 606 
 607   template <typename T>
 608   Type &lsearch (const T &x, Type &not_found = Crap (Type))
 609   { return *as_array ().lsearch (x, &not_found); }
 610   template <typename T>
 611   const Type &lsearch (const T &x, const Type &not_found = Null (Type)) const
 612   { return *as_array ().lsearch (x, &not_found); }
 613 
 614   void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1)
 615   { as_array ().qsort (start, end); }
 616 
 617   bool sanitize_shallow (hb_sanitize_context_t *c) const
 618   {
 619     TRACE_SANITIZE (this);
 620     return_trace (len.sanitize (c) && c->check_array (arrayZ, len));
 621   }
 622 
 623   public:
 624   LenType       len;
 625   Type          arrayZ[VAR];
 626   public:
 627   DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
 628 };
 629 template <typename Type> struct LArrayOf : ArrayOf<Type, HBUINT32> {};
 630 typedef ArrayOf<HBUINT8, HBUINT8> PString;
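/* Example (illustrative): the on-disk layout of an ArrayOf is the length field
 * immediately followed by the items, e.g.
 *
 *   ArrayOf<HBUINT16>  ->  HBUINT16 len; HBUINT16 data[len];
 *   PString            ->  HBUINT8  len; HBUINT8  data[len];   // Pascal string
 *
 * Serializing from an hb_array_t (sketch; `c' is an hb_serialize_context_t and
 * `out' is assumed to point into its buffer, e.g. from start_embed):
 *
 *   hb_array_t<const HBUINT16> src = ...;
 *   ArrayOf<HBUINT16> *out = c->start_embed<ArrayOf<HBUINT16> > ();
 *   if (unlikely (!out || !out->serialize (c, src))) return false;
 */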
 631 
 632 /* Array of Offsets */
 633 template <typename Type>
 634 struct OffsetArrayOf : ArrayOf<OffsetTo<Type, HBUINT16> > {};
 635 template <typename Type>
 636 struct LOffsetArrayOf : ArrayOf<OffsetTo<Type, HBUINT32> > {};
 637 template <typename Type>
 638 struct LOffsetLArrayOf : ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32> {};
 639 
 640 /* Array of offsets relative to the beginning of the array itself. */
 641 template <typename Type>
 642 struct OffsetListOf : OffsetArrayOf<Type>
 643 {
 644   const Type& operator [] (int i_) const
 645   {
 646     unsigned int i = (unsigned int) i_;
 647     if (unlikely (i >= this->len)) return Null (Type);
 648     return this+this->arrayZ[i];
 649   }
 650   const Type& operator [] (int i_)
 651   {
 652     unsigned int i = (unsigned int) i_;
 653     if (unlikely (i >= this->len)) return Crap (Type);
 654     return this+this->arrayZ[i];
 655   }
 656 
 657   bool subset (hb_subset_context_t *c) const
 658   {
 659     TRACE_SUBSET (this);
 660     struct OffsetListOf<Type> *out = c->serializer->embed (*this);
 661     if (unlikely (!out)) return_trace (false);
 662     unsigned int count = this->len;
 663     for (unsigned int i = 0; i < count; i++)
 664       out->arrayZ[i].serialize_subset (c, (*this)[i], out);
 665     return_trace (true);
 666   }
 667 
 668   bool sanitize (hb_sanitize_context_t *c) const
 669   {
 670     TRACE_SANITIZE (this);
 671     return_trace (OffsetArrayOf<Type>::sanitize (c, this));
 672   }
 673   template <typename T>
 674   bool sanitize (hb_sanitize_context_t *c, T user_data) const
 675   {
 676     TRACE_SANITIZE (this);
 677     return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
 678   }
 679 };
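/* Note (illustrative): an OffsetListOf stores offsets that are relative to the
 * start of the list itself (hence the `this+' in operator[] above).  Its
 * subset() copies the list (length plus offset array) into the new blob, then
 * re-serializes each target through serialize_subset(), which rewrites the
 * copied offsets to point at the subsetted targets. */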
 680 
 681 /* An array starting at the second element. */
 682 template <typename Type, typename LenType=HBUINT16>
 683 struct HeadlessArrayOf
 684 {
 685   static constexpr unsigned item_size = Type::static_size;
 686 
 687   HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (HeadlessArrayOf, Type, LenType);
 688 
 689   const Type& operator [] (int i_) const
 690   {
 691     unsigned int i = (unsigned int) i_;
 692     if (unlikely (i >= lenP1 || !i)) return Null (Type);
 693     return arrayZ[i-1];
 694   }
 695   Type& operator [] (int i_)
 696   {
 697     unsigned int i = (unsigned int) i_;
 698     if (unlikely (i >= lenP1 || !i)) return Crap (Type);
 699     return arrayZ[i-1];
 700   }
 701   unsigned int get_size () const
 702   { return lenP1.static_size + (lenP1 ? lenP1 - 1 : 0) * Type::static_size; }
 703 
 704   bool serialize (hb_serialize_context_t *c,
 705                   hb_array_t<const Type> items)
 706   {
 707     TRACE_SERIALIZE (this);
 708     if (unlikely (!c->extend_min (*this))) return_trace (false);
 709     lenP1.set (items.length + 1); /* TODO(serialize) Overflow? */
 710     if (unlikely (!c->extend (*this))) return_trace (false);
 711     for (unsigned int i = 0; i < items.length; i++)
 712       arrayZ[i] = items[i];
 713     return_trace (true);
 714   }
 715 
 716   bool sanitize (hb_sanitize_context_t *c) const
 717   {
 718     TRACE_SANITIZE (this);
 719     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 720 
 721     /* Note: for structs that do not reference other structs,
 722      * we do not need to call their sanitize() as we already did
 723      * a bound check on the aggregate array size.  We just include
 724      * a small unreachable expression to make sure the structs
 725      * pointed to do have a simple sanitize(), i.e. they do not
 726      * reference other structs via offsets.
 727      */
 728     (void) (false && arrayZ[0].sanitize (c));
 729 
 730     return_trace (true);
 731   }
 732 
 733   private:
 734   bool sanitize_shallow (hb_sanitize_context_t *c) const
 735   {
 736     TRACE_SANITIZE (this);
 737     return_trace (lenP1.sanitize (c) &&
 738                   (!lenP1 || c->check_array (arrayZ, lenP1 - 1)));
 739   }
 740 
 741   public:
 742   LenType       lenP1;
 743   Type          arrayZ[VAR];
 744   public:
 745   DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
 746 };
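/* Note (illustrative): a HeadlessArrayOf stores lenP1 = the logical length, but
 * only lenP1 - 1 elements; the first element of the sequence is implied by the
 * surrounding table.  E.g. with lenP1 == 3, arrayZ holds two items, reachable
 * as [1] and [2]; [0] returns Null (Type) because the head is not stored here. */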
 747 
 748 /* An array storing length-1. */
 749 template <typename Type, typename LenType=HBUINT16>
 750 struct ArrayOfM1
 751 {
 752   HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (ArrayOfM1, Type, LenType);
 753 
 754   const Type& operator [] (int i_) const
 755   {
 756     unsigned int i = (unsigned int) i_;
 757     if (unlikely (i > lenM1)) return Null (Type);
 758     return arrayZ[i];
 759   }
 760   Type& operator [] (int i_)
 761   {
 762     unsigned int i = (unsigned int) i_;
 763     if (unlikely (i > lenM1)) return Crap (Type);
 764     return arrayZ[i];
 765   }
 766   unsigned int get_size () const
 767   { return lenM1.static_size + (lenM1 + 1) * Type::static_size; }
 768 
 769   template <typename T>
 770   bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 771   {
 772     TRACE_SANITIZE (this);
 773     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 774     unsigned int count = lenM1 + 1;
 775     for (unsigned int i = 0; i < count; i++)
 776       if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
 777         return_trace (false);
 778     return_trace (true);
 779   }
 780 
 781   private:
 782   bool sanitize_shallow (hb_sanitize_context_t *c) const
 783   {
 784     TRACE_SANITIZE (this);
 785     return_trace (lenM1.sanitize (c) &&
 786                   (c->check_array (arrayZ, lenM1 + 1)));
 787   }
 788 
 789   public:
 790   LenType       lenM1;
 791   Type          arrayZ[VAR];
 792   public:
 793   DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
 794 };
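/* Note (illustrative): ArrayOfM1 stores lenM1 = count - 1, so it always
 * describes at least one element: lenM1 == 0 means one item, and get_size()
 * is sizeof (LenType) + (lenM1 + 1) * Type::static_size. */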
 795 
 796 /* An array with sorted elements.  Supports binary searching. */
 797 template <typename Type, typename LenType=HBUINT16>
 798 struct SortedArrayOf : ArrayOf<Type, LenType>
 799 {
 800   hb_sorted_array_t<Type> as_array ()
 801   { return hb_sorted_array (this->arrayZ, this->len); }
 802   hb_sorted_array_t<const Type> as_array () const
 803   { return hb_sorted_array (this->arrayZ, this->len); }
 804   operator hb_sorted_array_t<Type> ()             { return as_array (); }
 805   operator hb_sorted_array_t<const Type> () const { return as_array (); }
 806 
 807   hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
 808   { return as_array ().sub_array (start_offset, count);}
 809   hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
 810   { return as_array ().sub_array (start_offset, count);}
 811   hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
 812   { return as_array ().sub_array (start_offset, count);}
 813   hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
 814   { return as_array ().sub_array (start_offset, count);}
 815 
 816   template <typename T>
 817   Type &bsearch (const T &x, Type &not_found = Crap (Type))
 818   { return *as_array ().bsearch (x, &not_found); }
 819   template <typename T>
 820   const Type &bsearch (const T &x, const Type &not_found = Null (Type)) const
 821   { return *as_array ().bsearch (x, &not_found); }
 822   template <typename T>
 823   bool bfind (const T &x, unsigned int *i = nullptr,
 824                      hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE,
 825                      unsigned int to_store = (unsigned int) -1) const
 826   { return as_array ().bfind (x, i, not_found, to_store); }
 827 };
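/* Example (illustrative sketch): a SortedArrayOf knows its own length, so
 * lookups take no count argument.  `ranges' and `target' are hypothetical;
 * the element type's cmp() is what drives the comparison.
 *
 *   unsigned int pos;
 *   if (ranges.bfind (target, &pos))
 *     { ...ranges[pos] is the matching element... }
 */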
 828 
 829 /*
 830  * Binary-search arrays
 831  */
 832 
 833 template <typename LenType=HBUINT16>
 834 struct BinSearchHeader
 835 {
 836   operator uint32_t () const { return len; }
 837 
 838   bool sanitize (hb_sanitize_context_t *c) const
 839   {
 840     TRACE_SANITIZE (this);
 841     return_trace (c->check_struct (this));
 842   }
 843 
 844   void set (unsigned int v)
 845   {
 846     len.set (v);
 847     assert (len == v);
 848     entrySelector.set (MAX (1u, hb_bit_storage (v)) - 1);
 849     searchRange.set (16 * (1u << entrySelector));
 850     rangeShift.set (v * 16 > searchRange
 851                     ? 16 * v - searchRange
 852                     : 0);
 853   }
 854 
 855   protected:
 856   LenType       len;
 857   LenType       searchRange;
 858   LenType       entrySelector;
 859   LenType       rangeShift;
 860 
 861   public:
 862   DEFINE_SIZE_STATIC (8);
 863 };
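/* Worked example (illustrative) for set() above, which hard-codes a 16-byte
 * unit size: for v = 12 entries, hb_bit_storage (12) == 4, so
 *
 *   entrySelector = 4 - 1         = 3
 *   searchRange   = 16 * (1 << 3) = 128
 *   rangeShift    = 16 * 12 - 128 = 64
 *
 * matching the searchRange/entrySelector/rangeShift triple OpenType
 * binary-search headers (e.g. the sfnt table directory) specify. */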
 864 
 865 template <typename Type, typename LenType=HBUINT16>
 866 struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader<LenType> > {};
 867 
 868 
 869 struct VarSizedBinSearchHeader
 870 {
 871 
 872   bool sanitize (hb_sanitize_context_t *c) const
 873   {
 874     TRACE_SANITIZE (this);
 875     return_trace (c->check_struct (this));
 876   }
 877 
 878   HBUINT16      unitSize;       /* Size of a lookup unit for this search in bytes. */
 879   HBUINT16      nUnits;         /* Number of units of the preceding size to be searched. */
 880   HBUINT16      searchRange;    /* The value of unitSize times the largest power of 2
 881                                  * that is less than or equal to the value of nUnits. */
 882   HBUINT16      entrySelector;  /* The log base 2 of the largest power of 2 less than
 883                                  * or equal to the value of nUnits. */
 884   HBUINT16      rangeShift;     /* The value of unitSize times the difference of the
 885                                  * value of nUnits minus the largest power of 2 less
 886                                  * than or equal to the value of nUnits. */
 887   public:
 888   DEFINE_SIZE_STATIC (10);
 889 };
 890 
 891 template <typename Type>
 892 struct VarSizedBinSearchArrayOf
 893 {
 894   static constexpr unsigned item_size = Type::static_size;
 895 
 896   HB_NO_CREATE_COPY_ASSIGN_TEMPLATE (VarSizedBinSearchArrayOf, Type);
 897 
 898   bool last_is_terminator () const
 899   {
 900     if (unlikely (!header.nUnits)) return false;
 901 
 902     /* Gah.
 903      *
 904      * "The number of termination values that need to be included is table-specific.
 905      * The value that indicates binary search termination is 0xFFFF." */
 906     const HBUINT16 *words = &StructAtOffset<HBUINT16> (&bytesZ, (header.nUnits - 1) * header.unitSize);
 907     unsigned int count = Type::TerminationWordCount;
 908     for (unsigned int i = 0; i < count; i++)
 909       if (words[i] != 0xFFFFu)
 910         return false;
 911     return true;
 912   }
 913 
 914   const Type& operator [] (int i_) const
 915   {
 916     unsigned int i = (unsigned int) i_;
 917     if (unlikely (i >= get_length ())) return Null (Type);
 918     return StructAtOffset<Type> (&bytesZ, i * header.unitSize);
 919   }
 920   Type& operator [] (int i_)
 921   {
 922     unsigned int i = (unsigned int) i_;
 923     if (unlikely (i >= get_length ())) return Crap (Type);
 924     return StructAtOffset<Type> (&bytesZ, i * header.unitSize);
 925   }
 926   unsigned int get_length () const
 927   { return header.nUnits - last_is_terminator (); }
 928   unsigned int get_size () const
 929   { return header.static_size + header.nUnits * header.unitSize; }
 930 
 931   bool sanitize (hb_sanitize_context_t *c) const
 932   {
 933     TRACE_SANITIZE (this);
 934     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 935 
 936     /* Note: for structs that do not reference other structs,
 937      * we do not need to call their sanitize() as we already did
 938      * a bound check on the aggregate array size.  We just include
 939      * a small unreachable expression to make sure the structs
 940      * pointed to do have a simple sanitize(), i.e. they do not
 941      * reference other structs via offsets.
 942      */
 943     (void) (false && StructAtOffset<Type> (&bytesZ, 0).sanitize (c));
 944 
 945     return_trace (true);
 946   }
 947   bool sanitize (hb_sanitize_context_t *c, const void *base) const
 948   {
 949     TRACE_SANITIZE (this);
 950     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 951     unsigned int count = get_length ();
 952     for (unsigned int i = 0; i < count; i++)
 953       if (unlikely (!(*this)[i].sanitize (c, base)))
 954         return_trace (false);
 955     return_trace (true);
 956   }
 957   template <typename T>
 958   bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
 959   {
 960     TRACE_SANITIZE (this);
 961     if (unlikely (!sanitize_shallow (c))) return_trace (false);
 962     unsigned int count = get_length ();
 963     for (unsigned int i = 0; i < count; i++)
 964       if (unlikely (!(*this)[i].sanitize (c, base, user_data)))
 965         return_trace (false);
 966     return_trace (true);
 967   }
 968 
 969   template <typename T>
 970   const Type *bsearch (const T &key) const
 971   {
 972     unsigned int size = header.unitSize;
 973     int min = 0, max = (int) get_length () - 1;
 974     while (min <= max)
 975     {
 976       int mid = ((unsigned int) min + (unsigned int) max) / 2;
 977       const Type *p = (const Type *) (((const char *) &bytesZ) + (mid * size));
 978       int c = p->cmp (key);
 979       if (c < 0) max = mid - 1;
 980       else if (c > 0) min = mid + 1;
 981       else return p;
 982     }
 983     return nullptr;
 984   }
 985 
 986   private:
 987   bool sanitize_shallow (hb_sanitize_context_t *c) const
 988   {
 989     TRACE_SANITIZE (this);
 990     return_trace (header.sanitize (c) &&
 991                   Type::static_size <= header.unitSize &&
 992                   c->check_range (bytesZ.arrayZ,
 993                                   header.nUnits,
 994                                   header.unitSize));
 995   }
 996 
 997   protected:
 998   VarSizedBinSearchHeader       header;
 999   UnsizedArrayOf<HBUINT8>       bytesZ;
1000   public:
1001   DEFINE_SIZE_ARRAY (10, bytesZ);
1002 };
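/* Note (illustrative): a VarSizedBinSearchArrayOf is driven entirely by its
 * header: element i starts at bytesZ + i * header.unitSize, a trailing
 * all-0xFFFF unit (see last_is_terminator) is excluded from get_length (), and
 * bsearch() above compares Type::cmp() against the key over those
 * variable-sized units.  This layout is used, e.g., by AAT lookup tables whose
 * segment arrays end in a 0xFFFF sentinel. */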
1003 
1004 
1005 } /* namespace OT */
1006 
1007 
1008 #endif /* HB_OPEN_TYPE_HH */