/*
 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
 * Copyright © 2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_HH
#define HB_OPEN_TYPE_HH

#include "hb.hh"
#include "hb-blob.hh"
#include "hb-face.hh"
#include "hb-machinery.hh"
#include "hb-subset.hh"


namespace OT {


/*
 *
 * The OpenType Font File: Data Types
 */


/* "The following data types are used in the OpenType font file.
49 * All OpenType fonts use Motorola-style byte ordering (Big Endian):" */ 50 51 /* 52 * Int types 53 */ 54 55 template <bool is_signed> struct hb_signedness_int; 56 template <> struct hb_signedness_int<false> { typedef unsigned int value; }; 57 template <> struct hb_signedness_int<true> { typedef signed int value; }; 58 59 /* Integer types in big-endian order and no alignment requirement */ 60 template <typename Type, unsigned int Size> 61 struct IntType 62 { 63 typedef Type type; 64 typedef typename hb_signedness_int<hb_is_signed<Type>::value>::value wide_type; 65 66 void set (wide_type i) { v.set (i); } 67 operator wide_type () const { return v; } 68 bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; } 69 bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); } 70 static int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); } 71 template <typename Type2> 72 int cmp (Type2 a) const 73 { 74 Type b = v; 75 if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int)) 76 return (int) a - (int) b; 77 else 78 return a < b ? -1 : a == b ? 0 : +1; 79 } 80 bool sanitize (hb_sanitize_context_t *c) const 81 { 82 TRACE_SANITIZE (this); 83 return_trace (likely (c->check_struct (this))); 84 } 85 protected: 86 BEInt<Type, Size> v; 87 public: 88 DEFINE_SIZE_STATIC (Size); 89 }; 90 91 typedef IntType<uint8_t, 1> HBUINT8; /* 8-bit unsigned integer. */ 92 typedef IntType<int8_t, 1> HBINT8; /* 8-bit signed integer. */ 93 typedef IntType<uint16_t, 2> HBUINT16; /* 16-bit unsigned integer. */ 94 typedef IntType<int16_t, 2> HBINT16; /* 16-bit signed integer. */ 95 typedef IntType<uint32_t, 4> HBUINT32; /* 32-bit unsigned integer. */ 96 typedef IntType<int32_t, 4> HBINT32; /* 32-bit signed integer. */ 97 /* Note: we cannot defined a signed HBINT24 because there's no corresponding C type. 98 * Works for unsigned, but not signed, since we rely on compiler for sign-extension. 
*/ 99 typedef IntType<uint32_t, 3> HBUINT24; /* 24-bit unsigned integer. */ 100 101 /* 16-bit signed integer (HBINT16) that describes a quantity in FUnits. */ 102 typedef HBINT16 FWORD; 103 104 /* 32-bit signed integer (HBINT32) that describes a quantity in FUnits. */ 105 typedef HBINT32 FWORD32; 106 107 /* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */ 108 typedef HBUINT16 UFWORD; 109 110 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */ 111 struct F2DOT14 : HBINT16 112 { 113 // 16384 means 1<<14 114 float to_float () const { return ((int32_t) v) / 16384.f; } 115 void set_float (float f) { v.set (round (f * 16384.f)); } 116 public: 117 DEFINE_SIZE_STATIC (2); 118 }; 119 120 /* 32-bit signed fixed-point number (16.16). */ 121 struct Fixed : HBINT32 122 { 123 // 65536 means 1<<16 124 float to_float () const { return ((int32_t) v) / 65536.f; } 125 void set_float (float f) { v.set (round (f * 65536.f)); } 126 public: 127 DEFINE_SIZE_STATIC (4); 128 }; 129 130 /* Date represented in number of seconds since 12:00 midnight, January 1, 131 * 1904. The value is represented as a signed 64-bit integer. */ 132 struct LONGDATETIME 133 { 134 bool sanitize (hb_sanitize_context_t *c) const 135 { 136 TRACE_SANITIZE (this); 137 return_trace (likely (c->check_struct (this))); 138 } 139 protected: 140 HBINT32 major; 141 HBUINT32 minor; 142 public: 143 DEFINE_SIZE_STATIC (8); 144 }; 145 146 /* Array of four uint8s (length = 32 bits) used to identify a script, language 147 * system, feature, or baseline */ 148 struct Tag : HBUINT32 149 { 150 /* What the char* converters return is NOT nul-terminated. 
Print using "%.4s" */ 151 operator const char* () const { return reinterpret_cast<const char *> (&this->v); } 152 operator char* () { return reinterpret_cast<char *> (&this->v); } 153 public: 154 DEFINE_SIZE_STATIC (4); 155 }; 156 157 /* Glyph index number, same as uint16 (length = 16 bits) */ 158 typedef HBUINT16 GlyphID; 159 160 /* Script/language-system/feature index */ 161 struct Index : HBUINT16 { 162 static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFu; 163 }; 164 DECLARE_NULL_NAMESPACE_BYTES (OT, Index); 165 166 typedef Index NameID; 167 168 /* Offset, Null offset = 0 */ 169 template <typename Type, bool has_null=true> 170 struct Offset : Type 171 { 172 typedef Type type; 173 174 bool is_null () const { return has_null && 0 == *this; } 175 176 void *serialize (hb_serialize_context_t *c, const void *base) 177 { 178 void *t = c->start_embed<void> (); 179 this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */ 180 return t; 181 } 182 183 public: 184 DEFINE_SIZE_STATIC (sizeof (Type)); 185 }; 186 187 typedef Offset<HBUINT16> Offset16; 188 typedef Offset<HBUINT32> Offset32; 189 190 191 /* CheckSum */ 192 struct CheckSum : HBUINT32 193 { 194 /* This is reference implementation from the spec. */ 195 static uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length) 196 { 197 uint32_t Sum = 0L; 198 assert (0 == (Length & 3)); 199 const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size; 200 201 while (Table < EndPtr) 202 Sum += *Table++; 203 return Sum; 204 } 205 206 /* Note: data should be 4byte aligned and have 4byte padding at the end. 
*/ 207 void set_for_data (const void *data, unsigned int length) 208 { set (CalcTableChecksum ((const HBUINT32 *) data, length)); } 209 210 public: 211 DEFINE_SIZE_STATIC (4); 212 }; 213 214 215 /* 216 * Version Numbers 217 */ 218 219 template <typename FixedType=HBUINT16> 220 struct FixedVersion 221 { 222 uint32_t to_int () const { return (major << (sizeof (FixedType) * 8)) + minor; } 223 224 bool sanitize (hb_sanitize_context_t *c) const 225 { 226 TRACE_SANITIZE (this); 227 return_trace (c->check_struct (this)); 228 } 229 230 FixedType major; 231 FixedType minor; 232 public: 233 DEFINE_SIZE_STATIC (2 * sizeof (FixedType)); 234 }; 235 236 237 /* 238 * Template subclasses of Offset that do the dereferencing. 239 * Use: (base+offset) 240 */ 241 242 template <typename Type, bool has_null> 243 struct _hb_has_null 244 { 245 static const Type *get_null () { return nullptr; } 246 static Type *get_crap () { return nullptr; } 247 }; 248 template <typename Type> 249 struct _hb_has_null<Type, true> 250 { 251 static const Type *get_null () { return &Null(Type); } 252 static Type *get_crap () { return &Crap(Type); } 253 }; 254 255 template <typename Type, typename OffsetType=HBUINT16, bool has_null=true> 256 struct OffsetTo : Offset<OffsetType, has_null> 257 { 258 const Type& operator () (const void *base) const 259 { 260 if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_null (); 261 return StructAtOffset<const Type> (base, *this); 262 } 263 Type& operator () (void *base) const 264 { 265 if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_crap (); 266 return StructAtOffset<Type> (base, *this); 267 } 268 269 Type& serialize (hb_serialize_context_t *c, const void *base) 270 { 271 return * (Type *) Offset<OffsetType>::serialize (c, base); 272 } 273 274 template <typename T> 275 void serialize_subset (hb_subset_context_t *c, const T &src, const void *base) 276 { 277 if (&src == &Null (T)) 278 { 279 this->set (0); 280 return; 281 } 
282 serialize (c->serializer, base); 283 if (!src.subset (c)) 284 this->set (0); 285 } 286 287 bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const 288 { 289 TRACE_SANITIZE (this); 290 if (unlikely (!c->check_struct (this))) return_trace (false); 291 if (unlikely (this->is_null ())) return_trace (true); 292 if (unlikely (!c->check_range (base, *this))) return_trace (false); 293 return_trace (true); 294 } 295 296 bool sanitize (hb_sanitize_context_t *c, const void *base) const 297 { 298 TRACE_SANITIZE (this); 299 return_trace (sanitize_shallow (c, base) && 300 (this->is_null () || 301 StructAtOffset<Type> (base, *this).sanitize (c) || 302 neuter (c))); 303 } 304 template <typename T1> 305 bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1) const 306 { 307 TRACE_SANITIZE (this); 308 return_trace (sanitize_shallow (c, base) && 309 (this->is_null () || 310 StructAtOffset<Type> (base, *this).sanitize (c, d1) || 311 neuter (c))); 312 } 313 template <typename T1, typename T2> 314 bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2) const 315 { 316 TRACE_SANITIZE (this); 317 return_trace (sanitize_shallow (c, base) && 318 (this->is_null () || 319 StructAtOffset<Type> (base, *this).sanitize (c, d1, d2) || 320 neuter (c))); 321 } 322 template <typename T1, typename T2, typename T3> 323 bool sanitize (hb_sanitize_context_t *c, const void *base, T1 d1, T2 d2, T3 d3) const 324 { 325 TRACE_SANITIZE (this); 326 return_trace (sanitize_shallow (c, base) && 327 (this->is_null () || 328 StructAtOffset<Type> (base, *this).sanitize (c, d1, d2, d3) || 329 neuter (c))); 330 } 331 332 /* Set the offset to Null */ 333 bool neuter (hb_sanitize_context_t *c) const 334 { 335 if (!has_null) return false; 336 return c->try_set (this, 0); 337 } 338 DEFINE_SIZE_STATIC (sizeof (OffsetType)); 339 }; 340 /* Partial specializations. 
*/ 341 template <typename Type, bool has_null=true> struct LOffsetTo : OffsetTo<Type, HBUINT32, has_null> {}; 342 template <typename Type, typename OffsetType=HBUINT16 > struct NNOffsetTo : OffsetTo<Type, OffsetType, false> {}; 343 template <typename Type > struct LNNOffsetTo : OffsetTo<Type, HBUINT32, false> {}; 344 345 template <typename Base, typename OffsetType, bool has_null, typename Type> 346 static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType, has_null> &offset) { return offset (base); } 347 template <typename Base, typename OffsetType, bool has_null, typename Type> 348 static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType, has_null> &offset) { return offset (base); } 349 350 351 /* 352 * Array Types 353 */ 354 355 template <typename Type> 356 struct UnsizedArrayOf 357 { 358 typedef Type item_t; 359 static constexpr unsigned item_size = hb_static_size (Type); 360 361 HB_NO_CREATE_COPY_ASSIGN_TEMPLATE (UnsizedArrayOf, Type); 362 363 const Type& operator [] (int i_) const 364 { 365 unsigned int i = (unsigned int) i_; 366 const Type *p = &arrayZ[i]; 367 if (unlikely (p < arrayZ)) return Null (Type); /* Overflowed. */ 368 return *p; 369 } 370 Type& operator [] (int i_) 371 { 372 unsigned int i = (unsigned int) i_; 373 Type *p = &arrayZ[i]; 374 if (unlikely (p < arrayZ)) return Crap (Type); /* Overflowed. 
*/ 375 return *p; 376 } 377 378 unsigned int get_size (unsigned int len) const 379 { return len * Type::static_size; } 380 381 template <typename T> operator T * () { return arrayZ; } 382 template <typename T> operator const T * () const { return arrayZ; } 383 hb_array_t<Type> as_array (unsigned int len) 384 { return hb_array (arrayZ, len); } 385 hb_array_t<const Type> as_array (unsigned int len) const 386 { return hb_array (arrayZ, len); } 387 operator hb_array_t<Type> () { return as_array (); } 388 operator hb_array_t<const Type> () const { return as_array (); } 389 390 template <typename T> 391 Type &lsearch (unsigned int len, const T &x, Type ¬_found = Crap (Type)) 392 { return *as_array (len).lsearch (x, ¬_found); } 393 template <typename T> 394 const Type &lsearch (unsigned int len, const T &x, const Type ¬_found = Null (Type)) const 395 { return *as_array (len).lsearch (x, ¬_found); } 396 397 void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1) 398 { as_array (len).qsort (start, end); } 399 400 bool sanitize (hb_sanitize_context_t *c, unsigned int count) const 401 { 402 TRACE_SANITIZE (this); 403 if (unlikely (!sanitize_shallow (c, count))) return_trace (false); 404 405 /* Note: for structs that do not reference other structs, 406 * we do not need to call their sanitize() as we already did 407 * a bound check on the aggregate array size. We just include 408 * a small unreachable expression to make sure the structs 409 * pointed to do have a simple sanitize(), ie. they do not 410 * reference other structs via offsets. 
411 */ 412 (void) (false && arrayZ[0].sanitize (c)); 413 414 return_trace (true); 415 } 416 bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const 417 { 418 TRACE_SANITIZE (this); 419 if (unlikely (!sanitize_shallow (c, count))) return_trace (false); 420 for (unsigned int i = 0; i < count; i++) 421 if (unlikely (!arrayZ[i].sanitize (c, base))) 422 return_trace (false); 423 return_trace (true); 424 } 425 template <typename T> 426 bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const 427 { 428 TRACE_SANITIZE (this); 429 if (unlikely (!sanitize_shallow (c, count))) return_trace (false); 430 for (unsigned int i = 0; i < count; i++) 431 if (unlikely (!arrayZ[i].sanitize (c, base, user_data))) 432 return_trace (false); 433 return_trace (true); 434 } 435 436 bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const 437 { 438 TRACE_SANITIZE (this); 439 return_trace (c->check_array (arrayZ, count)); 440 } 441 442 public: 443 Type arrayZ[VAR]; 444 public: 445 DEFINE_SIZE_UNBOUNDED (0); 446 }; 447 448 /* Unsized array of offset's */ 449 template <typename Type, typename OffsetType, bool has_null=true> 450 struct UnsizedOffsetArrayOf : UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null> > {}; 451 452 /* Unsized array of offsets relative to the beginning of the array itself. */ 453 template <typename Type, typename OffsetType, bool has_null=true> 454 struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType, has_null> 455 { 456 const Type& operator [] (int i_) const 457 { 458 unsigned int i = (unsigned int) i_; 459 const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i]; 460 if (unlikely (p < this->arrayZ)) return Null (Type); /* Overflowed. 
*/ 461 return this+*p; 462 } 463 Type& operator [] (int i_) 464 { 465 unsigned int i = (unsigned int) i_; 466 const OffsetTo<Type, OffsetType, has_null> *p = &this->arrayZ[i]; 467 if (unlikely (p < this->arrayZ)) return Crap (Type); /* Overflowed. */ 468 return this+*p; 469 } 470 471 472 bool sanitize (hb_sanitize_context_t *c, unsigned int count) const 473 { 474 TRACE_SANITIZE (this); 475 return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>::sanitize (c, count, this))); 476 } 477 template <typename T> 478 bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const 479 { 480 TRACE_SANITIZE (this); 481 return_trace ((UnsizedOffsetArrayOf<Type, OffsetType, has_null>::sanitize (c, count, this, user_data))); 482 } 483 }; 484 485 /* An array with sorted elements. Supports binary searching. */ 486 template <typename Type> 487 struct SortedUnsizedArrayOf : UnsizedArrayOf<Type> 488 { 489 hb_sorted_array_t<Type> as_array (unsigned int len) 490 { return hb_sorted_array (this->arrayZ, len); } 491 hb_sorted_array_t<const Type> as_array (unsigned int len) const 492 { return hb_sorted_array (this->arrayZ, len); } 493 operator hb_sorted_array_t<Type> () { return as_array (); } 494 operator hb_sorted_array_t<const Type> () const { return as_array (); } 495 496 template <typename T> 497 Type &bsearch (unsigned int len, const T &x, Type ¬_found = Crap (Type)) 498 { return *as_array (len).bsearch (x, ¬_found); } 499 template <typename T> 500 const Type &bsearch (unsigned int len, const T &x, const Type ¬_found = Null (Type)) const 501 { return *as_array (len).bsearch (x, ¬_found); } 502 template <typename T> 503 bool bfind (unsigned int len, const T &x, unsigned int *i = nullptr, 504 hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE, 505 unsigned int to_store = (unsigned int) -1) const 506 { return as_array (len).bfind (x, i, not_found, to_store); } 507 }; 508 509 510 /* An array with a number of elements. 
*/ 511 template <typename Type, typename LenType=HBUINT16> 512 struct ArrayOf 513 { 514 typedef Type item_t; 515 static constexpr unsigned item_size = hb_static_size (Type); 516 517 HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (ArrayOf, Type, LenType); 518 519 const Type& operator [] (int i_) const 520 { 521 unsigned int i = (unsigned int) i_; 522 if (unlikely (i >= len)) return Null (Type); 523 return arrayZ[i]; 524 } 525 Type& operator [] (int i_) 526 { 527 unsigned int i = (unsigned int) i_; 528 if (unlikely (i >= len)) return Crap (Type); 529 return arrayZ[i]; 530 } 531 532 unsigned int get_size () const 533 { return len.static_size + len * Type::static_size; } 534 535 hb_array_t<Type> as_array () 536 { return hb_array (arrayZ, len); } 537 hb_array_t<const Type> as_array () const 538 { return hb_array (arrayZ, len); } 539 operator hb_array_t<Type> (void) { return as_array (); } 540 operator hb_array_t<const Type> (void) const { return as_array (); } 541 542 hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const 543 { return as_array ().sub_array (start_offset, count);} 544 hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const 545 { return as_array ().sub_array (start_offset, count);} 546 hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count) 547 { return as_array ().sub_array (start_offset, count);} 548 hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) 549 { return as_array ().sub_array (start_offset, count);} 550 551 bool serialize (hb_serialize_context_t *c, unsigned int items_len) 552 { 553 TRACE_SERIALIZE (this); 554 if (unlikely (!c->extend_min (*this))) return_trace (false); 555 len.set (items_len); /* TODO(serialize) Overflow? 
*/ 556 if (unlikely (!c->extend (*this))) return_trace (false); 557 return_trace (true); 558 } 559 template <typename T> 560 bool serialize (hb_serialize_context_t *c, hb_array_t<const T> items) 561 { 562 TRACE_SERIALIZE (this); 563 if (unlikely (!serialize (c, items.length))) return_trace (false); 564 for (unsigned int i = 0; i < items.length; i++) 565 hb_assign (arrayZ[i], items[i]); 566 return_trace (true); 567 } 568 569 bool sanitize (hb_sanitize_context_t *c) const 570 { 571 TRACE_SANITIZE (this); 572 if (unlikely (!sanitize_shallow (c))) return_trace (false); 573 574 /* Note: for structs that do not reference other structs, 575 * we do not need to call their sanitize() as we already did 576 * a bound check on the aggregate array size. We just include 577 * a small unreachable expression to make sure the structs 578 * pointed to do have a simple sanitize(), ie. they do not 579 * reference other structs via offsets. 580 */ 581 (void) (false && arrayZ[0].sanitize (c)); 582 583 return_trace (true); 584 } 585 bool sanitize (hb_sanitize_context_t *c, const void *base) const 586 { 587 TRACE_SANITIZE (this); 588 if (unlikely (!sanitize_shallow (c))) return_trace (false); 589 unsigned int count = len; 590 for (unsigned int i = 0; i < count; i++) 591 if (unlikely (!arrayZ[i].sanitize (c, base))) 592 return_trace (false); 593 return_trace (true); 594 } 595 template <typename T> 596 bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const 597 { 598 TRACE_SANITIZE (this); 599 if (unlikely (!sanitize_shallow (c))) return_trace (false); 600 unsigned int count = len; 601 for (unsigned int i = 0; i < count; i++) 602 if (unlikely (!arrayZ[i].sanitize (c, base, user_data))) 603 return_trace (false); 604 return_trace (true); 605 } 606 607 template <typename T> 608 Type &lsearch (const T &x, Type ¬_found = Crap (Type)) 609 { return *as_array ().lsearch (x, ¬_found); } 610 template <typename T> 611 const Type &lsearch (const T &x, const Type ¬_found = Null 
(Type)) const 612 { return *as_array ().lsearch (x, ¬_found); } 613 614 void qsort (unsigned int start = 0, unsigned int end = (unsigned int) -1) 615 { as_array ().qsort (start, end); } 616 617 bool sanitize_shallow (hb_sanitize_context_t *c) const 618 { 619 TRACE_SANITIZE (this); 620 return_trace (len.sanitize (c) && c->check_array (arrayZ, len)); 621 } 622 623 public: 624 LenType len; 625 Type arrayZ[VAR]; 626 public: 627 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); 628 }; 629 template <typename Type> struct LArrayOf : ArrayOf<Type, HBUINT32> {}; 630 typedef ArrayOf<HBUINT8, HBUINT8> PString; 631 632 /* Array of Offset's */ 633 template <typename Type> 634 struct OffsetArrayOf : ArrayOf<OffsetTo<Type, HBUINT16> > {}; 635 template <typename Type> 636 struct LOffsetArrayOf : ArrayOf<OffsetTo<Type, HBUINT32> > {}; 637 template <typename Type> 638 struct LOffsetLArrayOf : ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32> {}; 639 640 /* Array of offsets relative to the beginning of the array itself. 
*/ 641 template <typename Type> 642 struct OffsetListOf : OffsetArrayOf<Type> 643 { 644 const Type& operator [] (int i_) const 645 { 646 unsigned int i = (unsigned int) i_; 647 if (unlikely (i >= this->len)) return Null (Type); 648 return this+this->arrayZ[i]; 649 } 650 const Type& operator [] (int i_) 651 { 652 unsigned int i = (unsigned int) i_; 653 if (unlikely (i >= this->len)) return Crap (Type); 654 return this+this->arrayZ[i]; 655 } 656 657 bool subset (hb_subset_context_t *c) const 658 { 659 TRACE_SUBSET (this); 660 struct OffsetListOf<Type> *out = c->serializer->embed (*this); 661 if (unlikely (!out)) return_trace (false); 662 unsigned int count = this->len; 663 for (unsigned int i = 0; i < count; i++) 664 out->arrayZ[i].serialize_subset (c, (*this)[i], out); 665 return_trace (true); 666 } 667 668 bool sanitize (hb_sanitize_context_t *c) const 669 { 670 TRACE_SANITIZE (this); 671 return_trace (OffsetArrayOf<Type>::sanitize (c, this)); 672 } 673 template <typename T> 674 bool sanitize (hb_sanitize_context_t *c, T user_data) const 675 { 676 TRACE_SANITIZE (this); 677 return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data)); 678 } 679 }; 680 681 /* An array starting at second element. */ 682 template <typename Type, typename LenType=HBUINT16> 683 struct HeadlessArrayOf 684 { 685 static constexpr unsigned item_size = Type::static_size; 686 687 HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (HeadlessArrayOf, Type, LenType); 688 689 const Type& operator [] (int i_) const 690 { 691 unsigned int i = (unsigned int) i_; 692 if (unlikely (i >= lenP1 || !i)) return Null (Type); 693 return arrayZ[i-1]; 694 } 695 Type& operator [] (int i_) 696 { 697 unsigned int i = (unsigned int) i_; 698 if (unlikely (i >= lenP1 || !i)) return Crap (Type); 699 return arrayZ[i-1]; 700 } 701 unsigned int get_size () const 702 { return lenP1.static_size + (lenP1 ? 
lenP1 - 1 : 0) * Type::static_size; } 703 704 bool serialize (hb_serialize_context_t *c, 705 hb_array_t<const Type> items) 706 { 707 TRACE_SERIALIZE (this); 708 if (unlikely (!c->extend_min (*this))) return_trace (false); 709 lenP1.set (items.length + 1); /* TODO(serialize) Overflow? */ 710 if (unlikely (!c->extend (*this))) return_trace (false); 711 for (unsigned int i = 0; i < items.length; i++) 712 arrayZ[i] = items[i]; 713 return_trace (true); 714 } 715 716 bool sanitize (hb_sanitize_context_t *c) const 717 { 718 TRACE_SANITIZE (this); 719 if (unlikely (!sanitize_shallow (c))) return_trace (false); 720 721 /* Note: for structs that do not reference other structs, 722 * we do not need to call their sanitize() as we already did 723 * a bound check on the aggregate array size. We just include 724 * a small unreachable expression to make sure the structs 725 * pointed to do have a simple sanitize(), ie. they do not 726 * reference other structs via offsets. 727 */ 728 (void) (false && arrayZ[0].sanitize (c)); 729 730 return_trace (true); 731 } 732 733 private: 734 bool sanitize_shallow (hb_sanitize_context_t *c) const 735 { 736 TRACE_SANITIZE (this); 737 return_trace (lenP1.sanitize (c) && 738 (!lenP1 || c->check_array (arrayZ, lenP1 - 1))); 739 } 740 741 public: 742 LenType lenP1; 743 Type arrayZ[VAR]; 744 public: 745 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); 746 }; 747 748 /* An array storing length-1. 
*/ 749 template <typename Type, typename LenType=HBUINT16> 750 struct ArrayOfM1 751 { 752 HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2 (ArrayOfM1, Type, LenType); 753 754 const Type& operator [] (int i_) const 755 { 756 unsigned int i = (unsigned int) i_; 757 if (unlikely (i > lenM1)) return Null (Type); 758 return arrayZ[i]; 759 } 760 Type& operator [] (int i_) 761 { 762 unsigned int i = (unsigned int) i_; 763 if (unlikely (i > lenM1)) return Crap (Type); 764 return arrayZ[i]; 765 } 766 unsigned int get_size () const 767 { return lenM1.static_size + (lenM1 + 1) * Type::static_size; } 768 769 template <typename T> 770 bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const 771 { 772 TRACE_SANITIZE (this); 773 if (unlikely (!sanitize_shallow (c))) return_trace (false); 774 unsigned int count = lenM1 + 1; 775 for (unsigned int i = 0; i < count; i++) 776 if (unlikely (!arrayZ[i].sanitize (c, base, user_data))) 777 return_trace (false); 778 return_trace (true); 779 } 780 781 private: 782 bool sanitize_shallow (hb_sanitize_context_t *c) const 783 { 784 TRACE_SANITIZE (this); 785 return_trace (lenM1.sanitize (c) && 786 (c->check_array (arrayZ, lenM1 + 1))); 787 } 788 789 public: 790 LenType lenM1; 791 Type arrayZ[VAR]; 792 public: 793 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); 794 }; 795 796 /* An array with sorted elements. Supports binary searching. 
*/ 797 template <typename Type, typename LenType=HBUINT16> 798 struct SortedArrayOf : ArrayOf<Type, LenType> 799 { 800 hb_sorted_array_t<Type> as_array () 801 { return hb_sorted_array (this->arrayZ, this->len); } 802 hb_sorted_array_t<const Type> as_array () const 803 { return hb_sorted_array (this->arrayZ, this->len); } 804 operator hb_sorted_array_t<Type> () { return as_array (); } 805 operator hb_sorted_array_t<const Type> () const { return as_array (); } 806 807 hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const 808 { return as_array ().sub_array (start_offset, count);} 809 hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const 810 { return as_array ().sub_array (start_offset, count);} 811 hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count) 812 { return as_array ().sub_array (start_offset, count);} 813 hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) 814 { return as_array ().sub_array (start_offset, count);} 815 816 template <typename T> 817 Type &bsearch (const T &x, Type ¬_found = Crap (Type)) 818 { return *as_array ().bsearch (x, ¬_found); } 819 template <typename T> 820 const Type &bsearch (const T &x, const Type ¬_found = Null (Type)) const 821 { return *as_array ().bsearch (x, ¬_found); } 822 template <typename T> 823 bool bfind (const T &x, unsigned int *i = nullptr, 824 hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE, 825 unsigned int to_store = (unsigned int) -1) const 826 { return as_array ().bfind (x, i, not_found, to_store); } 827 }; 828 829 /* 830 * Binary-search arrays 831 */ 832 833 template <typename LenType=HBUINT16> 834 struct BinSearchHeader 835 { 836 operator uint32_t () const { return len; } 837 838 bool sanitize (hb_sanitize_context_t *c) const 839 { 840 TRACE_SANITIZE (this); 841 return_trace (c->check_struct (this)); 842 } 843 844 void set (unsigned int v) 845 
{ 846 len.set (v); 847 assert (len == v); 848 entrySelector.set (MAX (1u, hb_bit_storage (v)) - 1); 849 searchRange.set (16 * (1u << entrySelector)); 850 rangeShift.set (v * 16 > searchRange 851 ? 16 * v - searchRange 852 : 0); 853 } 854 855 protected: 856 LenType len; 857 LenType searchRange; 858 LenType entrySelector; 859 LenType rangeShift; 860 861 public: 862 DEFINE_SIZE_STATIC (8); 863 }; 864 865 template <typename Type, typename LenType=HBUINT16> 866 struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader<LenType> > {}; 867 868 869 struct VarSizedBinSearchHeader 870 { 871 872 bool sanitize (hb_sanitize_context_t *c) const 873 { 874 TRACE_SANITIZE (this); 875 return_trace (c->check_struct (this)); 876 } 877 878 HBUINT16 unitSize; /* Size of a lookup unit for this search in bytes. */ 879 HBUINT16 nUnits; /* Number of units of the preceding size to be searched. */ 880 HBUINT16 searchRange; /* The value of unitSize times the largest power of 2 881 * that is less than or equal to the value of nUnits. */ 882 HBUINT16 entrySelector; /* The log base 2 of the largest power of 2 less than 883 * or equal to the value of nUnits. */ 884 HBUINT16 rangeShift; /* The value of unitSize times the difference of the 885 * value of nUnits minus the largest power of 2 less 886 * than or equal to the value of nUnits. */ 887 public: 888 DEFINE_SIZE_STATIC (10); 889 }; 890 891 template <typename Type> 892 struct VarSizedBinSearchArrayOf 893 { 894 static constexpr unsigned item_size = Type::static_size; 895 896 HB_NO_CREATE_COPY_ASSIGN_TEMPLATE (VarSizedBinSearchArrayOf, Type); 897 898 bool last_is_terminator () const 899 { 900 if (unlikely (!header.nUnits)) return false; 901 902 /* Gah. 903 * 904 * "The number of termination values that need to be included is table-specific. 905 * The value that indicates binary search termination is 0xFFFF." 
*/ 906 const HBUINT16 *words = &StructAtOffset<HBUINT16> (&bytesZ, (header.nUnits - 1) * header.unitSize); 907 unsigned int count = Type::TerminationWordCount; 908 for (unsigned int i = 0; i < count; i++) 909 if (words[i] != 0xFFFFu) 910 return false; 911 return true; 912 } 913 914 const Type& operator [] (int i_) const 915 { 916 unsigned int i = (unsigned int) i_; 917 if (unlikely (i >= get_length ())) return Null (Type); 918 return StructAtOffset<Type> (&bytesZ, i * header.unitSize); 919 } 920 Type& operator [] (int i_) 921 { 922 unsigned int i = (unsigned int) i_; 923 if (unlikely (i >= get_length ())) return Crap (Type); 924 return StructAtOffset<Type> (&bytesZ, i * header.unitSize); 925 } 926 unsigned int get_length () const 927 { return header.nUnits - last_is_terminator (); } 928 unsigned int get_size () const 929 { return header.static_size + header.nUnits * header.unitSize; } 930 931 bool sanitize (hb_sanitize_context_t *c) const 932 { 933 TRACE_SANITIZE (this); 934 if (unlikely (!sanitize_shallow (c))) return_trace (false); 935 936 /* Note: for structs that do not reference other structs, 937 * we do not need to call their sanitize() as we already did 938 * a bound check on the aggregate array size. We just include 939 * a small unreachable expression to make sure the structs 940 * pointed to do have a simple sanitize(), ie. they do not 941 * reference other structs via offsets. 
942 */ 943 (void) (false && StructAtOffset<Type> (&bytesZ, 0).sanitize (c)); 944 945 return_trace (true); 946 } 947 bool sanitize (hb_sanitize_context_t *c, const void *base) const 948 { 949 TRACE_SANITIZE (this); 950 if (unlikely (!sanitize_shallow (c))) return_trace (false); 951 unsigned int count = get_length (); 952 for (unsigned int i = 0; i < count; i++) 953 if (unlikely (!(*this)[i].sanitize (c, base))) 954 return_trace (false); 955 return_trace (true); 956 } 957 template <typename T> 958 bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const 959 { 960 TRACE_SANITIZE (this); 961 if (unlikely (!sanitize_shallow (c))) return_trace (false); 962 unsigned int count = get_length (); 963 for (unsigned int i = 0; i < count; i++) 964 if (unlikely (!(*this)[i].sanitize (c, base, user_data))) 965 return_trace (false); 966 return_trace (true); 967 } 968 969 template <typename T> 970 const Type *bsearch (const T &key) const 971 { 972 unsigned int size = header.unitSize; 973 int min = 0, max = (int) get_length () - 1; 974 while (min <= max) 975 { 976 int mid = ((unsigned int) min + (unsigned int) max) / 2; 977 const Type *p = (const Type *) (((const char *) &bytesZ) + (mid * size)); 978 int c = p->cmp (key); 979 if (c < 0) max = mid - 1; 980 else if (c > 0) min = mid + 1; 981 else return p; 982 } 983 return nullptr; 984 } 985 986 private: 987 bool sanitize_shallow (hb_sanitize_context_t *c) const 988 { 989 TRACE_SANITIZE (this); 990 return_trace (header.sanitize (c) && 991 Type::static_size <= header.unitSize && 992 c->check_range (bytesZ.arrayZ, 993 header.nUnits, 994 header.unitSize)); 995 } 996 997 protected: 998 VarSizedBinSearchHeader header; 999 UnsizedArrayOf<HBUINT8> bytesZ; 1000 public: 1001 DEFINE_SIZE_ARRAY (10, bytesZ); 1002 }; 1003 1004 1005 } /* namespace OT */ 1006 1007 1008 #endif /* HB_OPEN_TYPE_HH */