28 */
/* NOTE(review): SOURCE is a fused two-column dump — an older revision (left of the '|') and a newer one (right) of the same buffer-private header — with the original line numbers embedded in the text and the struct interior elided between the flags field and the copy-loop tail. Annotations below are comment-only; code text is left byte-identical. */
29 30 #ifndef HB_BUFFER_PRIVATE_HH 31 #define HB_BUFFER_PRIVATE_HH 32 33 #include "hb-private.hh" 34 #include "hb-object-private.hh" 35 #include "hb-unicode-private.hh" 36 37 38 #ifndef HB_BUFFER_MAX_EXPANSION_FACTOR 39 #define HB_BUFFER_MAX_EXPANSION_FACTOR 32 40 #endif 41 #ifndef HB_BUFFER_MAX_LEN_MIN 42 #define HB_BUFFER_MAX_LEN_MIN 8192 43 #endif 44 #ifndef HB_BUFFER_MAX_LEN_DEFAULT 45 #define HB_BUFFER_MAX_LEN_DEFAULT 0x3FFFFFFF /* Shaping more than a billion chars? Let us know! */ 46 #endif 47
/* Layout invariants: hb_glyph_info_t is exactly 20 bytes and the same size as hb_glyph_position_t — presumably so the info and pos arrays can alias; confirm against upstream. */
48 ASSERT_STATIC (sizeof (hb_glyph_info_t) == 20); 49 ASSERT_STATIC (sizeof (hb_glyph_info_t) == sizeof (hb_glyph_position_t)); 50 51 HB_MARK_AS_FLAG_T (hb_buffer_flags_t); 52 HB_MARK_AS_FLAG_T (hb_buffer_serialize_flags_t); 53
/* Transient per-shaping-run scratch flags; values 0x01000000u and up are reserved for complex shapers. */
54 enum hb_buffer_scratch_flags_t { 55 HB_BUFFER_SCRATCH_FLAG_DEFAULT = 0x00000000u, 56 HB_BUFFER_SCRATCH_FLAG_HAS_NON_ASCII = 0x00000001u, 57 HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES = 0x00000002u, 58 HB_BUFFER_SCRATCH_FLAG_HAS_SPACE_FALLBACK = 0x00000004u, 59 HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT = 0x00000008u, 60 /* Reserved for complex shapers' internal use. */ 61 HB_BUFFER_SCRATCH_FLAG_COMPLEX0 = 0x01000000u, 62 HB_BUFFER_SCRATCH_FLAG_COMPLEX1 = 0x02000000u, 63 HB_BUFFER_SCRATCH_FLAG_COMPLEX2 = 0x04000000u, 64 HB_BUFFER_SCRATCH_FLAG_COMPLEX3 = 0x08000000u, 65 }; 66 HB_MARK_AS_FLAG_T (hb_buffer_scratch_flags_t); 67 68 69 /* 70 * hb_buffer_t 71 */ 72 73 struct hb_buffer_t { 74 hb_object_header_t header; 75 ASSERT_POD (); 76 77 /* Information about how the text in the buffer should be treated */ 78 hb_unicode_funcs_t *unicode; /* Unicode functions */ 79 hb_buffer_flags_t flags; /* BOT / EOT / etc. */ 215 } 216 out_len++; 217 } 218 219 idx++; 220 } 221 222 /* Advance idx without copying to output. 
*/ 223 inline void skip_glyph (void) { idx++; } 224
/* reset_masks overwrites every glyph's mask; add_masks ORs extra bits in. */
225 inline void reset_masks (hb_mask_t mask) 226 { 227 for (unsigned int j = 0; j < len; j++) 228 info[j].mask = mask; 229 } 230 inline void add_masks (hb_mask_t mask) 231 { 232 for (unsigned int j = 0; j < len; j++) 233 info[j].mask |= mask; 234 } 235 HB_INTERNAL void set_masks (hb_mask_t value, 236 hb_mask_t mask, 237 unsigned int cluster_start, 238 unsigned int cluster_end); 239
/* Cluster merging: a span shorter than two glyphs needs no work, so the early-out keeps the common case cheap. */
240 HB_INTERNAL void merge_clusters (unsigned int start, 241 unsigned int end) 242 { 243 if (end - start < 2) 244 return; 245 merge_clusters_impl (start, end); 246 } 247 HB_INTERNAL void merge_clusters_impl (unsigned int start, 248 unsigned int end); 249 HB_INTERNAL void merge_out_clusters (unsigned int start, 250 unsigned int end); 251 /* Merge clusters for deleting current glyph, and skip it. */ 252 HB_INTERNAL void delete_glyph (void); 253 254 /* Internal methods */ 255 HB_INTERNAL bool enlarge (unsigned int size); 256
/* ensure() grows the backing store only once the requested size reaches the current allocation; ensure_inplace() merely reports whether it would fit. */
257 inline bool ensure (unsigned int size) 258 { return likely (!size || size < allocated) ? true : enlarge (size); } 259 260 inline bool ensure_inplace (unsigned int size) 261 { return likely (!size || size < allocated); } 262 263 HB_INTERNAL bool make_room_for (unsigned int num_in, unsigned int num_out); 264 HB_INTERNAL bool shift_forward (unsigned int count); 265 266 typedef long scratch_buffer_t; 267 HB_INTERNAL scratch_buffer_t *get_scratch_buffer (unsigned int *size); 268 269 inline void clear_context (unsigned int side) { context_len[side] = 0; } 270 271 HB_INTERNAL void sort (unsigned int start, unsigned int end, int(*compar)(const hb_glyph_info_t *, const hb_glyph_info_t *)); 272 273 inline bool messaging (void) { return unlikely (message_func); } 274 inline bool message (hb_font_t *font, const char *fmt, ...) 
HB_PRINTF_FUNC(3, 4) 275 { 276 if (!messaging ()) 277 return true; 278 va_list ap; 279 va_start (ap, fmt); 280 bool ret = message_impl (font, fmt, ap); 281 va_end (ap); 282 return ret; 283 } 284 HB_INTERNAL bool message_impl (hb_font_t *font, const char *fmt, va_list ap) HB_PRINTF_FUNC(3, 0); 285 }; 286 287
/* Var-allocation helpers: the offset handed to the buffer is taken relative to hb_glyph_info_t's var1 field. */
288 #define HB_BUFFER_XALLOCATE_VAR(b, func, var) \ 289 b->func (offsetof (hb_glyph_info_t, var) - offsetof(hb_glyph_info_t, var1), \ 290 sizeof (b->info[0].var)) 291 #define HB_BUFFER_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, allocate_var, var ()) 292 #define HB_BUFFER_DEALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, deallocate_var, var ()) 293 #define HB_BUFFER_ASSERT_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, assert_var, var ()) 294 295 296 #endif /* HB_BUFFER_PRIVATE_HH */ |
/* ---- Right column begins here: a later revision of the same header, visibly adding hb_buffer_diff_flags_t and the unsafe-to-break machinery. ---- */
28 */ 29 30 #ifndef HB_BUFFER_PRIVATE_HH 31 #define HB_BUFFER_PRIVATE_HH 32 33 #include "hb-private.hh" 34 #include "hb-object-private.hh" 35 #include "hb-unicode-private.hh" 36 37 38 #ifndef HB_BUFFER_MAX_EXPANSION_FACTOR 39 #define HB_BUFFER_MAX_EXPANSION_FACTOR 32 40 #endif 41 #ifndef HB_BUFFER_MAX_LEN_MIN 42 #define HB_BUFFER_MAX_LEN_MIN 8192 43 #endif 44 #ifndef HB_BUFFER_MAX_LEN_DEFAULT 45 #define HB_BUFFER_MAX_LEN_DEFAULT 0x3FFFFFFF /* Shaping more than a billion chars? Let us know! 
*/ 46 #endif 47
/* Same layout invariants as the left column, now expressed with C++11 static_assert. */
48 static_assert ((sizeof (hb_glyph_info_t) == 20), ""); 49 static_assert ((sizeof (hb_glyph_info_t) == sizeof (hb_glyph_position_t)), ""); 50 51 HB_MARK_AS_FLAG_T (hb_buffer_flags_t); 52 HB_MARK_AS_FLAG_T (hb_buffer_serialize_flags_t); 53 HB_MARK_AS_FLAG_T (hb_buffer_diff_flags_t); 54 55 enum hb_buffer_scratch_flags_t { 56 HB_BUFFER_SCRATCH_FLAG_DEFAULT = 0x00000000u, 57 HB_BUFFER_SCRATCH_FLAG_HAS_NON_ASCII = 0x00000001u, 58 HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES = 0x00000002u, 59 HB_BUFFER_SCRATCH_FLAG_HAS_SPACE_FALLBACK = 0x00000004u, 60 HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT = 0x00000008u, 61 HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK = 0x00000010u, 62 63 /* Reserved for complex shapers' internal use. */ 64 HB_BUFFER_SCRATCH_FLAG_COMPLEX0 = 0x01000000u, 65 HB_BUFFER_SCRATCH_FLAG_COMPLEX1 = 0x02000000u, 66 HB_BUFFER_SCRATCH_FLAG_COMPLEX2 = 0x04000000u, 67 HB_BUFFER_SCRATCH_FLAG_COMPLEX3 = 0x08000000u, 68 }; 69 HB_MARK_AS_FLAG_T (hb_buffer_scratch_flags_t); 70 71 72 /* 73 * hb_buffer_t 74 */ 75 76 struct hb_buffer_t { 77 hb_object_header_t header; 78 ASSERT_POD (); 79 80 /* Information about how the text in the buffer should be treated */ 81 hb_unicode_funcs_t *unicode; /* Unicode functions */ 82 hb_buffer_flags_t flags; /* BOT / EOT / etc. */ 218 } 219 out_len++; 220 } 221 222 idx++; 223 } 224 225 /* Advance idx without copying to output. 
*/ 226 inline void skip_glyph (void) { idx++; } 227 228 inline void reset_masks (hb_mask_t mask) 229 { 230 for (unsigned int j = 0; j < len; j++) 231 info[j].mask = mask; 232 } 233 inline void add_masks (hb_mask_t mask) 234 { 235 for (unsigned int j = 0; j < len; j++) 236 info[j].mask |= mask; 237 } 238 HB_INTERNAL void set_masks (hb_mask_t value, hb_mask_t mask, 239 unsigned int cluster_start, unsigned int cluster_end); 240 241 inline void merge_clusters (unsigned int start, unsigned int end) 242 { 243 if (end - start < 2) 244 return; 245 merge_clusters_impl (start, end); 246 } 247 HB_INTERNAL void merge_clusters_impl (unsigned int start, unsigned int end); 248 HB_INTERNAL void merge_out_clusters (unsigned int start, unsigned int end); 249 /* Merge clusters for deleting current glyph, and skip it. */ 250 HB_INTERNAL void delete_glyph (void); 251
/* unsafe_to_break mirrors merge_clusters: early-out for spans shorter than two glyphs; the real work lives in unsafe_to_break_impl / unsafe_to_break_from_outbuffer. */
252 inline void unsafe_to_break (unsigned int start, 253 unsigned int end) 254 { 255 if (end - start < 2) 256 return; 257 unsafe_to_break_impl (start, end); 258 } 259 HB_INTERNAL void unsafe_to_break_impl (unsigned int start, unsigned int end); 260 HB_INTERNAL void unsafe_to_break_from_outbuffer (unsigned int start, unsigned int end); 261 262 263 /* Internal methods */ 264 HB_INTERNAL bool enlarge (unsigned int size); 265 266 inline bool ensure (unsigned int size) 267 { return likely (!size || size < allocated) ? 
true : enlarge (size); } 268 269 inline bool ensure_inplace (unsigned int size) 270 { return likely (!size || size < allocated); } 271 272 HB_INTERNAL bool make_room_for (unsigned int num_in, unsigned int num_out); 273 HB_INTERNAL bool shift_forward (unsigned int count); 274 275 typedef long scratch_buffer_t; 276 HB_INTERNAL scratch_buffer_t *get_scratch_buffer (unsigned int *size); 277 278 inline void clear_context (unsigned int side) { context_len[side] = 0; } 279 280 HB_INTERNAL void sort (unsigned int start, unsigned int end, int(*compar)(const hb_glyph_info_t *, const hb_glyph_info_t *)); 281 282 inline bool messaging (void) { return unlikely (message_func); } 283 inline bool message (hb_font_t *font, const char *fmt, ...) HB_PRINTF_FUNC(3, 4) 284 { 285 if (!messaging ()) 286 return true; 287 va_list ap; 288 va_start (ap, fmt); 289 bool ret = message_impl (font, fmt, ap); 290 va_end (ap); 291 return ret; 292 } 293 HB_INTERNAL bool message_impl (hb_font_t *font, const char *fmt, va_list ap) HB_PRINTF_FUNC(3, 0); 294
/* set_cluster: only when the cluster value actually changes is the UNSAFE_TO_BREAK bit of the supplied mask copied into info.mask (set or cleared); the new cluster is then stored unconditionally. */
295 static inline void 296 set_cluster (hb_glyph_info_t &info, unsigned int cluster, unsigned int mask = 0) 297 { 298 if (info.cluster != cluster) 299 { 300 if (mask & HB_GLYPH_FLAG_UNSAFE_TO_BREAK) 301 info.mask |= HB_GLYPH_FLAG_UNSAFE_TO_BREAK; 302 else 303 info.mask &= ~HB_GLYPH_FLAG_UNSAFE_TO_BREAK; 304 } 305 info.cluster = cluster; 306 } 307
/* Returns the minimum cluster value over info[start..end), seeded with the passed-in cluster. */
308 inline int 309 _unsafe_to_break_find_min_cluster (const hb_glyph_info_t *info, 310 unsigned int start, unsigned int end, 311 unsigned int cluster) const 312 { 313 for (unsigned int i = start; i < end; i++) 314 cluster = MIN<unsigned int> (cluster, info[i].cluster); 315 return cluster; 316 }
/* Marks every glyph in [start, end) whose cluster differs from the given one as unsafe-to-break, and records that fact in scratch_flags. */
317 inline void 318 _unsafe_to_break_set_mask (hb_glyph_info_t *info, 319 unsigned int start, unsigned int end, 320 unsigned int cluster) 321 { 322 for (unsigned int i = start; i < end; i++) 323 if (cluster != info[i].cluster) 324 { 325 scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK; 326 info[i].mask |= 
HB_GLYPH_FLAG_UNSAFE_TO_BREAK; 327 } 328 } 329
/* Whole-buffer helpers: set or clear the UNSAFE_TO_BREAK bit on every glyph. */
330 inline void 331 unsafe_to_break_all (void) 332 { 333 for (unsigned int i = 0; i < len; i++) 334 info[i].mask |= HB_GLYPH_FLAG_UNSAFE_TO_BREAK; 335 } 336 inline void 337 safe_to_break_all (void) 338 { 339 for (unsigned int i = 0; i < len; i++) 340 info[i].mask &= ~HB_GLYPH_FLAG_UNSAFE_TO_BREAK; 341 } 342 }; 343 344 345 /* Loop over clusters. Duplicated in foreach_syllable(). */ 346 #define foreach_cluster(buffer, start, end) \ 347 for (unsigned int \ 348 _count = buffer->len, \ 349 start = 0, end = _count ? _next_cluster (buffer, 0) : 0; \ 350 start < _count; \ 351 start = end, end = _next_cluster (buffer, start)) 352
/* Advances past the run of glyphs sharing info[start].cluster; returns the index one past the run. */
353 static inline unsigned int 354 _next_cluster (hb_buffer_t *buffer, unsigned int start) 355 { 356 hb_glyph_info_t *info = buffer->info; 357 unsigned int count = buffer->len; 358 359 unsigned int cluster = info[start].cluster; 360 while (++start < count && cluster == info[start].cluster) 361 ; 362 363 return start; 364 } 365 366
/* Var-allocation helpers: the offset handed to the buffer is taken relative to hb_glyph_info_t's var1 field. */
367 #define HB_BUFFER_XALLOCATE_VAR(b, func, var) \ 368 b->func (offsetof (hb_glyph_info_t, var) - offsetof(hb_glyph_info_t, var1), \ 369 sizeof (b->info[0].var)) 370 #define HB_BUFFER_ALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, allocate_var, var ()) 371 #define HB_BUFFER_DEALLOCATE_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, deallocate_var, var ()) 372 #define HB_BUFFER_ASSERT_VAR(b, var) HB_BUFFER_XALLOCATE_VAR (b, assert_var, var ()) 373 374 375 #endif /* HB_BUFFER_PRIVATE_HH */ |