/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_PRIVATE_HH
#define HB_OPEN_TYPE_PRIVATE_HH

#include "hb-private.hh"
#include "hb-debug.hh"
#include "hb-face-private.hh"


namespace OT {



/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
static inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
static inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
static inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also.  X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
static inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }

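/* Illustrative sketch (not part of the upstream header): how these cast and
 * offset helpers are typically combined when walking raw table data.
 * `ExampleRecord` is a hypothetical struct that provides get_size().
 *
 *   const char *data = ...;                       // start of a table blob
 *   const ExampleRecord &rec = CastR<ExampleRecord> (*data);
 *   const ExampleRecord *p   = CastP<ExampleRecord> (data);
 *   const UINT16 &field = StructAtOffset<UINT16> (data, 4);  // 4 bytes in
 *   const UINT16 &next  = StructAfter<UINT16> (rec);         // right past rec
 */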


/*
 * Size checking
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    static_assert ((_assertion), ""); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size); \
  inline unsigned int get_size (void) const { return (size); }

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (0*sizeof(this->u._member.static_size) + sizeof(this->u._member) == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)

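/* Illustrative sketch (not part of the upstream header): the DEFINE_SIZE_*
 * macros are placed at the end of a wire-format struct so the compiler
 * verifies the declared size.  `ExamplePair` is hypothetical.
 *
 *   struct ExamplePair
 *   {
 *     UINT16 first;
 *     UINT16 second;
 *     public:
 *     DEFINE_SIZE_STATIC (4);   // compile-time check: sizeof (*this) == 4
 *   };
 */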


/*
 * Null objects
 */

/* Global nul-content Null pool.  Enlarge as necessary. */

#define HB_NULL_POOL_SIZE 264
static_assert (HB_NULL_POOL_SIZE % sizeof (void *) == 0, "Align HB_NULL_POOL_SIZE.");
extern HB_INTERNAL const void * const _hb_NullPool[HB_NULL_POOL_SIZE / sizeof (void *)];

/* Generic nul-content Null objects. */
template <typename Type>
static inline const Type& Null (void) {
  static_assert (sizeof (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE.");
  return *CastP<Type> (_hb_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
static_assert (Type::min_size + 1 <= sizeof (_Null##Type), "Null pool too small.  Enlarge.")

/* Accessor macro. */
#define Null(Type) Null<Type>()

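/* Illustrative sketch (not part of the upstream header): Null(Type) yields a
 * shared, read-only, zero-filled instance, so accessors can hand back a
 * harmless reference instead of dereferencing garbage; the array types later
 * in this file use it exactly that way for out-of-range indices.
 *
 *   const GlyphID &g = Null(GlyphID);   // reads as glyph id 0
 *   const Tag &t = Null(Tag);           // spaces, via DEFINE_NULL_DATA below
 */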

/*
 * Dispatch
 */

template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};


/*
 * Sanitize
 */

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif

struct hb_sanitize_context_t :
       hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
{
  inline hb_sanitize_context_t (void) :
        debug_depth (0),
        start (nullptr), end (nullptr),
        writable (false), edit_count (0),
        blob (nullptr) {}

  inline const char *get_name (void) { return "SANITIZE"; }
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  static return_t no_dispatch_return_value (void) { return false; }
  bool stop_sublookup_iteration (const return_t r) const { return !r; }

  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, nullptr);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));
  }

  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
                     "end [%p..%p] %u edit requests",
                     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = nullptr;
    this->start = this->end = nullptr;
  }

  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    unsigned int array_size = record_size * len;
    bool ok = !overflows && this->check_range (base, array_size);

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
       p, p + (record_size * len), record_size, len, (unsigned int) array_size,
       this->start, this->end,
       overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  template <typename Type, typename ValueType>
  inline bool try_set (const Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      const_cast<Type *> (obj)->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth;
  const char *start, *end;
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
};



/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  static hb_blob_t *sanitize (hb_blob_t *blob) {
    hb_sanitize_context_t c[1];
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, c->start, "start");

    c->start_processing ();

    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      if (c->edit_count) {
        DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);

        /* sanitize again to ensure no toe-stepping */
        c->edit_count = 0;
        sane = t->sanitize (c);
        if (c->edit_count) {
          DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILING", c->edit_count);
          sane = false;
        }
      }
    } else {
      unsigned int edit_count = c->edit_count;
      if (edit_count && !c->writable) {
        c->start = hb_blob_get_data_writable (blob, nullptr);
        c->end = c->start + hb_blob_get_length (blob);

        if (c->start) {
          c->writable = true;
          /* ok, we made it writable by relocating.  try again */
          DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
          goto retry;
        }
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, nullptr);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }
};

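/* Illustrative sketch (not part of the upstream header): the usual pattern is
 * to sanitize a table blob right after fetching it from a face, then lock an
 * instance.  The table type `hhea` and the variable names are hypothetical.
 *
 *   hb_blob_t *blob = Sanitizer<hhea>::sanitize (face->reference_table (HB_TAG ('h','h','e','a')));
 *   const hhea *hea = Sanitizer<hhea>::lock_instance (blob);
 *   ... use hea ...
 *   hb_blob_destroy (blob);
 */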


/*
 * Serialize
 */


struct hb_serialize_context_t
{
  inline hb_serialize_context_t (void *start_, unsigned int size)
  {
    this->start = (char *) start_;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }

  template <typename Type>
  inline Type *start_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));

    return start_embed<Type> ();
  }

  inline void end_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
                     "end [%p..%p] serialized %d bytes; %s",
                     this->start, this->end,
                     (int) (this->head - this->start),
                     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");

  }

  template <typename Type>
  inline Type *copy (void)
  {
    assert (!this->ran_out_of_room);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }

  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return nullptr;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }

  template <typename Type>
  inline Type *embed (const Type &obj)
  {
    unsigned int size = obj.get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return nullptr;
    memcpy (ret, &obj, size);
    return ret;
  }

  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
    return reinterpret_cast<Type *> (&obj);
  }

  template <typename Type>
  inline Type *extend (Type &obj)
  {
    unsigned int size = obj.get_size ();
    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
    return reinterpret_cast<Type *> (&obj);
  }

  inline void truncate (void *new_head)
  {
    assert (this->start < new_head && new_head <= this->head);
    this->head = (char *) new_head;
  }

  unsigned int debug_depth;
  char *start, *end, *head;
  bool ran_out_of_room;
};

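/* Illustrative sketch (not part of the upstream header): a serializer is set
 * up over a caller-provided buffer, objects are embedded/extended into it,
 * and the result is copied out.  Buffer size and table type are hypothetical.
 *
 *   char buf[1024];
 *   hb_serialize_context_t c (buf, sizeof (buf));
 *   ExampleTable *t = c.start_serialize<ExampleTable> ();
 *   ... call serialize() methods that allocate through c ...
 *   c.end_serialize ();
 *   ExampleTable *out = c.copy<ExampleTable> ();   // malloc'ed; caller frees
 */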
template <typename Type>
struct Supplier
{
  inline Supplier (const Type *array, unsigned int len_)
  {
    head = array;
    len = len_;
  }
  inline const Type operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Type ();
    return head[i];
  }

  inline void advance (unsigned int count)
  {
    if (unlikely (count > len))
      count = len;
    len -= count;
    head += count;
  }

  private:
  inline Supplier (const Supplier<Type> &); /* Disallow copy */
  inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */

  unsigned int len;
  const Type *head;
};




/*
 *
 * The OpenType Font File: Data Types
 */


/* "The following data types are used in the OpenType font file.
 *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */


template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 1>
{
  public:
  inline void set (Type V)
  {
    v = V;
  }
  inline operator Type (void) const
  {
    return v;
  }
  private: uint8_t v;
};
template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >>  8) & 0xFF;
    v[1] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] <<  8)
         + (v[1]      );
  }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 16)
         + (v[1] <<  8)
         + (v[2]      );
  }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 24)
         + (v[1] << 16)
         + (v[2] <<  8)
         + (v[3]      );
  }
  private: uint8_t v[4];
};

/* Integer types in big-endian order with no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  template <typename Type2>
  inline int cmp (Type2 a) const
  {
    Type b = v;
    if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef IntType<uint8_t,  1> UINT8;	/* 8-bit unsigned integer. */
typedef IntType<int8_t,   1> INT8;	/* 8-bit signed integer. */
typedef IntType<uint16_t, 2> UINT16;	/* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> INT16;	/* 16-bit signed integer. */
typedef IntType<uint32_t, 4> UINT32;	/* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> INT32;	/* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;	/* 24-bit unsigned integer. */

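/* Illustrative sketch (not part of the upstream header): these wrappers store
 * the value big-endian regardless of host endianness, so a value read from
 * font data compares directly against a host integer.
 *
 *   UINT16 n;
 *   n.set (0x0102);           // stored in memory as the bytes 0x01, 0x02
 *   unsigned int host = n;    // implicit conversion reads it back as 0x0102
 */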
/* 16-bit signed integer (INT16) that describes a quantity in FUnits. */
typedef INT16 FWORD;

/* 16-bit unsigned integer (UINT16) that describes a quantity in FUnits. */
typedef UINT16 UFWORD;

/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : INT16
{
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (2);
};

/* 32-bit signed fixed-point number (16.16). */
struct Fixed: INT32
{
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (4);
};

/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  INT32 major;
  UINT32 minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : UINT32
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
DEFINE_NULL_DATA (Tag, "    ");

/* Glyph index number, same as uint16 (length = 16 bits) */
typedef UINT16 GlyphID;

/* Script/language-system/feature index */
struct Index : UINT16 {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
DEFINE_NULL_DATA (Index, "\xff\xff");

/* Offset, Null offset = 0 */
template <typename Type>
struct Offset : Type
{
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};

typedef Offset<UINT16> Offset16;
typedef Offset<UINT32> Offset32;


/* CheckSum */
struct CheckSum : UINT32
{
  /* This is the reference implementation from the spec. */
  static inline uint32_t CalcTableChecksum (const UINT32 *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    const UINT32 *EndPtr = Table+((Length+3) & ~3) / UINT32::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const UINT32 *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};


/*
 * Version Numbers
 */

template <typename FixedType=UINT16>
struct FixedVersion
{
  inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  FixedType major;
  FixedType minor;
  public:
  DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
};



/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

template <typename Type, typename OffsetType=UINT16>
struct OffsetTo : Offset<OffsetType>
{
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  inline Type& serialize (hb_serialize_context_t *c, const void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c)) || neuter (c));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) const {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
template <typename Type> struct LOffsetTo : OffsetTo<Type, UINT32> {};
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }


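/* Illustrative sketch (not part of the upstream header): inside a struct that
 * holds an OffsetTo member, `this+member` resolves the offset relative to the
 * enclosing struct.  `ExampleHeader` and `ExampleChild` are hypothetical.
 *
 *   struct ExampleHeader
 *   {
 *     inline const ExampleChild& get_child (void) const { return this+child; }
 *     UINT16                 format;
 *     OffsetTo<ExampleChild> child;   // 0 means "no child" -> Null(ExampleChild)
 *     public:
 *     DEFINE_SIZE_STATIC (4);
 *   };
 */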
/*
 * Array Types
 */

/* An array with a number of elements. */
template <typename Type, typename LenType=UINT16>
struct ArrayOf
{
  const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
  {
    unsigned int count = len;
    if (unlikely (start_offset > count))
      count = 0;
    else
      count -= start_offset;
    count = MIN (count, *pcount);
    *pcount = count;
    return array + start_offset;
  }

  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Null(Type);
    return array[i];
  }
  inline Type& operator [] (unsigned int i)
  {
    return array[i];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + len * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!c->extend (*this))) return_trace (false);
    return_trace (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!serialize (c, items_len))) return_trace (false);
    for (unsigned int i = 0; i < items_len; i++)
      array[i] = items[i];
    items.advance (items_len);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }
  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base)))
        return_trace (false);
    return_trace (true);
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base, user_data)))
        return_trace (false);
    return_trace (true);
  }

  template <typename SearchType>
  inline int lsearch (const SearchType &x) const
  {
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (!this->array[i].cmp (x))
        return i;
    return -1;
  }

  private:
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (len.sanitize (c) && c->check_array (array, Type::static_size, len));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
template <typename Type> struct LArrayOf : ArrayOf<Type, UINT32> {};

/* Array of Offset's */
template <typename Type, typename OffsetType=UINT16>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};

/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};


/* An array starting at second element. */
template <typename Type, typename LenType=UINT16>
struct HeadlessArrayOf
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return_trace (true);
    if (unlikely (!c->extend (*this))) return_trace (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }

  private:
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (len.sanitize (c) &&
                  (!len || c->check_array (array, Type::static_size, len - 1)));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};


/*
 * An array with sorted elements.  Supports binary searching.
 */
template <typename Type, typename LenType=UINT16>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
  template <typename SearchType>
  inline int bsearch (const SearchType &x) const
  {
    /* Hand-coded bsearch here since this is in the hot inner loop. */
    const Type *array = this->array;
    int min = 0, max = (int) this->len - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      int c = array[mid].cmp (x);
      if (c < 0)
        max = mid - 1;
      else if (c > 0)
        min = mid + 1;
      else
        return mid;
    }
    return -1;
  }
};

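/* Illustrative sketch (not part of the upstream header): SortedArrayOf is
 * used for wire-format arrays the spec requires to be sorted, so lookups can
 * binary-search via each element's cmp().  The types here are hypothetical.
 *
 *   const SortedArrayOf<ExampleRecord> &records = ...;
 *   int i = records.bsearch (wanted_tag);    // index of the match, or -1
 *   if (i != -1) use (records[(unsigned int) i]);
 */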
/*
 * Binary-search arrays
 */

struct BinSearchHeader
{
  inline operator uint32_t (void) const { return len; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  UINT16	len;
  UINT16	searchRangeZ;
  UINT16	entrySelectorZ;
  UINT16	rangeShiftZ;

  public:
  DEFINE_SIZE_STATIC (8);
};

template <typename Type>
struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader> {};


/* Lazy struct and blob loaders. */

/* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
template <typename T>
struct hb_lazy_loader_t
{
  inline void init (hb_face_t *face_)
  {
    face = face_;
    instance = nullptr;
  }

  inline void fini (void)
  {
    if (instance && instance != &OT::Null(T))
    {
      instance->fini();
      free (instance);
    }
  }

  inline const T* get (void) const
  {
  retry:
    T *p = (T *) hb_atomic_ptr_get (&instance);
    if (unlikely (!p))
    {
      p = (T *) calloc (1, sizeof (T));
      if (unlikely (!p))
        p = const_cast<T *> (&OT::Null(T));
      else
        p->init (face);
      if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p)))
      {
        if (p != &OT::Null(T))
          p->fini ();
        goto retry;
      }
    }
    return p;
  }

  inline const T* operator-> (void) const
  {
    return get ();
  }

  private:
  hb_face_t *face;
  T *instance;
};

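/* Illustrative sketch (not part of the upstream header): a lazy loader is
 * embedded in a longer-lived object and only materializes its payload on the
 * first get()/operator->; hb_lazy_table_loader_t below does the same for a
 * sanitized table blob.  The member and payload type are hypothetical.
 *
 *   struct example_shaper_data_t
 *   {
 *     hb_lazy_loader_t<example_payload_t> payload;   // created on first use
 *   };
 *   data->payload.init (face);
 *   const example_payload_t *p = data->payload.get ();  // calloc + init, cached
 *   data->payload.fini ();
 */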
/* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
template <typename T>
struct hb_lazy_table_loader_t
{
  inline void init (hb_face_t *face_)
  {
    face = face_;
    instance = nullptr;
    blob = nullptr;
  }

  inline void fini (void)
  {
    hb_blob_destroy (blob);
  }

  inline const T* get (void) const
  {
  retry:
    T *p = (T *) hb_atomic_ptr_get (&instance);
    if (unlikely (!p))
    {
      hb_blob_t *blob_ = OT::Sanitizer<T>::sanitize (face->reference_table (T::tableTag));
      p = const_cast<T *>(OT::Sanitizer<T>::lock_instance (blob_));
      if (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p))
      {
        hb_blob_destroy (blob_);
        goto retry;
      }
      blob = blob_;
    }
    return p;
  }

  inline const T* operator-> (void) const
  {
    return get();
  }

  private:
  hb_face_t *face;
  T *instance;
  mutable hb_blob_t *blob;
};


} /* namespace OT */


#endif /* HB_OPEN_TYPE_PRIVATE_HH */