/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OPEN_TYPE_PRIVATE_HH
#define HB_OPEN_TYPE_PRIVATE_HH

#include "hb-private.hh"


namespace OT {



/*
 * Casts
 */

/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR(const TObject &X)
{ return reinterpret_cast<const Type&> (X); }
template<typename Type, typename TObject>
static inline Type& CastR(TObject &X)
{ return reinterpret_cast<Type&> (X); }

/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP(const TObject *X)
{ return reinterpret_cast<const Type*> (X); }
template<typename Type, typename TObject>
static inline Type* CastP(TObject *X)
{ return reinterpret_cast<Type*> (X); }

/* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
 * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset(const void *P, unsigned int offset)
{ return * reinterpret_cast<const Type*> ((const char *) P + offset); }
template<typename Type>
static inline Type& StructAtOffset(void *P, unsigned int offset)
{ return * reinterpret_cast<Type*> ((char *) P + offset); }

/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also.  X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter(const TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }
template<typename Type, typename TObject>
static inline Type& StructAfter(TObject &X)
{ return StructAtOffset<Type>(&X, X.get_size()); }



/*
 * Size checking
 */

/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    ASSERT_STATIC (_assertion); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (this->u._member.static_size == (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
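
/* Example (illustrative): a fixed-size struct declares its size once and the
 * assertion machinery above checks it when the method bodies are compiled:
 *
 *   struct TwoShorts          // hypothetical; not a real OpenType struct
 *   {
 *     USHORT first;
 *     USHORT second;
 *     public:
 *     DEFINE_SIZE_STATIC (4); // sizeof (TwoShorts) must be exactly 4
 *   };
 *
 * Structs ending in an open array use DEFINE_SIZE_ARRAY instead; see ArrayOf
 * further below. */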



/*
 * Null objects
 */

/* Global nul-content Null pool.  Enlarge as necessary. */
/* TODO This really should be an extern HB_INTERNAL and defined somewhere... */
static const void *_NullPool[(256+8) / sizeof (void *)];

/* Generic nul-content Null objects. */
template <typename Type>
static inline const Type& Null (void) {
  ASSERT_STATIC (sizeof (Type) <= sizeof (_NullPool));
  return *CastP<Type> (_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line exists so that the macro ends in a place needing a semicolon */ \
ASSERT_STATIC (Type::min_size + 1 <= sizeof (_Null##Type))

/* Accessor macro. */
#define Null(Type) Null<Type>()
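
/* Example (illustrative): Null(Type) yields a shared, read-only, zero-filled
 * instance, so out-of-bounds accessors can return a harmless reference:
 *
 *   const GlyphID &g = Null(GlyphID);   // reads as glyph id 0
 *
 * Types whose null object is not all-zeros (e.g. Tag and Index further below)
 * register their own data with DEFINE_NULL_DATA. */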


/*
 * Dispatch
 */

template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};
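
/* The sanitize context below, as well as other contexts defined elsewhere in
 * HarfBuzz, derive from hb_dispatch_context_t in CRTP fashion: each passes
 * itself as Context and supplies get_name(), dispatch() and
 * default_return_value(), which the generic table-walking code then calls. */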


/*
 * Sanitize
 */

#ifndef HB_DEBUG_SANITIZE
#define HB_DEBUG_SANITIZE (HB_DEBUG+0)
#endif


#define TRACE_SANITIZE(this) \
        hb_auto_trace_t<HB_DEBUG_SANITIZE, bool> trace \
        (&c->debug_depth, c->get_name (), this, HB_FUNC, \
         "");

/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif

struct hb_sanitize_context_t :
       hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
{
  inline hb_sanitize_context_t (void) :
        debug_depth (0),
        start (NULL), end (NULL),
        writable (false), edit_count (0),
        blob (NULL) {}

  inline const char *get_name (void) { return "SANITIZE"; }
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  static return_t no_dispatch_return_value (void) { return false; }
  bool stop_sublookup_iteration (const return_t r) const { return !r; }

  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, NULL);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));
  }

  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
                     "end [%p..%p] %u edit requests",
                     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = NULL;
    this->start = this->end = NULL;
  }

  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->start <= p && p <= this->end && (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    unsigned int array_size = record_size * len;
    bool ok = !overflows && this->check_range (base, array_size);

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
       p, p + (record_size * len), record_size, len, (unsigned int) array_size,
       this->start, this->end,
       overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  template <typename Type, typename ValueType>
  inline bool try_set (const Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      const_cast<Type *> (obj)->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth;
  const char *start, *end;
  bool writable;
  unsigned int edit_count;
  hb_blob_t *blob;
};



/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  static hb_blob_t *sanitize (hb_blob_t *blob) {
    hb_sanitize_context_t c[1];
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, c->start, "start");

    c->start_processing ();

    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      if (c->edit_count) {
        DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);

        /* sanitize again to ensure no toe-stepping */
        c->edit_count = 0;
        sane = t->sanitize (c);
        if (c->edit_count) {
          DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILING", c->edit_count);
          sane = false;
        }
      }
    } else {
      unsigned int edit_count = c->edit_count;
      if (edit_count && !c->writable) {
        c->start = hb_blob_get_data_writable (blob, NULL);
        c->end = c->start + hb_blob_get_length (blob);

        if (c->start) {
          c->writable = true;
          /* ok, we made it writable by relocating.  try again */
          DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
          goto retry;
        }
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, NULL);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }
};
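
/* Example (illustrative sketch of typical use): a blob is sanitized once when
 * it is loaded, then accessed without further checks:
 *
 *   hb_blob_t *sane_blob = Sanitizer<SomeTable>::sanitize (raw_blob); // takes ownership of raw_blob
 *   const SomeTable *table = Sanitizer<SomeTable>::lock_instance (sane_blob);
 *
 * SomeTable stands for any OT:: struct that implements sanitize(); on failure
 * sanitize() hands back the empty blob and lock_instance() then falls back to
 * Null(SomeTable). */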



/*
 * Serialize
 */

#ifndef HB_DEBUG_SERIALIZE
#define HB_DEBUG_SERIALIZE (HB_DEBUG+0)
#endif


#define TRACE_SERIALIZE(this) \
        hb_auto_trace_t<HB_DEBUG_SERIALIZE, bool> trace \
        (&c->debug_depth, "SERIALIZE", c, HB_FUNC, \
         "");


struct hb_serialize_context_t
{
  inline hb_serialize_context_t (void *start_, unsigned int size)
  {
    this->start = (char *) start_;
    this->end = this->start + size;

    this->ran_out_of_room = false;
    this->head = this->start;
    this->debug_depth = 0;
  }

  template <typename Type>
  inline Type *start_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
                     "start [%p..%p] (%lu bytes)",
                     this->start, this->end,
                     (unsigned long) (this->end - this->start));

    return start_embed<Type> ();
  }

  inline void end_serialize (void)
  {
    DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
                     "end [%p..%p] serialized %d bytes; %s",
                     this->start, this->end,
                     (int) (this->head - this->start),
                     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not run out of room");

  }

  template <typename Type>
  inline Type *copy (void)
  {
    assert (!this->ran_out_of_room);
    unsigned int len = this->head - this->start;
    void *p = malloc (len);
    if (p)
      memcpy (p, this->start, len);
    return reinterpret_cast<Type *> (p);
  }

  template <typename Type>
  inline Type *allocate_size (unsigned int size)
  {
    if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
      this->ran_out_of_room = true;
      return NULL;
    }
    memset (this->head, 0, size);
    char *ret = this->head;
    this->head += size;
    return reinterpret_cast<Type *> (ret);
  }

  template <typename Type>
  inline Type *allocate_min (void)
  {
    return this->allocate_size<Type> (Type::min_size);
  }

  template <typename Type>
  inline Type *start_embed (void)
  {
    Type *ret = reinterpret_cast<Type *> (this->head);
    return ret;
  }

  template <typename Type>
  inline Type *embed (const Type &obj)
  {
    unsigned int size = obj.get_size ();
    Type *ret = this->allocate_size<Type> (size);
    if (unlikely (!ret)) return NULL;
    memcpy (ret, &obj, size);
    return ret;
  }

  template <typename Type>
  inline Type *extend_min (Type &obj)
  {
    unsigned int size = obj.min_size;
    assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  template <typename Type>
  inline Type *extend (Type &obj)
  {
    unsigned int size = obj.get_size ();
    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
    if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return NULL;
    return reinterpret_cast<Type *> (&obj);
  }

  inline void truncate (void *new_head)
  {
    assert (this->start < new_head && new_head <= this->head);
    this->head = (char *) new_head;
  }

  unsigned int debug_depth;
  char *start, *end, *head;
  bool ran_out_of_room;
};
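
/* Example (illustrative sketch): serialization writes into a caller-provided
 * buffer through this context; a user looks roughly like:
 *
 *   char buf[4096];
 *   hb_serialize_context_t c (buf, sizeof (buf));
 *   SomeTable *t = c.start_serialize<SomeTable> ();
 *   ... t->serialize (&c, ...) ...        // allocation failures set ran_out_of_room
 *   c.end_serialize ();
 *
 * SomeTable and its serialize() arguments are placeholders, not real API. */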

template <typename Type>
struct Supplier
{
  inline Supplier (const Type *array, unsigned int len_)
  {
    head = array;
    len = len_;
  }
  inline const Type operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Type ();
    return head[i];
  }

  inline void advance (unsigned int count)
  {
    if (unlikely (count > len))
      count = len;
    len -= count;
    head += count;
  }

  private:
  inline Supplier (const Supplier<Type> &); /* Disallow copy */
  inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */

  unsigned int len;
  const Type *head;
};




/*
 *
 * The OpenType Font File: Data Types
 */


/* "The following data types are used in the OpenType font file.
 *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */


template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 1>
{
  public:
  inline void set (Type V)
  {
    v = V;
  }
  inline operator Type (void) const
  {
    return v;
  }
  private: uint8_t v;
};
template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >>  8) & 0xFF;
    v[1] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] <<  8)
         + (v[1]      );
  }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 16)
         + (v[1] <<  8)
         + (v[2]      );
  }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 24)
         + (v[1] << 16)
         + (v[2] <<  8)
         + (v[3]      );
  }
  private: uint8_t v[4];
};
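
/* For example, BEInt<uint16_t, 2>::set (0x1234u) stores the bytes { 0x12, 0x34 }
 * in that order, i.e. most significant byte first, matching OpenType's
 * big-endian convention regardless of the host's native byte order. */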

/* Integer types in big-endian order and no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  inline int cmp (Type a) const
  {
    Type b = v;
    if (sizeof (Type) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef IntType<uint8_t,  1> BYTE;	/* 8-bit unsigned integer. */
typedef IntType<uint16_t, 2> USHORT;	/* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> SHORT;	/* 16-bit signed integer. */
typedef IntType<uint32_t, 4> ULONG;	/* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> LONG;	/* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;	/* 24-bit unsigned integer. */

/* 16-bit signed integer (SHORT) that describes a quantity in FUnits. */
typedef SHORT FWORD;

/* 16-bit unsigned integer (USHORT) that describes a quantity in FUnits. */
typedef USHORT UFWORD;

/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : SHORT
{
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???); }
  public:
  DEFINE_SIZE_STATIC (2);
};
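
/* A 2.14 value maps to a float by dividing by 16384: e.g. 0x7000 reads as 1.75
 * and 0xC000 as -1.0.  That is presumably what the commented-out conversion
 * helpers above would do once filled in. */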

/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  LONG major;
  ULONG minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : ULONG
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
DEFINE_NULL_DATA (Tag, "    ");

/* Glyph index number, same as uint16 (length = 16 bits) */
struct GlyphID : USHORT {
  static inline int cmp (const GlyphID *a, const GlyphID *b) { return b->USHORT::cmp (*a); }
  inline int cmp (hb_codepoint_t a) const { return (int) a - (int) *this; }
};

/* Script/language-system/feature index */
struct Index : USHORT {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
DEFINE_NULL_DATA (Index, "\xff\xff");

/* Offset, Null offset = 0 */
template <typename Type=USHORT>
struct Offset : Type
{
  inline bool is_null (void) const { return 0 == *this; }
  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};


/* CheckSum */
struct CheckSum : ULONG
{
  /* This is the reference implementation from the spec. */
  static inline uint32_t CalcTableChecksum (const ULONG *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    const ULONG *EndPtr = Table+((Length+3) & ~3) / ULONG::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4-byte aligned and have 4-byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const ULONG *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};


/*
 * Version Numbers
 */

template <typename FixedType=USHORT>
struct FixedVersion
{
  inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  FixedType major;
  FixedType minor;
  public:
  DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
};



/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

template <typename Type, typename OffsetType=USHORT>
struct OffsetTo : Offset<OffsetType>
{
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  inline Type& serialize (hb_serialize_context_t *c, const void *base)
  {
    Type *t = c->start_embed<Type> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return *t;
  }

  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c)) || neuter (c));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) const {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
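
/* Example (illustrative): with the operator+ overloads above, dereferencing an
 * offset stored relative to the start of its parent table reads naturally:
 *
 *   const SomeSubTable &sub = some_table + some_table.subTableOffset;
 *
 * where subTableOffset is an OffsetTo<SomeSubTable> member of some_table
 * (both placeholders).  A zero offset yields Null(SomeSubTable) rather than a
 * wild reference. */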


/*
 * Array Types
 */

/* An array with a number of elements. */
template <typename Type, typename LenType=USHORT>
struct ArrayOf
{
  const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
  {
    unsigned int count = len;
    if (unlikely (start_offset > count))
      count = 0;
    else
      count -= start_offset;
    count = MIN (count, *pcount);
    *pcount = count;
    return array + start_offset;
  }

  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len)) return Null(Type);
    return array[i];
  }
  inline Type& operator [] (unsigned int i)
  {
    return array[i];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + len * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!c->extend (*this))) return_trace (false);
    return_trace (true);
  }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!serialize (c, items_len))) return_trace (false);
    for (unsigned int i = 0; i < items_len; i++)
      array[i] = items[i];
    items.advance (items_len);
    return_trace (true);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }
  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base)))
        return_trace (false);
    return_trace (true);
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!array[i].sanitize (c, base, user_data)))
        return_trace (false);
    return_trace (true);
  }

  template <typename SearchType>
  inline int lsearch (const SearchType &x) const
  {
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (!this->array[i].cmp (x))
        return i;
    return -1;
  }

  private:
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_array (array, Type::static_size, len));
  }

  public:
  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
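
/* In memory an ArrayOf<Type> is just a LenType element count followed
 * immediately by that many Type records.  For example, an ArrayOf<USHORT>
 * holding { 5, 7 } occupies six bytes: 00 02 00 05 00 07 (big-endian count,
 * then the elements). */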

/* Array of Offsets */
template <typename Type>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type> > {};

/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this));
  }
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};


/* An array starting at the second element. */
template <typename Type, typename LenType=USHORT>
struct HeadlessArrayOf
{
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1];
  }
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  inline bool serialize (hb_serialize_context_t *c,
                         Supplier<Type> &items,
                         unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return_trace (true);
    if (unlikely (!c->extend (*this))) return_trace (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items.advance (items_len - 1);
    return_trace (true);
  }

  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    return c->check_struct (this)
        && c->check_array (this, Type::static_size, len);
  }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), i.e. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }

  LenType len;
  Type array[VAR];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};


/* An array with sorted elements.  Supports binary searching. */
template <typename Type, typename LenType=USHORT>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
  template <typename SearchType>
  inline int bsearch (const SearchType &x) const
  {
    /* Hand-coded bsearch here since this is in the hot inner loop. */
    int min = 0, max = (int) this->len - 1;
    while (min <= max)
    {
      int mid = (min + max) / 2;
      int c = this->array[mid].cmp (x);
      if (c < 0)
        max = mid - 1;
      else if (c > 0)
        min = mid + 1;
      else
        return mid;
    }
    return -1;
  }
};
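
/* Example (illustrative): given a SortedArrayOf<GlyphID> glyphs sorted in
 * ascending order, glyphs.bsearch (gid) returns the index of gid or -1,
 * relying on GlyphID::cmp() above; glyph Coverage lookups are a typical
 * client of this. */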


} /* namespace OT */


#endif /* HB_OPEN_TYPE_PRIVATE_HH */