1 /*
2  * Copyright © 2007,2008,2009  Red Hat, Inc.
3  * Copyright © 2010,2012  Google, Inc.
4  *
5  *  This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
31 
32 #include "hb.hh"
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
35 #include "hb-set.hh"
36 #include "hb-bimap.hh"
37 
38 
39 #ifndef HB_MAX_NESTING_LEVEL
40 #define HB_MAX_NESTING_LEVEL	6
41 #endif
42 #ifndef HB_MAX_CONTEXT_LENGTH
43 #define HB_MAX_CONTEXT_LENGTH	64
44 #endif
45 #ifndef HB_CLOSURE_MAX_STAGES
46 /*
47  * The maximum number of times a lookup can be applied during shaping.
48  * Used to limit the number of iterations of the closure algorithm.
49  * This must be larger than the number of times add_pause() is
50  * called in a collect_features call of any shaper.
51  */
52 #define HB_CLOSURE_MAX_STAGES	32
53 #endif
54 
55 #ifndef HB_MAX_SCRIPTS
56 #define HB_MAX_SCRIPTS	500
57 #endif
58 
59 #ifndef HB_MAX_LANGSYS
60 #define HB_MAX_LANGSYS	2000
61 #endif
62 
63 
64 namespace OT {
65 
66 
67 #define NOT_COVERED		((unsigned int) -1)
68 
69 
70 template<typename Iterator>
71 static inline void Coverage_serialize (hb_serialize_context_t *c,
72 				       Iterator it);
73 
74 template<typename Iterator>
75 static inline void ClassDef_serialize (hb_serialize_context_t *c,
76                                        Iterator it);
77 
78 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
79                                           const hb_set_t &glyphset,
80                                           const hb_map_t &gid_klass_map,
81                                           hb_sorted_vector_t<HBGlyphID> glyphs,
82                                           hb_sorted_vector_t<unsigned> klasses,
83                                           hb_map_t *klass_map /*INOUT*/);
84 
85 
86 template<typename OutputArray>
87 struct subset_offset_array_t
88 {
89   subset_offset_array_t
90   (hb_subset_context_t *subset_context,
91    OutputArray& out,
92    const void *src_base,
93    const void *dest_base)
94       : _subset_context(subset_context), _out (out), _src_base (src_base), _dest_base (dest_base) {}
95 
96   template <typename T>
97   bool
98   operator ()
99   (T&& offset)
100   {
101     auto *o = _out.serialize_append (_subset_context->serializer);
102     if (unlikely (!o)) return false;
103     auto snap = _subset_context->serializer->snapshot ();
104     bool ret = o->serialize_subset (_subset_context, offset, _src_base, _dest_base);
105     if (!ret)
106     {
107       _out.pop ();
108       _subset_context->serializer->revert (snap);
109     }
110     return ret;
111   }
112 
113   private:
114   hb_subset_context_t *_subset_context;
115   OutputArray &_out;
116   const void *_src_base;
117   const void *_dest_base;
118 };
119 
120 /*
121  * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
122  * and discards the offset in the array if the subset operation results in an empty
123  * thing.
124  */
125 struct
126 {
127   template<typename OutputArray>
128   subset_offset_array_t<OutputArray>
129   operator ()
130   (hb_subset_context_t *subset_context,
131    OutputArray& out,
132    const void *src_base,
133    const void *dest_base) const
134   {
135     return subset_offset_array_t<OutputArray> (subset_context, out, src_base, dest_base);
136   }
137 }
138 HB_FUNCOBJ (subset_offset_array);
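/* A minimal usage sketch (illustrative; 'offsets' is a hypothetical member,
 * written in the hb-iter pipeline style used throughout the subsetter):
 *
 *   + hb_iter (offsets)
 *   | hb_apply (subset_offset_array (c, out->offsets, this, out))
 *   ;
 *
 * Each offset is subsetted via serialize_subset (); when the target
 * serializes to nothing, the appended offset is popped and the serializer
 * is reverted to the snapshot taken beforehand. */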
139 
140 /*
141  *
142  * OpenType Layout Common Table Formats
143  *
144  */
145 
146 
147 /*
148  * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
149  */
150 
151 struct Record_sanitize_closure_t {
152   hb_tag_t tag;
153   const void *list_base;
154 };
155 
156 struct RecordList_subset_context_t {
157 
158   RecordList_subset_context_t() : script_count (0), langsys_count (0)
159   {}
160 
161   bool visitScript ()
162   {
163     return script_count++ < HB_MAX_SCRIPTS;
164   }
165 
166   bool visitLangSys ()
167   {
168     return langsys_count++ < HB_MAX_LANGSYS;
169   }
170 
171   private:
172   unsigned int script_count;
173   unsigned int langsys_count;
174 };
175 
176 template <typename Type>
177 struct Record
178 {
179   int cmp (hb_tag_t a) const { return tag.cmp (a); }
180 
181   bool sanitize (hb_sanitize_context_t *c, const void *base) const
182   {
183     TRACE_SANITIZE (this);
184     const Record_sanitize_closure_t closure = {tag, base};
185     return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
186   }
187 
188   Tag		tag;		/* 4-byte Tag identifier */
189   OffsetTo<Type>
190 		offset;		/* Offset from beginning of object holding
191 				 * the Record */
192   public:
193   DEFINE_SIZE_STATIC (6);
194 };
195 
196 template <typename Type>
197 struct RecordArrayOf : SortedArrayOf<Record<Type>>
198 {
199   const OffsetTo<Type>& get_offset (unsigned int i) const
200   { return (*this)[i].offset; }
201   OffsetTo<Type>& get_offset (unsigned int i)
202   { return (*this)[i].offset; }
203   const Tag& get_tag (unsigned int i) const
204   { return (*this)[i].tag; }
205   unsigned int get_tags (unsigned int start_offset,
206 			 unsigned int *record_count /* IN/OUT */,
207 			 hb_tag_t     *record_tags /* OUT */) const
208   {
209     if (record_count) {
210       const Record<Type> *arr = this->sub_array (start_offset, record_count);
211       unsigned int count = *record_count;
212       for (unsigned int i = 0; i < count; i++)
213 	record_tags[i] = arr[i].tag;
214     }
215     return this->len;
216   }
217   bool find_index (hb_tag_t tag, unsigned int *index) const
218   {
219     return this->bfind (tag, index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
220   }
221 };
222 
223 template <typename Type>
224 struct RecordListOf : RecordArrayOf<Type>
225 {
226   const Type& operator [] (unsigned int i) const
227   { return this+this->get_offset (i); }
228 
229   bool subset (hb_subset_context_t *c) const
230   {
231     TRACE_SUBSET (this);
232     auto *out = c->serializer->start_embed (*this);
233     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
234 
235     RecordList_subset_context_t record_list_context;
236 
237     unsigned int count = this->len;
238     for (unsigned int i = 0; i < count; i++)
239     {
240       auto *record = out->serialize_append (c->serializer);
241       if (unlikely (!record)) return false;
242       auto snap = c->serializer->snapshot ();
243       if (record->offset.serialize_subset (c, this->get_offset (i), this, out, &record_list_context))
244       {
245         record->tag = this->get_tag(i);
246         continue;
247       }
248       out->pop ();
249       c->serializer->revert (snap);
250     }
251 
252     return_trace (true);
253   }
254 
255   bool sanitize (hb_sanitize_context_t *c) const
256   {
257     TRACE_SANITIZE (this);
258     return_trace (RecordArrayOf<Type>::sanitize (c, this));
259   }
260 };
261 
262 
263 struct RangeRecord
264 {
265   int cmp (hb_codepoint_t g) const
266   { return g < start ? -1 : g <= end ? 0 : +1; }
267 
268   bool sanitize (hb_sanitize_context_t *c) const
269   {
270     TRACE_SANITIZE (this);
271     return_trace (c->check_struct (this));
272   }
273 
274   bool intersects (const hb_set_t *glyphs) const
275   { return glyphs->intersects (start, end); }
276 
277   template <typename set_t>
278   bool add_coverage (set_t *glyphs) const
279   { return glyphs->add_range (start, end); }
280 
281   HBGlyphID	start;		/* First GlyphID in the range */
282   HBGlyphID	end;		/* Last GlyphID in the range */
283   HBUINT16	value;		/* Value */
284   public:
285   DEFINE_SIZE_STATIC (6);
286 };
287 DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);
288 
289 
290 struct IndexArray : ArrayOf<Index>
291 {
292   unsigned int get_indexes (unsigned int start_offset,
293 			    unsigned int *_count /* IN/OUT */,
294 			    unsigned int *_indexes /* OUT */) const
295   {
296     if (_count) {
297       const HBUINT16 *arr = this->sub_array (start_offset, _count);
298       unsigned int count = *_count;
299       for (unsigned int i = 0; i < count; i++)
300 	_indexes[i] = arr[i];
301     }
302     return this->len;
303   }
304 
305   void add_indexes_to (hb_set_t* output /* OUT */) const
306   {
307     output->add_array (arrayZ, len);
308   }
309 };
310 
311 
312 struct Script;
313 struct LangSys;
314 struct Feature;
315 
316 struct LangSys
317 {
318   unsigned int get_feature_count () const
319   { return featureIndex.len; }
320   hb_tag_t get_feature_index (unsigned int i) const
321   { return featureIndex[i]; }
322   unsigned int get_feature_indexes (unsigned int start_offset,
323 				    unsigned int *feature_count /* IN/OUT */,
324 				    unsigned int *feature_indexes /* OUT */) const
325   { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
326   void add_feature_indexes_to (hb_set_t *feature_indexes) const
327   { featureIndex.add_indexes_to (feature_indexes); }
328 
329   bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
330   unsigned int get_required_feature_index () const
331   {
332     if (reqFeatureIndex == 0xFFFFu)
333       return Index::NOT_FOUND_INDEX;
334    return reqFeatureIndex;
335   }
336 
337   LangSys* copy (hb_serialize_context_t *c) const
338   {
339     TRACE_SERIALIZE (this);
340     return_trace (c->embed (*this));
341   }
342 
343   bool sanitize (hb_sanitize_context_t *c,
344 		 const Record_sanitize_closure_t * = nullptr) const
345   {
346     TRACE_SANITIZE (this);
347     return_trace (c->check_struct (this) && featureIndex.sanitize (c));
348   }
349 
350   Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
351 				 * reordering table) */
352   HBUINT16	reqFeatureIndex;/* Index of a feature required for this
353 				 * language system--if no required features
354 				 * = 0xFFFFu */
355   IndexArray	featureIndex;	/* Array of indices into the FeatureList */
356   public:
357   DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
358 };
359 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
360 
361 struct Script
362 {
363   unsigned int get_lang_sys_count () const
364   { return langSys.len; }
365   const Tag& get_lang_sys_tag (unsigned int i) const
366   { return langSys.get_tag (i); }
367   unsigned int get_lang_sys_tags (unsigned int start_offset,
368 				  unsigned int *lang_sys_count /* IN/OUT */,
369 				  hb_tag_t     *lang_sys_tags /* OUT */) const
370   { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
371   const LangSys& get_lang_sys (unsigned int i) const
372   {
373     if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
374     return this+langSys[i].offset;
375   }
376   bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
377   { return langSys.find_index (tag, index); }
378 
379   bool has_default_lang_sys () const           { return defaultLangSys != 0; }
380   const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }
381 
382   bool subset (hb_subset_context_t *c, RecordList_subset_context_t *record_list_context) const
383   {
384     TRACE_SUBSET (this);
385     if (!record_list_context->visitScript ()) return_trace (false);
386 
387     auto *out = c->serializer->start_embed (*this);
388     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
389 
390     out->defaultLangSys.serialize_copy (c->serializer, defaultLangSys, this, out);
391 
392     for (const auto &src: langSys)
393     {
394       if (!record_list_context->visitLangSys ()) {
395         continue;
396       }
397 
398       auto snap = c->serializer->snapshot ();
399       auto *lang_sys = c->serializer->embed (src);
400 
401       if (likely(lang_sys)
402           && lang_sys->offset.serialize_copy (c->serializer, src.offset, this, out))
403       {
404         out->langSys.len++;
405         continue;
406       }
407       c->serializer->revert (snap);
408     }
409     return_trace (true);
410   }
411 
412   bool sanitize (hb_sanitize_context_t *c,
413 		 const Record_sanitize_closure_t * = nullptr) const
414   {
415     TRACE_SANITIZE (this);
416     return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
417   }
418 
419   protected:
420   OffsetTo<LangSys>
421 		defaultLangSys;	/* Offset to DefaultLangSys table--from
422 				 * beginning of Script table--may be Null */
423   RecordArrayOf<LangSys>
424 		langSys;	/* Array of LangSysRecords--listed
425 				 * alphabetically by LangSysTag */
426   public:
427   DEFINE_SIZE_ARRAY_SIZED (4, langSys);
428 };
429 
430 typedef RecordListOf<Script> ScriptList;
431 
432 
433 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
434 struct FeatureParamsSize
435 {
436   bool sanitize (hb_sanitize_context_t *c) const
437   {
438     TRACE_SANITIZE (this);
439     if (unlikely (!c->check_struct (this))) return_trace (false);
440 
441     /* This subtable has some "history", if you will.  Some earlier versions of
442      * Adobe tools calculated the offset of the FeatureParams subtable from the
443      * beginning of the FeatureList table!  Now, that is dealt with in the
444      * Feature implementation.  But we still need to be able to tell junk from
445      * real data.  Note: We don't check that the nameID actually exists.
446      *
447      * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
448      *
449      * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
450      * coming out soon, and that the makeotf program will build a font with a
451      * 'size' feature that is correct by the specification.
452      *
453      * The specification for this feature tag is in the "OpenType Layout Tag
454      * Registry". You can see a copy of this at:
455      * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
456      *
457      * Here is one set of rules to determine if the 'size' feature is built
458      * correctly, or as by the older versions of MakeOTF. You may be able to do
459      * better.
460      *
461      * Assume that the offset to the size feature is according to specification,
462      * and make the following value checks. If it fails, assume the size
463      * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
464      * If this fails, reject the 'size' feature. The older makeOTF's calculated the
465      * offset from the beginning of the FeatureList table, rather than from the
466      * beginning of the 'size' Feature table.
467      *
468      * If "design size" == 0:
469      *     fails check
470      *
471      * Else if ("subfamily identifier" == 0 and
472      *     "range start" == 0 and
473      *     "range end" == 0 and
474      *     "range start" == 0 and
475      *     "menu name ID" == 0)
476      *     passes check: this is the format used when there is a design size
477      * specified, but there is no recommended size range.
478      *
479      * Else if ("design size" <  "range start" or
480      *     "design size" >   "range end" or
481      *     "range end" <= "range start" or
482      *     "menu name ID"  < 256 or
483      *     "menu name ID"  > 32767 or
484      *     menu name ID is not a name ID which is actually in the name table)
485      *     fails test
486      * Else
487      *     passes test.
488      */
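    /* Worked example of the rules above: designSize = 100 (10 pt) with
     * subfamilyID, subfamilyNameID, rangeStart and rangeEnd all zero passes
     * (a design size with no recommended range); designSize = 100 with
     * rangeStart = 80, rangeEnd = 120 and subfamilyNameID = 300 also passes,
     * assuming name ID 300 actually exists in the 'name' table. */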
489 
490     if (!designSize)
491       return_trace (false);
492     else if (subfamilyID == 0 &&
493 	     subfamilyNameID == 0 &&
494 	     rangeStart == 0 &&
495 	     rangeEnd == 0)
496       return_trace (true);
497     else if (designSize < rangeStart ||
498 	     designSize > rangeEnd ||
499 	     subfamilyNameID < 256 ||
500 	     subfamilyNameID > 32767)
501       return_trace (false);
502     else
503       return_trace (true);
504   }
505 
506   HBUINT16	designSize;	/* Represents the design size in 720/inch
507 				 * units (decipoints).  The design size entry
508 				 * must be non-zero.  When there is a design
509 				 * size but no recommended size range, the
510 				 * rest of the array will consist of zeros. */
511   HBUINT16	subfamilyID;	/* Has no independent meaning, but serves
512 				 * as an identifier that associates fonts
513 				 * in a subfamily. All fonts which share a
514 				 * Preferred or Font Family name and which
515 				 * differ only by size range shall have the
516 				 * same subfamily value, and no fonts which
517 				 * differ in weight or style shall have the
518 				 * same subfamily value. If this value is
519 				 * zero, the remaining fields in the array
520 				 * will be ignored. */
521   NameID	subfamilyNameID;/* If the preceding value is non-zero, this
522 				 * value must be set in the range 256 - 32767
523 				 * (inclusive). It records the value of a
524 				 * field in the name table, which must
525 				 * contain English-language strings encoded
526 				 * in Windows Unicode and Macintosh Roman,
527 				 * and may contain additional strings
528 				 * localized to other scripts and languages.
529 				 * Each of these strings is the name an
530 				 * application should use, in combination
531 				 * with the family name, to represent the
532 				 * subfamily in a menu.  Applications will
533 				 * choose the appropriate version based on
534 				 * their selection criteria. */
535   HBUINT16	rangeStart;	/* Small end of the recommended usage range
536 				 * (exclusive), stored in 720/inch units
537 				 * (decipoints). */
538   HBUINT16	rangeEnd;	/* Large end of the recommended usage range
539 				 * (inclusive), stored in 720/inch units
540 				 * (decipoints). */
541   public:
542   DEFINE_SIZE_STATIC (10);
543 };
544 
545 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
546 struct FeatureParamsStylisticSet
547 {
548   bool sanitize (hb_sanitize_context_t *c) const
549   {
550     TRACE_SANITIZE (this);
551     /* Right now minorVersion is at zero.  Which means, any table supports
552      * the uiNameID field. */
553     return_trace (c->check_struct (this));
554   }
555 
556   HBUINT16	version;	/* (set to 0): This corresponds to a “minor”
557 				 * version number. Additional data may be
558 				 * added to the end of this Feature Parameters
559 				 * table in the future. */
560 
561   NameID	uiNameID;	/* The 'name' table name ID that specifies a
562 				 * string (or strings, for multiple languages)
563 				 * for a user-interface label for this
564 				 * feature.  The values of uiLabelNameId and
565 				 * sampleTextNameId are expected to be in the
566 				 * font-specific name ID range (256-32767),
567 				 * though that is not a requirement in this
568 				 * Feature Parameters specification. The
569 				 * user-interface label for the feature can
570 				 * be provided in multiple languages. An
571 				 * English string should be included as a
572 				 * fallback. The string should be kept to a
573 				 * minimal length to fit comfortably with
574 				 * different application interfaces. */
575   public:
576   DEFINE_SIZE_STATIC (4);
577 };
578 
579 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
580 struct FeatureParamsCharacterVariants
581 {
582   bool sanitize (hb_sanitize_context_t *c) const
583   {
584     TRACE_SANITIZE (this);
585     return_trace (c->check_struct (this) &&
586 		  characters.sanitize (c));
587   }
588 
589   HBUINT16	format;			/* Format number is set to 0. */
590   NameID	featUILableNameID;	/* The ‘name’ table name ID that
591 					 * specifies a string (or strings,
592 					 * for multiple languages) for a
593 					 * user-interface label for this
594 					 * feature. (May be NULL.) */
595   NameID	featUITooltipTextNameID;/* The ‘name’ table name ID that
596 					 * specifies a string (or strings,
597 					 * for multiple languages) that an
598 					 * application can use for tooltip
599 					 * text for this feature. (May be
600 					 * nullptr.) */
601   NameID	sampleTextNameID;	/* The ‘name’ table name ID that
602 					 * specifies sample text that
603 					 * illustrates the effect of this
604 					 * feature. (May be NULL.) */
605   HBUINT16	numNamedParameters;	/* Number of named parameters. (May
606 					 * be zero.) */
607   NameID	firstParamUILabelNameID;/* The first ‘name’ table name ID
608 					 * used to specify strings for
609 					 * user-interface labels for the
610 					 * feature parameters. (Must be zero
611 					 * if numParameters is zero.) */
612   ArrayOf<HBUINT24>
613 		characters;		/* Array of the Unicode Scalar Value
614 					 * of the characters for which this
615 					 * feature provides glyph variants.
616 					 * (May be zero.) */
617   public:
618   DEFINE_SIZE_ARRAY (14, characters);
619 };
620 
621 struct FeatureParams
622 {
623   bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
624   {
625 #ifdef HB_NO_LAYOUT_FEATURE_PARAMS
626     return true;
627 #endif
628     TRACE_SANITIZE (this);
629     if (tag == HB_TAG ('s','i','z','e'))
630       return_trace (u.size.sanitize (c));
631     if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
632       return_trace (u.stylisticSet.sanitize (c));
633     if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
634       return_trace (u.characterVariants.sanitize (c));
635     return_trace (true);
636   }
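  /* Note on the tag matching above: (tag & 0xFFFF0000u) keeps only the first
   * two bytes of the tag, so e.g. HB_TAG ('s','s','0','1') matches
   * HB_TAG ('s','s','\0','\0') and HB_TAG ('c','v','9','9') matches
   * HB_TAG ('c','v','\0','\0'). */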
637 
638 #ifndef HB_NO_LAYOUT_FEATURE_PARAMS
639   const FeatureParamsSize& get_size_params (hb_tag_t tag) const
640   {
641     if (tag == HB_TAG ('s','i','z','e'))
642       return u.size;
643     return Null (FeatureParamsSize);
644   }
645   const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
646   {
647     if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
648       return u.stylisticSet;
649     return Null (FeatureParamsStylisticSet);
650   }
651   const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
652   {
653     if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
654       return u.characterVariants;
655     return Null (FeatureParamsCharacterVariants);
656   }
657 #endif
658 
659   private:
660   union {
661   FeatureParamsSize			size;
662   FeatureParamsStylisticSet		stylisticSet;
663   FeatureParamsCharacterVariants	characterVariants;
664   } u;
665   public:
666   DEFINE_SIZE_MIN (0);
667 };
668 
669 struct Feature
670 {
671   unsigned int get_lookup_count () const
672   { return lookupIndex.len; }
673   hb_tag_t get_lookup_index (unsigned int i) const
674   { return lookupIndex[i]; }
675   unsigned int get_lookup_indexes (unsigned int start_index,
676 				   unsigned int *lookup_count /* IN/OUT */,
677 				   unsigned int *lookup_tags /* OUT */) const
678   { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
679   void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
680   { lookupIndex.add_indexes_to (lookup_indexes); }
681 
682   const FeatureParams &get_feature_params () const
683   { return this+featureParams; }
684 
685   bool subset (hb_subset_context_t *c, RecordList_subset_context_t *r) const
686   {
687     TRACE_SUBSET (this);
688     auto *out = c->serializer->embed (*this);
689     if (unlikely (!out)) return_trace (false);
690     out->featureParams = 0; /* TODO(subset) FeatureParams. */
691     return_trace (true);
692   }
693 
694   bool sanitize (hb_sanitize_context_t *c,
695 		 const Record_sanitize_closure_t *closure = nullptr) const
696   {
697     TRACE_SANITIZE (this);
698     if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
699       return_trace (false);
700 
701     /* Some earlier versions of Adobe tools calculated the offset of the
702      * FeatureParams subtable from the beginning of the FeatureList table!
703      *
704      * If sanitizing "failed" for the FeatureParams subtable, try it with the
705      * alternative location.  We would know sanitize "failed" if old value
706      * of the offset was non-zero, but it's zeroed now.
707      *
708      * Only do this for the 'size' feature, since at the time of the faulty
709      * Adobe tools, only the 'size' feature had FeatureParams defined.
710      */
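    /* Illustration (hypothetical numbers): if this Feature starts 0x20 bytes
     * after the FeatureList and the stored offset is 0x50, the offset the
     * faulty tools intended is 0x50 - 0x20 = 0x30 from this Feature, which is
     * what the rebasing below reconstructs. */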
711 
712     if (likely (featureParams.is_null ()))
713       return_trace (true);
714 
715     unsigned int orig_offset = featureParams;
716     if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
717       return_trace (false);
718 
719     if (featureParams == 0 && closure &&
720 	closure->tag == HB_TAG ('s','i','z','e') &&
721 	closure->list_base && closure->list_base < this)
722     {
723       unsigned int new_offset_int = orig_offset -
724 				    (((char *) this) - ((char *) closure->list_base));
725 
726       OffsetTo<FeatureParams> new_offset;
727       /* Check that it would not overflow. */
728       new_offset = new_offset_int;
729       if (new_offset == new_offset_int &&
730 	  c->try_set (&featureParams, new_offset_int) &&
731 	  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
732 	return_trace (false);
733     }
734 
735     return_trace (true);
736   }
737 
738   OffsetTo<FeatureParams>
739 		 featureParams;	/* Offset to Feature Parameters table (if one
740 				 * has been defined for the feature), relative
741 				 * to the beginning of the Feature Table; = Null
742 				 * if not required */
743   IndexArray	 lookupIndex;	/* Array of LookupList indices */
744   public:
745   DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
746 };
747 
748 typedef RecordListOf<Feature> FeatureList;
749 
750 
751 struct LookupFlag : HBUINT16
752 {
753   enum Flags {
754     RightToLeft		= 0x0001u,
755     IgnoreBaseGlyphs	= 0x0002u,
756     IgnoreLigatures	= 0x0004u,
757     IgnoreMarks		= 0x0008u,
758     IgnoreFlags		= 0x000Eu,
759     UseMarkFilteringSet	= 0x0010u,
760     Reserved		= 0x00E0u,
761     MarkAttachmentType	= 0xFF00u
762   };
763   public:
764   DEFINE_SIZE_STATIC (2);
765 };
766 
767 } /* namespace OT */
768 /* This has to be outside the namespace. */
769 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
770 namespace OT {
771 
772 struct Lookup
773 {
774   unsigned int get_subtable_count () const { return subTable.len; }
775 
776   template <typename TSubTable>
777   const OffsetArrayOf<TSubTable>& get_subtables () const
778   { return CastR<OffsetArrayOf<TSubTable>> (subTable); }
779   template <typename TSubTable>
780   OffsetArrayOf<TSubTable>& get_subtables ()
781   { return CastR<OffsetArrayOf<TSubTable>> (subTable); }
782 
783   template <typename TSubTable>
784   const TSubTable& get_subtable (unsigned int i) const
785   { return this+get_subtables<TSubTable> ()[i]; }
786   template <typename TSubTable>
787   TSubTable& get_subtable (unsigned int i)
788   { return this+get_subtables<TSubTable> ()[i]; }
789 
790   unsigned int get_size () const
791   {
792     const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
793     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
794       return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
795     return (const char *) &markFilteringSet - (const char *) this;
796   }
797 
798   unsigned int get_type () const { return lookupType; }
799 
800   /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
801    * higher 16-bit is mark-filtering-set if the lookup uses one.
802    * Not to be confused with glyph_props which is very similar. */
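  /* For example, lookupFlag = 0x0010 (UseMarkFilteringSet) with a
   * mark-filtering-set index of 2 gives lookup_props = 0x00020010. */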
803   uint32_t get_props () const
804   {
805     unsigned int flag = lookupFlag;
806     if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
807     {
808       const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
809       flag += (markFilteringSet << 16);
810     }
811     return flag;
812   }
813 
814   template <typename TSubTable, typename context_t, typename ...Ts>
815   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
816   {
817     unsigned int lookup_type = get_type ();
818     TRACE_DISPATCH (this, lookup_type);
819     unsigned int count = get_subtable_count ();
820     for (unsigned int i = 0; i < count; i++) {
821       typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, hb_forward<Ts> (ds)...);
822       if (c->stop_sublookup_iteration (r))
823 	return_trace (r);
824     }
825     return_trace (c->default_return_value ());
826   }
827 
828   bool serialize (hb_serialize_context_t *c,
829 		  unsigned int lookup_type,
830 		  uint32_t lookup_props,
831 		  unsigned int num_subtables)
832   {
833     TRACE_SERIALIZE (this);
834     if (unlikely (!c->extend_min (*this))) return_trace (false);
835     lookupType = lookup_type;
836     lookupFlag = lookup_props & 0xFFFFu;
837     if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
838     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
839     {
840       if (unlikely (!c->extend (*this))) return_trace (false);
841       HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
842       markFilteringSet = lookup_props >> 16;
843     }
844     return_trace (true);
845   }
846 
847   template <typename TSubTable>
848   bool subset (hb_subset_context_t *c) const
849   {
850     TRACE_SUBSET (this);
851     auto *out = c->serializer->embed (*this);
852     if (unlikely (!out)) return_trace (false);
853 
854     /* Subset the actual subtables. */
855     /* TODO Drop empty ones, either by calling intersects() beforehand,
856      * or just dropping null offsets after. */
857     const OffsetArrayOf<TSubTable>& subtables = get_subtables<TSubTable> ();
858     OffsetArrayOf<TSubTable>& out_subtables = out->get_subtables<TSubTable> ();
859     unsigned int count = subTable.len;
860     for (unsigned int i = 0; i < count; i++)
861       out_subtables[i].serialize_subset (c, subtables[i], this, out, get_type ());
862 
863     return_trace (true);
864   }
865 
866   template <typename TSubTable>
867   bool sanitize (hb_sanitize_context_t *c) const
868   {
869     TRACE_SANITIZE (this);
870     if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
871     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
872     {
873       const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
874       if (!markFilteringSet.sanitize (c)) return_trace (false);
875     }
876 
877     if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
878       return_trace (false);
879 
880     if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
881     {
882       /* The spec says all subtables of an Extension lookup should
883        * have the same type, which shall not be the Extension type
884        * itself (but we already checked for that).
885        * This is specially important if one has a reverse type!
886        *
887        * We only do this if sanitizer edit_count is zero.  Otherwise,
888        * some of the subtables might have become insane after they
889        * were sanity-checked by the edits of subsequent subtables.
890        * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
891        */
892       unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
893       unsigned int count = get_subtable_count ();
894       for (unsigned int i = 1; i < count; i++)
895 	if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
896 	  return_trace (false);
897     }
898     return_trace (true);
899   }
900 
901   private:
902   HBUINT16	lookupType;		/* Different enumerations for GSUB and GPOS */
903   HBUINT16	lookupFlag;		/* Lookup qualifiers */
904   ArrayOf<Offset16>
905 		subTable;		/* Array of SubTables */
906 /*HBUINT16	markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
907 					 * structure. This field is only present if bit
908 					 * UseMarkFilteringSet of lookup flags is set. */
909   public:
910   DEFINE_SIZE_ARRAY (6, subTable);
911 };
912 
913 typedef OffsetListOf<Lookup> LookupList;
914 
915 
916 /*
917  * Coverage Table
918  */
919 
920 struct CoverageFormat1
921 {
922   friend struct Coverage;
923 
924   private:
925   unsigned int get_coverage (hb_codepoint_t glyph_id) const
926   {
927     unsigned int i;
928     glyphArray.bfind (glyph_id, &i, HB_BFIND_NOT_FOUND_STORE, NOT_COVERED);
929     return i;
930   }
931 
932   template <typename Iterator,
933       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
934   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
935   {
936     TRACE_SERIALIZE (this);
937     return_trace (glyphArray.serialize (c, glyphs));
938   }
939 
940   bool sanitize (hb_sanitize_context_t *c) const
941   {
942     TRACE_SANITIZE (this);
943     return_trace (glyphArray.sanitize (c));
944   }
945 
946   bool intersects (const hb_set_t *glyphs) const
947   {
948     /* TODO Speed up, using hb_set_next() and bsearch()? */
949     unsigned int count = glyphArray.len;
950     for (unsigned int i = 0; i < count; i++)
951       if (glyphs->has (glyphArray[i]))
952 	return true;
953     return false;
954   }
955   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
956   { return glyphs->has (glyphArray[index]); }
957 
958   template <typename set_t>
959   bool add_coverage (set_t *glyphs) const
960   { return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len); }
961 
962   public:
963   /* Older compilers need this to be public. */
964   struct iter_t
965   {
966     void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
967     void fini () {}
968     bool more () const { return i < c->glyphArray.len; }
969     void next () { i++; }
970     hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
971     bool operator != (const iter_t& o) const
972     { return i != o.i || c != o.c; }
973 
974     private:
975     const struct CoverageFormat1 *c;
976     unsigned int i;
977   };
978   private:
979 
980   protected:
981   HBUINT16	coverageFormat;	/* Format identifier--format = 1 */
982   SortedArrayOf<HBGlyphID>
983 		glyphArray;	/* Array of GlyphIDs--in numerical order */
984   public:
985   DEFINE_SIZE_ARRAY (4, glyphArray);
986 };
987 
988 struct CoverageFormat2
989 {
990   friend struct Coverage;
991 
992   private:
993   unsigned int get_coverage (hb_codepoint_t glyph_id) const
994   {
995     const RangeRecord &range = rangeRecord.bsearch (glyph_id);
996     return likely (range.start <= range.end) ?
997 	   (unsigned int) range.value + (glyph_id - range.start) :
998 	   NOT_COVERED;
999   }
1000 
1001   template <typename Iterator,
1002       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1003   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1004   {
1005     TRACE_SERIALIZE (this);
1006     if (unlikely (!c->extend_min (*this))) return_trace (false);
1007 
1008     if (unlikely (!glyphs))
1009     {
1010       rangeRecord.len = 0;
1011       return_trace (true);
1012     }
1013 
1014     /* TODO(iter) Write more efficiently? */
1015 
1016     unsigned num_ranges = 0;
1017     hb_codepoint_t last = (hb_codepoint_t) -2;
1018     for (auto g: glyphs)
1019     {
1020       if (last + 1 != g)
1021 	num_ranges++;
1022       last = g;
1023     }
1024 
1025     if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
1026 
1027     unsigned count = 0;
1028     unsigned range = (unsigned) -1;
1029     last = (hb_codepoint_t) -2;
1030     for (auto g: glyphs)
1031     {
1032       if (last + 1 != g)
1033       {
1034 	range++;
1035 	rangeRecord[range].start = g;
1036 	rangeRecord[range].value = count;
1037       }
1038       rangeRecord[range].end = g;
1039       last = g;
1040       count++;
1041     }
1042 
1043     return_trace (true);
1044   }
1045 
1046   bool sanitize (hb_sanitize_context_t *c) const
1047   {
1048     TRACE_SANITIZE (this);
1049     return_trace (rangeRecord.sanitize (c));
1050   }
1051 
1052   bool intersects (const hb_set_t *glyphs) const
1053   {
1054     /* TODO Speed up, using hb_set_next() and bsearch()? */
1055     unsigned int count = rangeRecord.len;
1056     for (unsigned int i = 0; i < count; i++)
1057       if (rangeRecord[i].intersects (glyphs))
1058 	return true;
1059     return false;
1060   }
1061   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1062   {
1063     unsigned int i;
1064     unsigned int count = rangeRecord.len;
1065     for (i = 0; i < count; i++) {
1066       const RangeRecord &range = rangeRecord[i];
1067       if (range.value <= index &&
1068 	  index < (unsigned int) range.value + (range.end - range.start) &&
1069 	  range.intersects (glyphs))
1070 	return true;
1071       else if (index < range.value)
1072 	return false;
1073     }
1074     return false;
1075   }
1076 
1077   template <typename set_t>
1078   bool add_coverage (set_t *glyphs) const
1079   {
1080     unsigned int count = rangeRecord.len;
1081     for (unsigned int i = 0; i < count; i++)
1082       if (unlikely (!rangeRecord[i].add_coverage (glyphs)))
1083 	return false;
1084     return true;
1085   }
1086 
1087   public:
1088   /* Older compilers need this to be public. */
1089   struct iter_t
1090   {
1091     void init (const CoverageFormat2 &c_)
1092     {
1093       c = &c_;
1094       coverage = 0;
1095       i = 0;
1096       j = c->rangeRecord.len ? c->rangeRecord[0].start : 0;
1097       if (unlikely (c->rangeRecord[0].start > c->rangeRecord[0].end))
1098       {
1099 	/* Broken table. Skip. */
1100 	i = c->rangeRecord.len;
1101       }
1102     }
1103     void fini () {}
1104     bool more () const { return i < c->rangeRecord.len; }
1105     void next ()
1106     {
1107       if (j >= c->rangeRecord[i].end)
1108       {
1109 	i++;
1110 	if (more ())
1111 	{
1112 	  unsigned int old = coverage;
1113 	  j = c->rangeRecord[i].start;
1114 	  coverage = c->rangeRecord[i].value;
1115 	  if (unlikely (coverage != old + 1))
1116 	  {
1117 	    /* Broken table. Skip. Important to avoid DoS.
1118 	     * Also, our callers depend on coverage being
1119 	     * consecutive and monotonically increasing,
1120 	     * ie. iota(). */
1121 	   i = c->rangeRecord.len;
1122 	   return;
1123 	  }
1124 	}
1125 	return;
1126       }
1127       coverage++;
1128       j++;
1129     }
1130     hb_codepoint_t get_glyph () const { return j; }
1131     bool operator != (const iter_t& o) const
1132     { return i != o.i || j != o.j || c != o.c; }
1133 
1134     private:
1135     const struct CoverageFormat2 *c;
1136     unsigned int i, coverage;
1137     hb_codepoint_t j;
1138   };
1139   private:
1140 
1141   protected:
1142   HBUINT16	coverageFormat;	/* Format identifier--format = 2 */
1143   SortedArrayOf<RangeRecord>
1144 		rangeRecord;	/* Array of glyph ranges--ordered by
1145 				 * Start GlyphID. rangeCount entries
1146 				 * long */
1147   public:
1148   DEFINE_SIZE_ARRAY (4, rangeRecord);
1149 };
1150 
1151 struct Coverage
1152 {
1153   /* Has interface. */
1154   static constexpr unsigned SENTINEL = NOT_COVERED;
1155   typedef unsigned int value_t;
1156   value_t operator [] (hb_codepoint_t k) const { return get (k); }
1157   bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
1158   /* Predicate. */
1159   bool operator () (hb_codepoint_t k) const { return has (k); }
1160 
1161   unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
1162   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1163   {
1164     switch (u.format) {
1165     case 1: return u.format1.get_coverage (glyph_id);
1166     case 2: return u.format2.get_coverage (glyph_id);
1167     default:return NOT_COVERED;
1168     }
1169   }
1170 
1171   template <typename Iterator,
1172       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1173   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1174   {
1175     TRACE_SERIALIZE (this);
1176     if (unlikely (!c->extend_min (*this))) return_trace (false);
1177 
1178     unsigned count = 0;
1179     unsigned num_ranges = 0;
1180     hb_codepoint_t last = (hb_codepoint_t) -2;
1181     for (auto g: glyphs)
1182     {
1183       if (last + 1 != g)
1184 	num_ranges++;
1185       last = g;
1186       count++;
1187     }
1188     u.format = count <= num_ranges * 3 ? 1 : 2;
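    /* Size heuristic: format 1 costs 2 bytes per glyph, format 2 costs 6 bytes
     * per range, so format 1 is chosen whenever 2 * count <= 6 * num_ranges,
     * i.e. count <= num_ranges * 3. */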
1189 
1190     switch (u.format)
1191     {
1192     case 1: return_trace (u.format1.serialize (c, glyphs));
1193     case 2: return_trace (u.format2.serialize (c, glyphs));
1194     default:return_trace (false);
1195     }
1196   }
1197 
1198   bool subset (hb_subset_context_t *c) const
1199   {
1200     TRACE_SUBSET (this);
1201     const hb_set_t &glyphset = *c->plan->glyphset ();
1202     const hb_map_t &glyph_map = *c->plan->glyph_map;
1203 
1204     auto it =
1205     + iter ()
1206     | hb_filter (glyphset)
1207     | hb_map_retains_sorting (glyph_map)
1208     ;
1209 
1210     bool ret = bool (it);
1211     Coverage_serialize (c->serializer, it);
1212     return_trace (ret);
1213   }
1214 
1215   bool sanitize (hb_sanitize_context_t *c) const
1216   {
1217     TRACE_SANITIZE (this);
1218     if (!u.format.sanitize (c)) return_trace (false);
1219     switch (u.format)
1220     {
1221     case 1: return_trace (u.format1.sanitize (c));
1222     case 2: return_trace (u.format2.sanitize (c));
1223     default:return_trace (true);
1224     }
1225   }
1226 
1227   bool intersects (const hb_set_t *glyphs) const
1228   {
1229     switch (u.format)
1230     {
1231     case 1: return u.format1.intersects (glyphs);
1232     case 2: return u.format2.intersects (glyphs);
1233     default:return false;
1234     }
1235   }
1236   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1237   {
1238     switch (u.format)
1239     {
1240     case 1: return u.format1.intersects_coverage (glyphs, index);
1241     case 2: return u.format2.intersects_coverage (glyphs, index);
1242     default:return false;
1243     }
1244   }
1245 
1246   /* Might return false if array looks unsorted.
1247    * Used for faster rejection of corrupt data. */
1248   template <typename set_t>
1249   bool add_coverage (set_t *glyphs) const
1250   {
1251     switch (u.format)
1252     {
1253     case 1: return u.format1.add_coverage (glyphs);
1254     case 2: return u.format2.add_coverage (glyphs);
1255     default:return false;
1256     }
1257   }
1258 
1259   struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
1260   {
1261     static constexpr bool is_sorted_iterator = true;
1262     iter_t (const Coverage &c_ = Null(Coverage))
1263     {
1264       memset (this, 0, sizeof (*this));
1265       format = c_.u.format;
1266       switch (format)
1267       {
1268       case 1: u.format1.init (c_.u.format1); return;
1269       case 2: u.format2.init (c_.u.format2); return;
1270       default:				     return;
1271       }
1272     }
1273     bool __more__ () const
1274     {
1275       switch (format)
1276       {
1277       case 1: return u.format1.more ();
1278       case 2: return u.format2.more ();
1279       default:return false;
1280       }
1281     }
1282     void __next__ ()
1283     {
1284       switch (format)
1285       {
1286       case 1: u.format1.next (); break;
1287       case 2: u.format2.next (); break;
1288       default:			 break;
1289       }
1290     }
1291     typedef hb_codepoint_t __item_t__;
1292     __item_t__ __item__ () const { return get_glyph (); }
1293 
1294     hb_codepoint_t get_glyph () const
1295     {
1296       switch (format)
1297       {
1298       case 1: return u.format1.get_glyph ();
1299       case 2: return u.format2.get_glyph ();
1300       default:return 0;
1301       }
1302     }
1303     bool operator != (const iter_t& o) const
1304     {
1305       if (format != o.format) return true;
1306       switch (format)
1307       {
1308       case 1: return u.format1 != o.u.format1;
1309       case 2: return u.format2 != o.u.format2;
1310       default:return false;
1311       }
1312     }
1313 
1314     private:
1315     unsigned int format;
1316     union {
1317     CoverageFormat2::iter_t	format2; /* Put this one first since it's larger; helps shut up compiler. */
1318     CoverageFormat1::iter_t	format1;
1319     } u;
1320   };
1321   iter_t iter () const { return iter_t (*this); }
1322 
1323   protected:
1324   union {
1325   HBUINT16		format;		/* Format identifier */
1326   CoverageFormat1	format1;
1327   CoverageFormat2	format2;
1328   } u;
1329   public:
1330   DEFINE_SIZE_UNION (2, format);
1331 };
1332 
1333 template<typename Iterator>
1334 static inline void
1335 Coverage_serialize (hb_serialize_context_t *c,
1336                     Iterator it)
1337 { c->start_embed<Coverage> ()->serialize (c, it); }
1338 
1339 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
1340                                           const hb_set_t &glyphset,
1341                                           const hb_map_t &gid_klass_map,
1342                                           hb_sorted_vector_t<HBGlyphID> glyphs,
1343                                           hb_sorted_vector_t<unsigned> klasses,
1344                                           hb_map_t *klass_map /*INOUT*/)
1345 {
1346   bool has_no_match = glyphset.get_population () > gid_klass_map.get_population ();
1347 
1348   hb_map_t m;
1349   if (!klass_map) klass_map = &m;
1350 
1351   if (has_no_match) klass_map->set (0, 0);
1352   unsigned idx = klass_map->has (0) ? 1 : 0;
1353   for (const unsigned k: klasses.iter ())
1354   {
1355     if (klass_map->has (k)) continue;
1356     klass_map->set (k, idx);
1357     idx++;
1358   }
1359 
1360   auto it =
1361   + glyphs.iter ()
1362   | hb_map_retains_sorting ([&] (const HBGlyphID& gid) -> hb_pair_t<hb_codepoint_t, HBUINT16>
1363                             {
1364                               HBUINT16 new_klass;
1365                               new_klass = klass_map->get (gid_klass_map[gid]);
1366                               return hb_pair ((hb_codepoint_t)gid, new_klass);
1367                             })
1368   ;
1369 
1370   c->propagate_error (glyphs, klasses);
1371   ClassDef_serialize (c, it);
1372 }
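/* Remapping illustration (hypothetical values): with original classes
 * [5, 5, 9] and a glyphset larger than gid_klass_map (so class 0 is kept for
 * the unmatched glyphs), klass_map becomes {0:0, 5:1, 9:2} and the serialized
 * ClassDef uses the compacted classes 1 and 2. */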
1373 
1374 /*
1375  * Class Definition Table
1376  */
1377 
1378 struct ClassDefFormat1
1379 {
1380   friend struct ClassDef;
1381 
1382   private:
1383   unsigned int get_class (hb_codepoint_t glyph_id) const
1384   {
1385     return classValue[(unsigned int) (glyph_id - startGlyph)];
1386   }
1387 
1388   template<typename Iterator,
1389 	   hb_requires (hb_is_iterator (Iterator))>
1390   bool serialize (hb_serialize_context_t *c,
1391                   Iterator it)
1392   {
1393     TRACE_SERIALIZE (this);
1394     if (unlikely (!c->extend_min (*this))) return_trace (false);
1395 
1396     if (unlikely (!it))
1397     {
1398       startGlyph = 0;
1399       classValue.len = 0;
1400       return_trace (true);
1401     }
1402 
1403     startGlyph = (*it).first;
1404     classValue.serialize (c, + it
1405                              | hb_map (hb_second));
1406     return_trace (true);
1407   }
1408 
1409   bool subset (hb_subset_context_t *c,
1410                hb_map_t *klass_map = nullptr /*OUT*/) const
1411   {
1412     TRACE_SUBSET (this);
1413     const hb_set_t &glyphset = *c->plan->glyphset ();
1414     const hb_map_t &glyph_map = *c->plan->glyph_map;
1415 
1416     hb_sorted_vector_t<HBGlyphID> glyphs;
1417     hb_sorted_vector_t<unsigned> orig_klasses;
1418     hb_map_t gid_org_klass_map;
1419 
1420     hb_codepoint_t start = startGlyph;
1421     hb_codepoint_t end   = start + classValue.len;
1422     for (const hb_codepoint_t gid : + hb_range (start, end)
1423 				    | hb_filter (glyphset))
1424     {
1425       unsigned klass = classValue[gid - start];
1426       if (!klass) continue;
1427 
1428       glyphs.push (glyph_map[gid]);
1429       gid_org_klass_map.set (glyph_map[gid], klass);
1430       orig_klasses.push (klass);
1431     }
1432 
1433     ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
1434                                   glyphs, orig_klasses, klass_map);
1435     return_trace ((bool) glyphs);
1436   }
1437 
1438   bool sanitize (hb_sanitize_context_t *c) const
1439   {
1440     TRACE_SANITIZE (this);
1441     return_trace (c->check_struct (this) && classValue.sanitize (c));
1442   }
1443 
1444   template <typename set_t>
1445   bool add_coverage (set_t *glyphs) const
1446   {
1447     unsigned int start = 0;
1448     unsigned int count = classValue.len;
1449     for (unsigned int i = 0; i < count; i++)
1450     {
1451       if (classValue[i])
1452 	continue;
1453 
1454       if (start != i)
1455 	if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
1456 	  return false;
1457 
1458       start = i + 1;
1459     }
1460     if (start != count)
1461       if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
1462 	return false;
1463 
1464     return true;
1465   }
1466 
1467   template <typename set_t>
1468   bool add_class (set_t *glyphs, unsigned int klass) const
1469   {
1470     unsigned int count = classValue.len;
1471     for (unsigned int i = 0; i < count; i++)
1472       if (classValue[i] == klass) glyphs->add (startGlyph + i);
1473     return true;
1474   }
1475 
1476   bool intersects (const hb_set_t *glyphs) const
1477   {
1478     /* TODO Speed up, using hb_set_next()? */
1479     hb_codepoint_t start = startGlyph;
1480     hb_codepoint_t end = startGlyph + classValue.len;
1481     for (hb_codepoint_t iter = startGlyph - 1;
1482 	 hb_set_next (glyphs, &iter) && iter < end;)
1483       if (classValue[iter - start]) return true;
1484     return false;
1485   }
1486   bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1487   {
1488     unsigned int count = classValue.len;
1489     if (klass == 0)
1490     {
1491       /* Match if there's any glyph that is not listed! */
1492       hb_codepoint_t g = HB_SET_VALUE_INVALID;
1493       if (!hb_set_next (glyphs, &g)) return false;
1494       if (g < startGlyph) return true;
1495       g = startGlyph + count - 1;
1496       if (hb_set_next (glyphs, &g)) return true;
1497       /* Fall through. */
1498     }
1499     for (unsigned int i = 0; i < count; i++)
1500       if (classValue[i] == klass && glyphs->has (startGlyph + i))
1501 	return true;
1502     return false;
1503   }
1504 
1505   protected:
1506   HBUINT16	classFormat;	/* Format identifier--format = 1 */
1507   HBGlyphID	startGlyph;	/* First GlyphID of the classValueArray */
1508   ArrayOf<HBUINT16>
1509 		classValue;	/* Array of Class Values--one per GlyphID */
1510   public:
1511   DEFINE_SIZE_ARRAY (6, classValue);
1512 };
1513 
1514 struct ClassDefFormat2
1515 {
1516   friend struct ClassDef;
1517 
1518   private:
1519   unsigned int get_class (hb_codepoint_t glyph_id) const
1520   {
1521     return rangeRecord.bsearch (glyph_id).value;
1522   }
1523 
1524   template<typename Iterator,
1525 	   hb_requires (hb_is_iterator (Iterator))>
1526   bool serialize (hb_serialize_context_t *c,
1527                   Iterator it)
1528   {
1529     TRACE_SERIALIZE (this);
1530     if (unlikely (!c->extend_min (*this))) return_trace (false);
1531 
1532     if (unlikely (!it))
1533     {
1534       rangeRecord.len = 0;
1535       return_trace (true);
1536     }
1537 
1538     unsigned num_ranges = 1;
1539     hb_codepoint_t prev_gid = (*it).first;
1540     unsigned prev_klass = (*it).second;
1541 
1542     RangeRecord range_rec;
1543     range_rec.start = prev_gid;
1544     range_rec.end = prev_gid;
1545     range_rec.value = prev_klass;
1546 
1547     RangeRecord *record = c->copy (range_rec);
1548     if (unlikely (!record)) return_trace (false);
1549 
1550     for (const auto gid_klass_pair : + (++it))
1551     {
1552       hb_codepoint_t cur_gid = gid_klass_pair.first;
1553       unsigned cur_klass = gid_klass_pair.second;
1554 
1555       if (cur_gid != prev_gid + 1 ||
1556           cur_klass != prev_klass)
1557       {
1558         if (unlikely (!record)) break;
1559         record->end = prev_gid;
1560         num_ranges++;
1561 
1562         range_rec.start = cur_gid;
1563         range_rec.end = cur_gid;
1564         range_rec.value = cur_klass;
1565 
1566         record = c->copy (range_rec);
1567       }
1568 
1569       prev_klass = cur_klass;
1570       prev_gid = cur_gid;
1571     }
1572 
1573     if (likely (record)) record->end = prev_gid;
1574     rangeRecord.len = num_ranges;
1575     return_trace (true);
1576   }
1577 
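  /* Subsetting: collect the retained glyphs that carry a non-zero class
   * together with their original class values, then hand them to
   * ClassDef_remap_and_serialize (), which remaps the classes (recording
   * old->new in klass_map) and writes out the new ClassDef. */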
1578   bool subset (hb_subset_context_t *c,
1579                hb_map_t *klass_map = nullptr /*OUT*/) const
1580   {
1581     TRACE_SUBSET (this);
1582     const hb_set_t &glyphset = *c->plan->glyphset ();
1583     const hb_map_t &glyph_map = *c->plan->glyph_map;
1584 
1585     hb_sorted_vector_t<HBGlyphID> glyphs;
1586     hb_sorted_vector_t<unsigned> orig_klasses;
1587     hb_map_t gid_org_klass_map;
1588 
1589     unsigned count = rangeRecord.len;
1590     for (unsigned i = 0; i < count; i++)
1591     {
1592       unsigned klass = rangeRecord[i].value;
1593       if (!klass) continue;
1594       hb_codepoint_t start = rangeRecord[i].start;
1595       hb_codepoint_t end   = rangeRecord[i].end + 1;
1596       for (hb_codepoint_t g = start; g < end; g++)
1597       {
1598 	if (!glyphset.has (g)) continue;
1599 	glyphs.push (glyph_map[g]);
1600         gid_org_klass_map.set (glyph_map[g], klass);
1601         orig_klasses.push (klass);
1602       }
1603     }
1604 
1605     ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
1606                                   glyphs, orig_klasses, klass_map);
1607     return_trace ((bool) glyphs);
1608   }
1609 
1610   bool sanitize (hb_sanitize_context_t *c) const
1611   {
1612     TRACE_SANITIZE (this);
1613     return_trace (rangeRecord.sanitize (c));
1614   }
1615 
1616   template <typename set_t>
1617   bool add_coverage (set_t *glyphs) const
1618   {
1619     unsigned int count = rangeRecord.len;
1620     for (unsigned int i = 0; i < count; i++)
1621       if (rangeRecord[i].value)
1622 	if (unlikely (!rangeRecord[i].add_coverage (glyphs)))
1623 	  return false;
1624     return true;
1625   }
1626 
1627   template <typename set_t>
1628   bool add_class (set_t *glyphs, unsigned int klass) const
1629   {
1630     unsigned int count = rangeRecord.len;
1631     for (unsigned int i = 0; i < count; i++)
1632     {
1633       if (rangeRecord[i].value == klass)
1634 	if (unlikely (!rangeRecord[i].add_coverage (glyphs)))
1635 	  return false;
1636     }
1637     return true;
1638   }
1639 
1640   bool intersects (const hb_set_t *glyphs) const
1641   {
1642     /* TODO Speed up, using hb_set_next() and bsearch()? */
1643     unsigned int count = rangeRecord.len;
1644     for (unsigned int i = 0; i < count; i++)
1645       if (rangeRecord[i].intersects (glyphs))
1646 	return true;
1647     return false;
1648   }
1649   bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1650   {
1651     unsigned int count = rangeRecord.len;
1652     if (klass == 0)
1653     {
1654       /* Match if there's any glyph that is not listed! */
1655       hb_codepoint_t g = HB_SET_VALUE_INVALID;
1656       for (unsigned int i = 0; i < count; i++)
1657       {
1658 	if (!hb_set_next (glyphs, &g))
1659 	  break;
1660 	if (g < rangeRecord[i].start)
1661 	  return true;
1662 	g = rangeRecord[i].end;
1663       }
1664       if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
1665 	return true;
1666       /* Fall through. */
1667     }
1668     for (unsigned int i = 0; i < count; i++)
1669       if (rangeRecord[i].value == klass && rangeRecord[i].intersects (glyphs))
1670 	return true;
1671     return false;
1672   }
1673 
1674   protected:
1675   HBUINT16	classFormat;	/* Format identifier--format = 2 */
1676   SortedArrayOf<RangeRecord>
1677 		rangeRecord;	/* Array of glyph ranges--ordered by
1678 				 * Start GlyphID */
1679   public:
1680   DEFINE_SIZE_ARRAY (4, rangeRecord);
1681 };
1682 
1683 struct ClassDef
1684 {
1685   /* Has interface. */
1686   static constexpr unsigned SENTINEL = 0;
1687   typedef unsigned int value_t;
1688   value_t operator [] (hb_codepoint_t k) const { return get (k); }
1689   bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
1690   /* Projection. */
1691   hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
1692 
1693   unsigned int get (hb_codepoint_t k) const { return get_class (k); }
1694   unsigned int get_class (hb_codepoint_t glyph_id) const
1695   {
1696     switch (u.format) {
1697     case 1: return u.format1.get_class (glyph_id);
1698     case 2: return u.format2.get_class (glyph_id);
1699     default:return 0;
1700     }
1701   }
1702 
1703   template<typename Iterator,
1704 	   hb_requires (hb_is_iterator (Iterator))>
1705   bool serialize (hb_serialize_context_t *c, Iterator it)
1706   {
1707     TRACE_SERIALIZE (this);
1708     if (unlikely (!c->extend_min (*this))) return_trace (false);
1709 
1710     unsigned format = 2;
1711     if (likely (it))
1712     {
1713       hb_codepoint_t glyph_min = (*it).first;
1714       hb_codepoint_t glyph_max = + it
1715 				 | hb_map (hb_first)
1716                                  | hb_reduce (hb_max, 0u);
1717 
1718       unsigned num_ranges = 1;
1719       hb_codepoint_t prev_gid = glyph_min;
1720       unsigned prev_klass = (*it).second;
1721 
1722       for (const auto gid_klass_pair : it)
1723       {
1724         hb_codepoint_t cur_gid = gid_klass_pair.first;
1725         unsigned cur_klass = gid_klass_pair.second;
1726         if (cur_gid != prev_gid + 1 ||
1727             cur_klass != prev_klass)
1728           num_ranges++;
1729 
1730         prev_gid = cur_gid;
1731         prev_klass = cur_klass;
1732       }
1733 
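      /* Pick the smaller encoding: format 1 costs one 16-bit class value per
       * glyph in [glyph_min, glyph_max] plus the start glyph, while format 2
       * costs three 16-bit fields per range. */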
1734       if (1 + (glyph_max - glyph_min + 1) < num_ranges * 3)
1735 	format = 1;
1736     }
1737     u.format = format;
1738 
1739     switch (u.format)
1740     {
1741     case 1: return_trace (u.format1.serialize (c, it));
1742     case 2: return_trace (u.format2.serialize (c, it));
1743     default:return_trace (false);
1744     }
1745   }
1746 
1747   bool subset (hb_subset_context_t *c,
1748                hb_map_t *klass_map = nullptr /*OUT*/) const
1749   {
1750     TRACE_SUBSET (this);
1751     switch (u.format) {
1752     case 1: return_trace (u.format1.subset (c, klass_map));
1753     case 2: return_trace (u.format2.subset (c, klass_map));
1754     default:return_trace (false);
1755     }
1756   }
1757 
1758   bool sanitize (hb_sanitize_context_t *c) const
1759   {
1760     TRACE_SANITIZE (this);
1761     if (!u.format.sanitize (c)) return_trace (false);
1762     switch (u.format) {
1763     case 1: return_trace (u.format1.sanitize (c));
1764     case 2: return_trace (u.format2.sanitize (c));
1765     default:return_trace (true);
1766     }
1767   }
1768 
1769   /* Might return false if array looks unsorted.
1770    * Used for faster rejection of corrupt data. */
1771   template <typename set_t>
1772   bool add_coverage (set_t *glyphs) const
1773   {
1774     switch (u.format) {
1775     case 1: return u.format1.add_coverage (glyphs);
1776     case 2: return u.format2.add_coverage (glyphs);
1777     default:return false;
1778     }
1779   }
1780 
1781   /* Might return false if array looks unsorted.
1782    * Used for faster rejection of corrupt data. */
1783   template <typename set_t>
1784   bool add_class (set_t *glyphs, unsigned int klass) const
1785   {
1786     switch (u.format) {
1787     case 1: return u.format1.add_class (glyphs, klass);
1788     case 2: return u.format2.add_class (glyphs, klass);
1789     default:return false;
1790     }
1791   }
1792 
1793   bool intersects (const hb_set_t *glyphs) const
1794   {
1795     switch (u.format) {
1796     case 1: return u.format1.intersects (glyphs);
1797     case 2: return u.format2.intersects (glyphs);
1798     default:return false;
1799     }
1800   }
1801   bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1802   {
1803     switch (u.format) {
1804     case 1: return u.format1.intersects_class (glyphs, klass);
1805     case 2: return u.format2.intersects_class (glyphs, klass);
1806     default:return false;
1807     }
1808   }
1809 
1810   protected:
1811   union {
1812   HBUINT16		format;		/* Format identifier */
1813   ClassDefFormat1	format1;
1814   ClassDefFormat2	format2;
1815   } u;
1816   public:
1817   DEFINE_SIZE_UNION (2, format);
1818 };
1819 
1820 template<typename Iterator>
1821 static inline void ClassDef_serialize (hb_serialize_context_t *c,
1822                                        Iterator it)
1823 { c->start_embed<ClassDef> ()->serialize (c, it); }
1824 
1825 
1826 /*
1827  * Item Variation Store
1828  */
1829 
1830 struct VarRegionAxis
1831 {
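  /* Per-axis scalar of one variation region: a "tent" that evaluates to 1.0
   * at peakCoord, falls off linearly to 0.0 at startCoord and endCoord, and
   * is 1.0 everywhere when the peak is 0 (the axis does not participate).
   * VarRegionList multiplies these per-axis values to get the region scalar. */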
1832   float evaluate (int coord) const
1833   {
1834     int start = startCoord, peak = peakCoord, end = endCoord;
1835 
1836     /* TODO Move these to sanitize(). */
1837     if (unlikely (start > peak || peak > end))
1838       return 1.;
1839     if (unlikely (start < 0 && end > 0 && peak != 0))
1840       return 1.;
1841 
1842     if (peak == 0 || coord == peak)
1843       return 1.;
1844 
1845     if (coord <= start || end <= coord)
1846       return 0.;
1847 
1848     /* Interpolate */
1849     if (coord < peak)
1850       return float (coord - start) / (peak - start);
1851     else
1852       return float (end - coord) / (end - peak);
1853   }
1854 
1855   bool sanitize (hb_sanitize_context_t *c) const
1856   {
1857     TRACE_SANITIZE (this);
1858     return_trace (c->check_struct (this));
1859     /* TODO Handle invalid start/peak/end configs, so we don't
1860      * have to do that at runtime. */
1861   }
1862 
1863   public:
1864   F2DOT14	startCoord;
1865   F2DOT14	peakCoord;
1866   F2DOT14	endCoord;
1867   public:
1868   DEFINE_SIZE_STATIC (6);
1869 };
1870 
1871 struct VarRegionList
1872 {
1873   float evaluate (unsigned int region_index,
1874 			 const int *coords, unsigned int coord_len) const
1875   {
1876     if (unlikely (region_index >= regionCount))
1877       return 0.;
1878 
1879     const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
1880 
1881     float v = 1.;
1882     unsigned int count = axisCount;
1883     for (unsigned int i = 0; i < count; i++)
1884     {
1885       int coord = i < coord_len ? coords[i] : 0;
1886       float factor = axes[i].evaluate (coord);
1887       if (factor == 0.f)
1888 	return 0.;
1889       v *= factor;
1890     }
1891     return v;
1892   }
1893 
1894   bool sanitize (hb_sanitize_context_t *c) const
1895   {
1896     TRACE_SANITIZE (this);
1897     return_trace (c->check_struct (this) &&
1898 		  axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount));
1899   }
1900 
1901   bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
1902   {
1903     TRACE_SERIALIZE (this);
1904     VarRegionList *out = c->allocate_min<VarRegionList> ();
1905     if (unlikely (!out)) return_trace (false);
1906     axisCount = src->axisCount;
1907     regionCount = region_map.get_population ();
1908     if (unlikely (!c->allocate_size<VarRegionList> (get_size () - min_size))) return_trace (false);
1909     for (unsigned int r = 0; r < regionCount; r++)
1910       memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * region_map.backward (r)], VarRegionAxis::static_size * axisCount);
1911 
1912     return_trace (true);
1913   }
1914 
1915   unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
1916   unsigned int get_region_count () const { return regionCount; }
1917 
1918   protected:
1919   HBUINT16	axisCount;
1920   HBUINT16	regionCount;
1921   UnsizedArrayOf<VarRegionAxis>
1922 		axesZ;
1923   public:
1924   DEFINE_SIZE_ARRAY (4, axesZ);
1925 };
1926 
1927 struct VarData
1928 {
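  /* Each of the itemCount rows stores one delta per region in regionIndices:
   * the first shortCount deltas of a row are signed 16-bit values, the rest
   * are signed 8-bit values.  See get_item_delta () below. */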
1929   unsigned int get_region_index_count () const
1930   { return regionIndices.len; }
1931 
1932   unsigned int get_row_size () const
1933   { return shortCount + regionIndices.len; }
1934 
1935   unsigned int get_size () const
1936   { return itemCount * get_row_size (); }
1937 
1938   float get_delta (unsigned int inner,
1939 			  const int *coords, unsigned int coord_count,
1940 			  const VarRegionList &regions) const
1941   {
1942     if (unlikely (inner >= itemCount))
1943       return 0.;
1944 
1945    unsigned int count = regionIndices.len;
1946    unsigned int scount = shortCount;
1947 
1948    const HBUINT8 *bytes = get_delta_bytes ();
1949    const HBUINT8 *row = bytes + inner * (scount + count);
1950 
1951    float delta = 0.;
1952    unsigned int i = 0;
1953 
1954    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
1955    for (; i < scount; i++)
1956    {
1957      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
1958      delta += scalar * *scursor++;
1959    }
1960    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
1961    for (; i < count; i++)
1962    {
1963      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
1964      delta += scalar * *bcursor++;
1965    }
1966 
1967    return delta;
1968   }
1969 
1970   void get_scalars (int *coords, unsigned int coord_count,
1971 		    const VarRegionList &regions,
1972 		    float *scalars /*OUT */,
1973 		    unsigned int num_scalars) const
1974   {
1975     unsigned count = hb_min (num_scalars, regionIndices.len);
1976     for (unsigned int i = 0; i < count; i++)
1977       scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
1978     for (unsigned int i = count; i < num_scalars; i++)
1979       scalars[i] = 0.f;
1980   }
1981 
1982   bool sanitize (hb_sanitize_context_t *c) const
1983   {
1984     TRACE_SANITIZE (this);
1985     return_trace (c->check_struct (this) &&
1986 		  regionIndices.sanitize (c) &&
1987 		  shortCount <= regionIndices.len &&
1988 		  c->check_range (get_delta_bytes (),
1989 				  itemCount,
1990 				  get_row_size ()));
1991   }
1992 
1993   bool serialize (hb_serialize_context_t *c,
1994 		  const VarData *src,
1995 		  const hb_inc_bimap_t &inner_map,
1996 		  const hb_bimap_t &region_map)
1997   {
1998     TRACE_SERIALIZE (this);
1999     if (unlikely (!c->extend_min (*this))) return_trace (false);
2000     itemCount = inner_map.get_next_value ();
2001 
2002     /* Optimize short count */
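    /* Classify every region column as all-zero, byte-sized or short-sized
     * over the retained rows; all-zero columns are dropped and the remaining
     * columns are reordered so that the short ones come first, as the format
     * requires. */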
2003     unsigned short ri_count = src->regionIndices.len;
2004     enum delta_size_t { kZero=0, kByte, kShort };
2005     hb_vector_t<delta_size_t> delta_sz;
2006     hb_vector_t<unsigned int> ri_map;	/* maps old index to new index */
2007     delta_sz.resize (ri_count);
2008     ri_map.resize (ri_count);
2009     unsigned int new_short_count = 0;
2010     unsigned int r;
2011     for (r = 0; r < ri_count; r++)
2012     {
2013       delta_sz[r] = kZero;
2014       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2015       {
2016 	unsigned int old = inner_map.backward (i);
2017 	int16_t delta = src->get_item_delta (old, r);
2018 	if (delta < -128 || 127 < delta)
2019 	{
2020 	  delta_sz[r] = kShort;
2021 	  new_short_count++;
2022 	  break;
2023 	}
2024 	else if (delta != 0)
2025 	  delta_sz[r] = kByte;
2026       }
2027     }
2028     unsigned int short_index = 0;
2029     unsigned int byte_index = new_short_count;
2030     unsigned int new_ri_count = 0;
2031     for (r = 0; r < ri_count; r++)
2032       if (delta_sz[r])
2033       {
2034         ri_map[r] = (delta_sz[r] == kShort) ? short_index++ : byte_index++;
2035         new_ri_count++;
2036       }
2037 
2038     shortCount = new_short_count;
2039     regionIndices.len = new_ri_count;
2040 
2041     unsigned int size = regionIndices.get_size () - HBUINT16::static_size/*regionIndices.len*/ + (get_row_size () * itemCount);
2042     if (unlikely (!c->allocate_size<HBUINT8> (size)))
2043       return_trace (false);
2044 
2045     for (r = 0; r < ri_count; r++)
2046       if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
2047 
2048     for (unsigned int i = 0; i < itemCount; i++)
2049     {
2050       unsigned int	old = inner_map.backward (i);
2051       for (unsigned int r = 0; r < ri_count; r++)
2052 	if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
2053     }
2054 
2055     return_trace (true);
2056   }
2057 
2058   void collect_region_refs (hb_inc_bimap_t &region_map, const hb_inc_bimap_t &inner_map) const
2059   {
2060     for (unsigned int r = 0; r < regionIndices.len; r++)
2061     {
2062       unsigned int region = regionIndices[r];
2063       if (region_map.has (region)) continue;
2064       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2065 	if (get_item_delta (inner_map.backward (i), r) != 0)
2066 	{
2067 	  region_map.add (region);
2068 	  break;
2069 	}
2070     }
2071   }
2072 
2073   protected:
2074   const HBUINT8 *get_delta_bytes () const
2075   { return &StructAfter<HBUINT8> (regionIndices); }
2076 
2077   HBUINT8 *get_delta_bytes ()
2078   { return &StructAfter<HBUINT8> (regionIndices); }
2079 
2080   int16_t get_item_delta (unsigned int item, unsigned int region) const
2081   {
2082     if (item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
2083     const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
2084     if (region < shortCount)
2085       return ((const HBINT16 *)p)[region];
2086     else
2087       return (p + HBINT16::static_size * shortCount)[region - shortCount];
2088   }
2089 
2090   void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
2091   {
2092     HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
2093     if (region < shortCount)
2094       ((HBINT16 *)p)[region] = delta;
2095     else
2096       (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
2097   }
2098 
2099   protected:
2100   HBUINT16		itemCount;
2101   HBUINT16		shortCount;
2102   ArrayOf<HBUINT16>	regionIndices;
2103 /*UnsizedArrayOf<HBUINT8>bytesX;*/
2104   public:
2105   DEFINE_SIZE_ARRAY (6, regionIndices);
2106 };
2107 
2108 struct VariationStore
2109 {
2110   float get_delta (unsigned int outer, unsigned int inner,
2111 		   const int *coords, unsigned int coord_count) const
2112   {
2113 #ifdef HB_NO_VAR
2114     return 0.f;
2115 #endif
2116 
2117     if (unlikely (outer >= dataSets.len))
2118       return 0.f;
2119 
2120     return (this+dataSets[outer]).get_delta (inner,
2121 					     coords, coord_count,
2122 					     this+regions);
2123   }
2124 
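  /* A packed variation index: the outer (VarData sub-table) index is in the
   * high 16 bits, the inner (row) index in the low 16 bits. */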
2125   float get_delta (unsigned int index,
2126 		   const int *coords, unsigned int coord_count) const
2127   {
2128     unsigned int outer = index >> 16;
2129     unsigned int inner = index & 0xFFFF;
2130     return get_delta (outer, inner, coords, coord_count);
2131   }
2132 
2133   bool sanitize (hb_sanitize_context_t *c) const
2134   {
2135 #ifdef HB_NO_VAR
2136     return true;
2137 #endif
2138 
2139     TRACE_SANITIZE (this);
2140     return_trace (c->check_struct (this) &&
2141 		  format == 1 &&
2142 		  regions.sanitize (c, this) &&
2143 		  dataSets.sanitize (c, this));
2144   }
2145 
2146   bool serialize (hb_serialize_context_t *c,
2147 		  const VariationStore *src,
2148   		  const hb_array_t <hb_inc_bimap_t> &inner_maps)
2149   {
2150     TRACE_SERIALIZE (this);
2151     unsigned int set_count = 0;
2152     for (unsigned int i = 0; i < inner_maps.length; i++)
2153       if (inner_maps[i].get_population () > 0) set_count++;
2154 
2155     unsigned int size = min_size + HBUINT32::static_size * set_count;
2156     if (unlikely (!c->allocate_size<HBUINT32> (size))) return_trace (false);
2157     format = 1;
2158 
2159     hb_inc_bimap_t region_map;
2160     for (unsigned int i = 0; i < inner_maps.length; i++)
2161       (src+src->dataSets[i]).collect_region_refs (region_map, inner_maps[i]);
2162     region_map.sort ();
2163 
2164     if (unlikely (!regions.serialize (c, this)
2165 		  .serialize (c, &(src+src->regions), region_map))) return_trace (false);
2166 
2167     /* TODO: The following code could be simplified when
2168      * OffsetListOf::subset () can take a custom param to be passed to VarData::serialize ()
2169      */
2170     dataSets.len = set_count;
2171     unsigned int set_index = 0;
2172     for (unsigned int i = 0; i < inner_maps.length; i++)
2173     {
2174       if (inner_maps[i].get_population () == 0) continue;
2175       if (unlikely (!dataSets[set_index++].serialize (c, this)
2176 		      .serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
2177 	return_trace (false);
2178     }
2179 
2180     return_trace (true);
2181   }
2182 
2183   unsigned int get_region_index_count (unsigned int ivs) const
2184   { return (this+dataSets[ivs]).get_region_index_count (); }
2185 
2186   void get_scalars (unsigned int ivs,
2187 		    int *coords, unsigned int coord_count,
2188 		    float *scalars /*OUT*/,
2189 		    unsigned int num_scalars) const
2190   {
2191 #ifdef HB_NO_VAR
2192     for (unsigned i = 0; i < num_scalars; i++)
2193       scalars[i] = 0.f;
2194     return;
2195 #endif
2196 
2197     (this+dataSets[ivs]).get_scalars (coords, coord_count, this+regions,
2198 				      &scalars[0], num_scalars);
2199   }
2200 
2201   unsigned int get_sub_table_count () const { return dataSets.len; }
2202 
2203   protected:
2204   HBUINT16				format;
2205   LOffsetTo<VarRegionList>		regions;
2206   LOffsetArrayOf<VarData>		dataSets;
2207   public:
2208   DEFINE_SIZE_ARRAY (8, dataSets);
2209 };
2210 
2211 /*
2212  * Feature Variations
2213  */
2214 
2215 struct ConditionFormat1
2216 {
2217   friend struct Condition;
2218 
2219   private:
2220   bool evaluate (const int *coords, unsigned int coord_len) const
2221   {
2222     int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
2223     return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
2224   }
2225 
2226   bool sanitize (hb_sanitize_context_t *c) const
2227   {
2228     TRACE_SANITIZE (this);
2229     return_trace (c->check_struct (this));
2230   }
2231 
2232   protected:
2233   HBUINT16	format;		/* Format identifier--format = 1 */
2234   HBUINT16	axisIndex;
2235   F2DOT14	filterRangeMinValue;
2236   F2DOT14	filterRangeMaxValue;
2237   public:
2238   DEFINE_SIZE_STATIC (8);
2239 };
2240 
2241 struct Condition
2242 {
2243   bool evaluate (const int *coords, unsigned int coord_len) const
2244   {
2245     switch (u.format) {
2246     case 1: return u.format1.evaluate (coords, coord_len);
2247     default:return false;
2248     }
2249   }
2250 
2251   bool sanitize (hb_sanitize_context_t *c) const
2252   {
2253     TRACE_SANITIZE (this);
2254     if (!u.format.sanitize (c)) return_trace (false);
2255     switch (u.format) {
2256     case 1: return_trace (u.format1.sanitize (c));
2257     default:return_trace (true);
2258     }
2259   }
2260 
2261   protected:
2262   union {
2263   HBUINT16		format;		/* Format identifier */
2264   ConditionFormat1	format1;
2265   } u;
2266   public:
2267   DEFINE_SIZE_UNION (2, format);
2268 };
2269 
2270 struct ConditionSet
2271 {
2272   bool evaluate (const int *coords, unsigned int coord_len) const
2273   {
2274     unsigned int count = conditions.len;
2275     for (unsigned int i = 0; i < count; i++)
2276       if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
2277 	return false;
2278     return true;
2279   }
2280 
2281   bool sanitize (hb_sanitize_context_t *c) const
2282   {
2283     TRACE_SANITIZE (this);
2284     return_trace (conditions.sanitize (c, this));
2285   }
2286 
2287   protected:
2288   LOffsetArrayOf<Condition>	conditions;
2289   public:
2290   DEFINE_SIZE_ARRAY (2, conditions);
2291 };
2292 
2293 struct FeatureTableSubstitutionRecord
2294 {
2295   friend struct FeatureTableSubstitution;
2296 
2297   bool sanitize (hb_sanitize_context_t *c, const void *base) const
2298   {
2299     TRACE_SANITIZE (this);
2300     return_trace (c->check_struct (this) && feature.sanitize (c, base));
2301   }
2302 
2303   protected:
2304   HBUINT16		featureIndex;
2305   LOffsetTo<Feature>	feature;
2306   public:
2307   DEFINE_SIZE_STATIC (6);
2308 };
2309 
2310 struct FeatureTableSubstitution
2311 {
2312   const Feature *find_substitute (unsigned int feature_index) const
2313   {
2314     unsigned int count = substitutions.len;
2315     for (unsigned int i = 0; i < count; i++)
2316     {
2317       const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
2318       if (record.featureIndex == feature_index)
2319 	return &(this+record.feature);
2320     }
2321     return nullptr;
2322   }
2323 
2324   bool sanitize (hb_sanitize_context_t *c) const
2325   {
2326     TRACE_SANITIZE (this);
2327     return_trace (version.sanitize (c) &&
2328 		  likely (version.major == 1) &&
2329 		  substitutions.sanitize (c, this));
2330   }
2331 
2332   protected:
2333   FixedVersion<>	version;	/* Version--0x00010000u */
2334   ArrayOf<FeatureTableSubstitutionRecord>
2335 			substitutions;
2336   public:
2337   DEFINE_SIZE_ARRAY (6, substitutions);
2338 };
2339 
2340 struct FeatureVariationRecord
2341 {
2342   friend struct FeatureVariations;
2343 
2344   bool sanitize (hb_sanitize_context_t *c, const void *base) const
2345   {
2346     TRACE_SANITIZE (this);
2347     return_trace (conditions.sanitize (c, base) &&
2348 		  substitutions.sanitize (c, base));
2349   }
2350 
2351   protected:
2352   LOffsetTo<ConditionSet>
2353 			conditions;
2354   LOffsetTo<FeatureTableSubstitution>
2355 			substitutions;
2356   public:
2357   DEFINE_SIZE_STATIC (8);
2358 };
2359 
2360 struct FeatureVariations
2361 {
2362   static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;
2363 
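  /* Return the first record whose ConditionSet matches the given normalized
   * coordinates; records are tried in table order. */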
2364   bool find_index (const int *coords, unsigned int coord_len,
2365 			  unsigned int *index) const
2366   {
2367     unsigned int count = varRecords.len;
2368     for (unsigned int i = 0; i < count; i++)
2369     {
2370       const FeatureVariationRecord &record = varRecords.arrayZ[i];
2371       if ((this+record.conditions).evaluate (coords, coord_len))
2372       {
2373 	*index = i;
2374 	return true;
2375       }
2376     }
2377     *index = NOT_FOUND_INDEX;
2378     return false;
2379   }
2380 
2381   const Feature *find_substitute (unsigned int variations_index,
2382 				  unsigned int feature_index) const
2383   {
2384     const FeatureVariationRecord &record = varRecords[variations_index];
2385     return (this+record.substitutions).find_substitute (feature_index);
2386   }
2387 
2388   FeatureVariations* copy (hb_serialize_context_t *c) const
2389   {
2390     TRACE_SERIALIZE (this);
2391     return_trace (c->embed (*this));
2392   }
2393 
2394   bool sanitize (hb_sanitize_context_t *c) const
2395   {
2396     TRACE_SANITIZE (this);
2397     return_trace (version.sanitize (c) &&
2398 		  likely (version.major == 1) &&
2399 		  varRecords.sanitize (c, this));
2400   }
2401 
2402   protected:
2403   FixedVersion<>	version;	/* Version--0x00010000u */
2404   LArrayOf<FeatureVariationRecord>
2405 			varRecords;
2406   public:
2407   DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
2408 };
2409 
2410 
2411 /*
2412  * Device Tables
2413  */
2414 
2415 struct HintingDevice
2416 {
2417   friend struct Device;
2418 
2419   private:
2420 
2421   hb_position_t get_x_delta (hb_font_t *font) const
2422   { return get_delta (font->x_ppem, font->x_scale); }
2423 
2424   hb_position_t get_y_delta (hb_font_t *font) const
2425   { return get_delta (font->y_ppem, font->y_scale); }
2426 
2427   public:
2428 
2429   unsigned int get_size () const
2430   {
2431     unsigned int f = deltaFormat;
2432     if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
2433     return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
2434   }
2435 
2436   bool sanitize (hb_sanitize_context_t *c) const
2437   {
2438     TRACE_SANITIZE (this);
2439     return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
2440   }
2441 
2442   HintingDevice* copy (hb_serialize_context_t *c) const
2443   {
2444     TRACE_SERIALIZE (this);
2445     return_trace (c->embed<HintingDevice> (this));
2446   }
2447 
2448   private:
2449 
2450   int get_delta (unsigned int ppem, int scale) const
2451   {
2452     if (!ppem) return 0;
2453 
2454     int pixels = get_delta_pixels (ppem);
2455 
2456     if (!pixels) return 0;
2457 
2458     return (int) (pixels * (int64_t) scale / ppem);
2459   }
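  /* deltaValueZ packs one signed value per ppem size into 16-bit words,
   * most-significant bits first: 8, 4 or 2 values per word for deltaFormat
   * 1, 2 or 3 respectively.  Extract the bits for this size and sign-extend. */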
2460   int get_delta_pixels (unsigned int ppem_size) const
2461   {
2462     unsigned int f = deltaFormat;
2463     if (unlikely (f < 1 || f > 3))
2464       return 0;
2465 
2466     if (ppem_size < startSize || ppem_size > endSize)
2467       return 0;
2468 
2469     unsigned int s = ppem_size - startSize;
2470 
2471     unsigned int byte = deltaValueZ[s >> (4 - f)];
2472     unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
2473     unsigned int mask = (0xFFFFu >> (16 - (1 << f)));
2474 
2475     int delta = bits & mask;
2476 
2477     if ((unsigned int) delta >= ((mask + 1) >> 1))
2478       delta -= mask + 1;
2479 
2480     return delta;
2481   }
2482 
2483   protected:
2484   HBUINT16	startSize;		/* Smallest size to correct--in ppem */
2485   HBUINT16	endSize;		/* Largest size to correct--in ppem */
2486   HBUINT16	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
2487 					 * 1	Signed 2-bit value, 8 values per uint16
2488 					 * 2	Signed 4-bit value, 4 values per uint16
2489 					 * 3	Signed 8-bit value, 2 values per uint16
2490 					 */
2491   UnsizedArrayOf<HBUINT16>
2492 		deltaValueZ;		/* Array of compressed data */
2493   public:
2494   DEFINE_SIZE_ARRAY (6, deltaValueZ);
2495 };
2496 
2497 struct VariationDevice
2498 {
2499   friend struct Device;
2500 
2501   private:
2502 
2503   hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
2504   { return font->em_scalef_x (get_delta (font, store)); }
2505 
2506   hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
2507   { return font->em_scalef_y (get_delta (font, store)); }
2508 
2509   VariationDevice* copy (hb_serialize_context_t *c) const
2510   {
2511     TRACE_SERIALIZE (this);
2512     return_trace (c->embed<VariationDevice> (this));
2513   }
2514 
2515   bool sanitize (hb_sanitize_context_t *c) const
2516   {
2517     TRACE_SANITIZE (this);
2518     return_trace (c->check_struct (this));
2519   }
2520 
2521   private:
2522 
2523   float get_delta (hb_font_t *font, const VariationStore &store) const
2524   {
2525     return store.get_delta (outerIndex, innerIndex, font->coords, font->num_coords);
2526   }
2527 
2528   protected:
2529   HBUINT16	outerIndex;
2530   HBUINT16	innerIndex;
2531   HBUINT16	deltaFormat;	/* Format identifier for this table: 0x8000 */
2532   public:
2533   DEFINE_SIZE_STATIC (6);
2534 };
2535 
2536 struct DeviceHeader
2537 {
2538   protected:
2539   HBUINT16		reserved1;
2540   HBUINT16		reserved2;
2541   public:
2542   HBUINT16		format;		/* Format identifier */
2543   public:
2544   DEFINE_SIZE_STATIC (6);
2545 };
2546 
2547 struct Device
2548 {
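  /* Dispatch on the format word shared by both variants: formats 1..3 are
   * classic ppem-based hinting Device tables, 0x8000 is a VariationDevice
   * that fetches its delta from the ItemVariationStore. */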
2549   hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
2550   {
2551     switch (u.b.format)
2552     {
2553 #ifndef HB_NO_HINTING
2554     case 1: case 2: case 3:
2555       return u.hinting.get_x_delta (font);
2556 #endif
2557 #ifndef HB_NO_VAR
2558     case 0x8000:
2559       return u.variation.get_x_delta (font, store);
2560 #endif
2561     default:
2562       return 0;
2563     }
2564   }
2565   hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
2566   {
2567     switch (u.b.format)
2568     {
2569 #ifndef HB_NO_HINTING
2570     case 1: case 2: case 3:
2571       return u.hinting.get_y_delta (font);
2572 #endif
2573 #ifndef HB_NO_VAR
2574     case 0x8000:
2575       return u.variation.get_y_delta (font, store);
2576 #endif
2577     default:
2578       return 0;
2579     }
2580   }
2581 
2582   bool sanitize (hb_sanitize_context_t *c) const
2583   {
2584     TRACE_SANITIZE (this);
2585     if (!u.b.format.sanitize (c)) return_trace (false);
2586     switch (u.b.format) {
2587 #ifndef HB_NO_HINTING
2588     case 1: case 2: case 3:
2589       return_trace (u.hinting.sanitize (c));
2590 #endif
2591 #ifndef HB_NO_VAR
2592     case 0x8000:
2593       return_trace (u.variation.sanitize (c));
2594 #endif
2595     default:
2596       return_trace (true);
2597     }
2598   }
2599 
2600   Device* copy (hb_serialize_context_t *c) const
2601   {
2602     TRACE_SERIALIZE (this);
2603     switch (u.b.format) {
2604 #ifndef HB_NO_HINTING
2605     case 1:
2606     case 2:
2607     case 3:
2608       return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
2609 #endif
2610 #ifndef HB_NO_VAR
2611     case 0x8000:
2612       return_trace (reinterpret_cast<Device *> (u.variation.copy (c)));
2613 #endif
2614     default:
2615       return_trace (nullptr);
2616     }
2617   }
2618 
2619   protected:
2620   union {
2621   DeviceHeader		b;
2622   HintingDevice		hinting;
2623 #ifndef HB_NO_VAR
2624   VariationDevice	variation;
2625 #endif
2626   } u;
2627   public:
2628   DEFINE_SIZE_UNION (6, b);
2629 };
2630 
2631 
2632 } /* namespace OT */
2633 
2634 
2635 #endif /* HB_OT_LAYOUT_COMMON_HH */
2636