/*
 * Copyright © 2007,2008,2009  Red Hat, Inc.
 * Copyright © 2010,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_COMMON_HH
#define HB_OT_LAYOUT_COMMON_HH

#include "hb.hh"
#include "hb-ot-layout.hh"
#include "hb-open-type.hh"
#include "hb-set.hh"
#include "hb-bimap.hh"


#ifndef HB_MAX_NESTING_LEVEL
#define HB_MAX_NESTING_LEVEL	6
#endif
#ifndef HB_MAX_CONTEXT_LENGTH
#define HB_MAX_CONTEXT_LENGTH	64
#endif
#ifndef HB_CLOSURE_MAX_STAGES
/*
 * The maximum number of times a lookup can be applied during shaping.
 * Used to limit the number of iterations of the closure algorithm.
 * This must be larger than the number of times add_pause() is
 * called in a collect_features call of any shaper.
 */
#define HB_CLOSURE_MAX_STAGES	32
#endif

#ifndef HB_MAX_SCRIPTS
#define HB_MAX_SCRIPTS	500
#endif

#ifndef HB_MAX_LANGSYS
#define HB_MAX_LANGSYS	2000
#endif

#ifndef HB_MAX_FEATURES
#define HB_MAX_FEATURES 750
#endif

#ifndef HB_MAX_FEATURE_INDICES
#define HB_MAX_FEATURE_INDICES	1500
#endif

#ifndef HB_MAX_LOOKUP_INDICES
#define HB_MAX_LOOKUP_INDICES	20000
#endif


namespace OT {


#define NOT_COVERED		((unsigned int) -1)


template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
				       Iterator it);

template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
				       Iterator it);

static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID> &glyphs,
					  const hb_set_t &klasses,
					  bool use_class_zero,
					  hb_map_t *klass_map /*INOUT*/);


struct hb_prune_langsys_context_t
{
  hb_prune_langsys_context_t (const void         *table_,
                              hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
                              const hb_map_t     *duplicate_feature_map_,
                              hb_set_t           *new_collected_feature_indexes_)
      :table (table_),
      script_langsys_map (script_langsys_map_),
      duplicate_feature_map (duplicate_feature_map_),
      new_feature_indexes (new_collected_feature_indexes_),
      script_count (0),langsys_count (0) {}

  bool visitedScript (const void *s)
  {
    if (script_count++ > HB_MAX_SCRIPTS)
      return true;

    return visited (s, visited_script);
  }

  bool visitedLangsys (const void *l)
  {
    if (langsys_count++ > HB_MAX_LANGSYS)
      return true;

    return visited (l, visited_langsys);
  }

  private:
  template <typename T>
  bool visited (const T *p, hb_set_t &visited_set)
  {
    hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) p - (uintptr_t) table);
    if (visited_set.has (delta))
      return true;

    visited_set.add (delta);
    return false;
  }

  public:
  const void *table;
  hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
  const hb_map_t     *duplicate_feature_map;
  hb_set_t           *new_feature_indexes;

  private:
  hb_set_t visited_script;
  hb_set_t visited_langsys;
  unsigned script_count;
  unsigned langsys_count;
};

struct hb_subset_layout_context_t :
  hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{
  const char *get_name () { return "SUBSET_LAYOUT"; }
  static return_t default_return_value () { return hb_empty_t (); }

  bool visitScript ()
  {
    return script_count++ < HB_MAX_SCRIPTS;
  }

  bool visitLangSys ()
  {
    return langsys_count++ < HB_MAX_LANGSYS;
  }

  bool visitFeatureIndex (int count)
  {
    feature_index_count += count;
    return feature_index_count < HB_MAX_FEATURE_INDICES;
  }

  bool visitLookupIndex()
  {
    lookup_index_count++;
    return lookup_index_count < HB_MAX_LOOKUP_INDICES;
  }

  hb_subset_context_t *subset_context;
  const hb_tag_t table_tag;
  const hb_map_t *lookup_index_map;
  const hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
  const hb_map_t *feature_index_map;
  unsigned cur_script_index;

  hb_subset_layout_context_t (hb_subset_context_t *c_,
			      hb_tag_t tag_,
			      hb_map_t *lookup_map_,
			      hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
			      hb_map_t *feature_index_map_) :
				subset_context (c_),
				table_tag (tag_),
				lookup_index_map (lookup_map_),
				script_langsys_map (script_langsys_map_),
				feature_index_map (feature_index_map_),
				cur_script_index (0xFFFFu),
				script_count (0),
				langsys_count (0),
				feature_index_count (0),
				lookup_index_count (0)
  {}

  private:
  unsigned script_count;
  unsigned langsys_count;
  unsigned feature_index_count;
  unsigned lookup_index_count;
};

struct hb_collect_variation_indices_context_t :
       hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_set_t *layout_variation_indices;
  const hb_set_t *glyph_set;
  const hb_map_t *gpos_lookups;

  hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
					  const hb_set_t *glyph_set_,
					  const hb_map_t *gpos_lookups_) :
					layout_variation_indices (layout_variation_indices_),
					glyph_set (glyph_set_),
					gpos_lookups (gpos_lookups_) {}
};

template<typename OutputArray>
struct subset_offset_array_t
{
  subset_offset_array_t (hb_subset_context_t *subset_context_,
			 OutputArray& out_,
			 const void *base_) : subset_context (subset_context_),
					      out (out_), base (base_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    auto snap = subset_context->serializer->snapshot ();
    bool ret = o->serialize_subset (subset_context, offset, base);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
};


template<typename OutputArray, typename Arg>
struct subset_offset_array_arg_t
{
  subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
			     OutputArray& out_,
			     const void *base_,
			     Arg &&arg_) : subset_context (subset_context_), out (out_),
					  base (base_), arg (arg_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    auto snap = subset_context->serializer->snapshot ();
    bool ret = o->serialize_subset (subset_context, offset, base, arg);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
  Arg &&arg;
};

/*
 * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
 * and discards the offset in the array if the subset operation results in an empty
 * thing.
 */
struct
{
  template<typename OutputArray>
  subset_offset_array_t<OutputArray>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
	       const void *base) const
  { return subset_offset_array_t<OutputArray> (subset_context, out, base); }

  /* Variant with one extra argument passed to serialize_subset */
  template<typename OutputArray, typename Arg>
  subset_offset_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
	       const void *base, Arg &&arg) const
  { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
}
HB_FUNCOBJ (subset_offset_array);
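
/* Illustrative sketch, not part of the original header: a typical caller
 * feeds an offset array through this functor from an iterator pipeline.
 * Here `c` is the hb_subset_context_t and `rule` / `out->rule` stand in for
 * a hypothetical source offset array and its serialized copy:
 *
 *   + hb_iter (rule)
 *   | hb_apply (subset_offset_array (c, out->rule, this))
 *   ;
 *
 * Offsets whose targets subset to nothing are popped back off the output
 * array and the serializer is reverted to the snapshot taken beforehand.
 */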

template<typename OutputArray>
struct subset_record_array_t
{
  subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
			 const void *base_) : subset_layout_context (c_),
					      out (out_), base (base_) {}

  template <typename T>
  void
  operator () (T&& record)
  {
    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    bool ret = record.subset (subset_layout_context, base);
    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    else out->len++;
  }

  private:
  hb_subset_layout_context_t *subset_layout_context;
  OutputArray *out;
  const void *base;
};

/*
 * Helper to subset a RecordList/record array. Subsets each Record in the array and
 * discards the record if the subset operation returns false.
 */
struct
{
  template<typename OutputArray>
  subset_record_array_t<OutputArray>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
	       const void *base) const
  { return subset_record_array_t<OutputArray> (c, out, base); }
}
HB_FUNCOBJ (subset_record_array);
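
/* Usage sketch, not part of the original header: RecordListOf<Type>::subset
 * further down applies this functor over a whole record list, with `l` the
 * hb_subset_layout_context_t and `out` the serialized copy:
 *
 *   + this->iter ()
 *   | hb_apply (subset_record_array (l, out, this))
 *   ;
 *
 * On failure the serializer is reverted to the snapshot; out->len is only
 * bumped for records whose subset () succeeds.
 */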

/*
 *
 * OpenType Layout Common Table Formats
 *
 */


/*
 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
 */

struct Record_sanitize_closure_t {
  hb_tag_t tag;
  const void *list_base;
};

template <typename Type>
struct Record
{
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);
    bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    const Record_sanitize_closure_t closure = {tag, base};
    return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
  }

  Tag		tag;		/* 4-byte Tag identifier */
  Offset16To<Type>
		offset;		/* Offset from beginning of object holding
				 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};

template <typename Type>
struct RecordArrayOf : SortedArray16Of<Record<Type>>
{
  const Offset16To<Type>& get_offset (unsigned int i) const
  { return (*this)[i].offset; }
  Offset16To<Type>& get_offset (unsigned int i)
  { return (*this)[i].offset; }
  const Tag& get_tag (unsigned int i) const
  { return (*this)[i].tag; }
  unsigned int get_tags (unsigned int start_offset,
			 unsigned int *record_count /* IN/OUT */,
			 hb_tag_t     *record_tags /* OUT */) const
  {
    if (record_count)
    {
      + this->sub_array (start_offset, record_count)
      | hb_map (&Record<Type>::tag)
      | hb_sink (hb_array (record_tags, *record_count))
      ;
    }
    return this->len;
  }
  bool find_index (hb_tag_t tag, unsigned int *index) const
  {
    return this->bfind (tag, index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
  }
};

template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
  const Type& operator [] (unsigned int i) const
  { return this+this->get_offset (i); }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + this->iter ()
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (RecordArrayOf<Type>::sanitize (c, this));
  }
};

#ifndef ENABLE_ICCARM  // Adaptive compilation: only reorders the code, it does not change behavior
struct Feature;

struct RecordListOfFeature : RecordListOf<Feature>
{
  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->feature_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }
};

struct Script;
struct RecordListOfScript : RecordListOf<Script>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    for (auto _ : + hb_zip (*this, hb_range (count)))
    {
      auto snap = c->serializer->snapshot ();
      l->cur_script_index = _.second;
      bool ret = _.first.subset (l, this);
      if (!ret) c->serializer->revert (snap);
      else out->len++;
    }

    return_trace (true);
  }
};

struct RangeRecord
{
  int cmp (hb_codepoint_t g) const
  { return g < first ? -1 : g <= last ? 0 : +1; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  bool intersects (const hb_set_t *glyphs) const
  { return glyphs->intersects (first, last); }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  { return glyphs->add_range (first, last); }

  HBGlyphID	first;		/* First GlyphID in the range */
  HBGlyphID	last;		/* Last GlyphID in the range */
  HBUINT16	value;		/* Value */
  public:
  DEFINE_SIZE_STATIC (6);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);


struct IndexArray : Array16Of<Index>
{
  bool intersects (const hb_map_t *indexes) const
  { return hb_any (*this, indexes); }

  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  hb_subset_layout_context_t *l,
		  Iterator it)
  {
    if (!it) return;
    if (unlikely (!c->extend_min ((*this)))) return;

    for (const auto _ : it)
    {
      if (!l->visitLookupIndex()) break;

      Index i;
      i = _;
      c->copy (i);
      this->len++;
    }
  }

  unsigned int get_indexes (unsigned int start_offset,
			    unsigned int *_count /* IN/OUT */,
			    unsigned int *_indexes /* OUT */) const
  {
    if (_count)
    {
      + this->sub_array (start_offset, _count)
      | hb_sink (hb_array (_indexes, *_count))
      ;
    }
    return this->len;
  }

  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (as_array ());
  }
};


struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
				    unsigned int *feature_count /* IN/OUT */,
				    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
    return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
  {
    if (reqFeatureIndex != o.reqFeatureIndex)
      return false;

    auto iter =
    + hb_iter (featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    auto o_iter =
    + hb_iter (o.featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    if (iter.len () != o_iter.len ())
      return false;

    for (const auto _ : + hb_zip (iter, o_iter))
      if (_.first != _.second) return false;

    return true;
  }

  void collect_features (hb_prune_langsys_context_t *c) const
  {
    if (!has_required_feature () && !get_feature_count ()) return;
    if (c->visitedLangsys (this)) return;
    if (has_required_feature () &&
        c->duplicate_feature_map->has (reqFeatureIndex))
      c->new_feature_indexes->add (get_required_feature_index ());

    + hb_iter (featureIndex)
    | hb_filter (c->duplicate_feature_map)
    | hb_sink (c->new_feature_indexes)
    ;
  }

  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l,
	       const Tag                  *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;

    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
				 * reordering table) */
  HBUINT16	reqFeatureIndex;/* Index of a feature required for this
				 * language system--if no required features
				 * = 0xFFFFu */
  IndexArray	featureIndex;	/* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);

struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
				  unsigned int *lang_sys_count /* IN/OUT */,
				  hb_tag_t     *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const           { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  void prune_langsys (hb_prune_langsys_context_t *c,
                      unsigned script_index) const
  {
    if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
    if (c->visitedScript (this)) return;

    if (!c->script_langsys_map->has (script_index))
    {
      hb_set_t* empty_set = hb_set_create ();
      if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
      {
	hb_set_destroy (empty_set);
	return;
      }
    }

    unsigned langsys_count = get_lang_sys_count ();
    if (has_default_lang_sys ())
    {
      // only collect features from non-redundant langsys
      const LangSys& d = get_default_lang_sys ();
      d.collect_features (c);

      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        if (l.compare (d, c->duplicate_feature_map)) continue;

        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
    else
    {
      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
  }

  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
      }
      else
      {
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
      }
    }

    const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
    if (active_langsys)
    {
      unsigned count = langSys.len;
      + hb_zip (langSys, hb_range (count))
      | hb_filter (active_langsys, hb_second)
      | hb_map (hb_first)
      | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
      | hb_apply (subset_record_array (l, &(out->langSys), this))
      ;
    }

    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  Offset16To<LangSys>
		defaultLangSys;	/* Offset to DefaultLangSys table--from
				 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
		langSys;	/* Array of LangSysRecords--listed
				 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};

typedef RecordListOfScript ScriptList;
#endif  //  ENABLE_ICCARM

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
struct FeatureParamsSize
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);

    /* This subtable has some "history", if you will.  Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
     * beginning of the FeatureList table!  Now, that is dealt with in the
     * Feature implementation.  But we still need to be able to tell junk from
     * real data.  Note: We don't check that the nameID actually exists.
     *
     * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
     *
     * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
     * coming out soon, and that the makeotf program will build a font with a
     * 'size' feature that is correct by the specification.
     *
     * The specification for this feature tag is in the "OpenType Layout Tag
     * Registry". You can see a copy of this at:
     * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
     *
     * Here is one set of rules to determine if the 'size' feature is built
     * correctly, or as by the older versions of MakeOTF. You may be able to do
     * better.
     *
     * Assume that the offset to the size feature is according to specification,
     * and make the following value checks. If it fails, assume the size
     * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
     * If this fails, reject the 'size' feature. The older makeOTF's calculated the
     * offset from the beginning of the FeatureList table, rather than from the
     * beginning of the 'size' Feature table.
     *
     * If "design size" == 0:
     *     fails check
     *
     * Else if ("subfamily identifier" == 0 and
     *     "range start" == 0 and
     *     "range end" == 0 and
     *     "range start" == 0 and
     *     "menu name ID" == 0)
     *     passes check: this is the format used when there is a design size
     * specified, but there is no recommended size range.
     *
     * Else if ("design size" <  "range start" or
     *     "design size" >   "range end" or
     *     "range end" <= "range start" or
     *     "menu name ID"  < 256 or
     *     "menu name ID"  > 32767 or
     *     menu name ID is not a name ID which is actually in the name table)
     *     fails test
     * Else
     *     passes test.
     */

    if (!designSize)
      return_trace (false);
    else if (subfamilyID == 0 &&
	     subfamilyNameID == 0 &&
	     rangeStart == 0 &&
	     rangeEnd == 0)
      return_trace (true);
    else if (designSize < rangeStart ||
	     designSize > rangeEnd ||
	     subfamilyNameID < 256 ||
	     subfamilyNameID > 32767)
      return_trace (false);
    else
      return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16	designSize;	/* Represents the design size in 720/inch
				 * units (decipoints).  The design size entry
				 * must be non-zero.  When there is a design
				 * size but no recommended size range, the
				 * rest of the array will consist of zeros. */
  HBUINT16	subfamilyID;	/* Has no independent meaning, but serves
				 * as an identifier that associates fonts
				 * in a subfamily. All fonts which share a
				 * Preferred or Font Family name and which
				 * differ only by size range shall have the
				 * same subfamily value, and no fonts which
				 * differ in weight or style shall have the
				 * same subfamily value. If this value is
				 * zero, the remaining fields in the array
				 * will be ignored. */
  NameID	subfamilyNameID;/* If the preceding value is non-zero, this
				 * value must be set in the range 256 - 32767
				 * (inclusive). It records the value of a
				 * field in the name table, which must
				 * contain English-language strings encoded
				 * in Windows Unicode and Macintosh Roman,
				 * and may contain additional strings
				 * localized to other scripts and languages.
				 * Each of these strings is the name an
				 * application should use, in combination
				 * with the family name, to represent the
				 * subfamily in a menu.  Applications will
				 * choose the appropriate version based on
				 * their selection criteria. */
  HBUINT16	rangeStart;	/* Small end of the recommended usage range
				 * (exclusive), stored in 720/inch units
				 * (decipoints). */
  HBUINT16	rangeEnd;	/* Large end of the recommended usage range
				 * (inclusive), stored in 720/inch units
				 * (decipoints). */
  public:
  DEFINE_SIZE_STATIC (10);
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
struct FeatureParamsStylisticSet
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Right now minorVersion is at zero.  Which means, any table supports
     * the uiNameID field. */
    return_trace (c->check_struct (this));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16	version;	/* (set to 0): This corresponds to a “minor”
				 * version number. Additional data may be
				 * added to the end of this Feature Parameters
				 * table in the future. */

  NameID	uiNameID;	/* The 'name' table name ID that specifies a
				 * string (or strings, for multiple languages)
				 * for a user-interface label for this
				 * feature.  The values of uiLabelNameId and
				 * sampleTextNameId are expected to be in the
				 * font-specific name ID range (256-32767),
				 * though that is not a requirement in this
				 * Feature Parameters specification. The
				 * user-interface label for the feature can
				 * be provided in multiple languages. An
				 * English string should be included as a
				 * fallback. The string should be kept to a
				 * minimal length to fit comfortably with
				 * different application interfaces. */
  public:
  DEFINE_SIZE_STATIC (4);
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
struct FeatureParamsCharacterVariants
{
  unsigned
  get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
  {
    if (char_count)
    {
      + characters.sub_array (start_offset, char_count)
      | hb_sink (hb_array (chars, *char_count))
      ;
    }
    return characters.len;
  }

  unsigned get_size () const
  { return min_size + characters.len * HBUINT24::static_size; }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  characters.sanitize (c));
  }

  HBUINT16	format;			/* Format number is set to 0. */
  NameID	featUILableNameID;	/* The ‘name’ table name ID that
					 * specifies a string (or strings,
					 * for multiple languages) for a
					 * user-interface label for this
					 * feature. (May be NULL.) */
  NameID	featUITooltipTextNameID;/* The ‘name’ table name ID that
					 * specifies a string (or strings,
					 * for multiple languages) that an
					 * application can use for tooltip
					 * text for this feature. (May be
					 * nullptr.) */
  NameID	sampleTextNameID;	/* The ‘name’ table name ID that
					 * specifies sample text that
					 * illustrates the effect of this
					 * feature. (May be NULL.) */
  HBUINT16	numNamedParameters;	/* Number of named parameters. (May
					 * be zero.) */
  NameID	firstParamUILabelNameID;/* The first ‘name’ table name ID
					 * used to specify strings for
					 * user-interface labels for the
					 * feature parameters. (Must be zero
					 * if numParameters is zero.) */
  Array16Of<HBUINT24>
		characters;		/* Array of the Unicode Scalar Value
					 * of the characters for which this
					 * feature provides glyph variants.
					 * (May be zero.) */
  public:
  DEFINE_SIZE_ARRAY (14, characters);
};

struct FeatureParams
{
  bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return true;
#endif
    TRACE_SANITIZE (this);
    if (tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.sanitize (c));
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c, const Tag* tag) const
  {
    TRACE_SUBSET (this);
    if (!tag) return_trace (false);
    if (*tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.subset (c));
    return_trace (false);
  }

#ifndef HB_NO_LAYOUT_FEATURE_PARAMS
  const FeatureParamsSize& get_size_params (hb_tag_t tag) const
  {
    if (tag == HB_TAG ('s','i','z','e'))
      return u.size;
    return Null (FeatureParamsSize);
  }
  const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return u.stylisticSet;
    return Null (FeatureParamsStylisticSet);
  }
  const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return u.characterVariants;
    return Null (FeatureParamsCharacterVariants);
  }
#endif

  private:
  union {
  FeatureParamsSize			size;
  FeatureParamsStylisticSet		stylisticSet;
  FeatureParamsCharacterVariants	characterVariants;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};

#ifdef ENABLE_ICCARM  // Adaptive compilation: only reorders the code, it does not change behavior
struct IndexArray : Array16Of<Index>
{
  bool intersects (const hb_map_t *indexes) const
  { return hb_any (*this, indexes); }

  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  hb_subset_layout_context_t *l,
		  Iterator it)
  {
    if (!it) return;
    if (unlikely (!c->extend_min ((*this)))) return;

    for (const auto _ : it)
    {
      if (!l->visitLookupIndex()) break;

      Index i;
      i = _;
      c->copy (i);
      this->len++;
    }
  }

  unsigned int get_indexes (unsigned int start_offset,
			    unsigned int *_count /* IN/OUT */,
			    unsigned int *_indexes /* OUT */) const
  {
    if (_count)
    {
      + this->sub_array (start_offset, _count)
      | hb_sink (hb_array (_indexes, *_count))
      ;
    }
    return this->len;
  }

  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (as_array ());
  }
};
#endif // ENABLE_ICCARM

struct Feature
{
  unsigned int get_lookup_count () const
  { return lookupIndex.len; }
  hb_tag_t get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  unsigned int get_lookup_indexes (unsigned int start_index,
				   unsigned int *lookup_count /* IN/OUT */,
				   unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
  { lookupIndex.add_indexes_to (lookup_indexes); }

  const FeatureParams &get_feature_params () const
  { return this+featureParams; }

  bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
  { return lookupIndex.intersects (lookup_indexes); }

  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->featureParams.serialize_subset (c, featureParams, this, tag);

    auto it =
    + hb_iter (lookupIndex)
    | hb_filter (l->lookup_index_map)
    | hb_map (l->lookup_index_map)
    ;

    out->lookupIndex.serialize (c->serializer, l, it);
    // The decision to keep or drop this feature is already made before we get here,
    // so always retain it.
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t *closure = nullptr) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
      return_trace (false);

    /* Some earlier versions of Adobe tools calculated the offset of the
     * FeatureParams subtable from the beginning of the FeatureList table!
     *
     * If sanitizing "failed" for the FeatureParams subtable, try it with the
     * alternative location.  We would know sanitize "failed" if the old value
     * of the offset was non-zero, but it's zeroed now.
     *
     * Only do this for the 'size' feature, since at the time of the faulty
     * Adobe tools, only the 'size' feature had FeatureParams defined.
     */

    if (likely (featureParams.is_null ()))
      return_trace (true);

    unsigned int orig_offset = featureParams;
    if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
      return_trace (false);

    if (featureParams == 0 && closure &&
	closure->tag == HB_TAG ('s','i','z','e') &&
	closure->list_base && closure->list_base < this)
    {
      unsigned int new_offset_int = orig_offset -
				    (((char *) this) - ((char *) closure->list_base));

      Offset16To<FeatureParams> new_offset;
      /* Check that it would not overflow. */
      new_offset = new_offset_int;
      if (new_offset == new_offset_int &&
	  c->try_set (&featureParams, new_offset_int) &&
	  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
	return_trace (false);
    }

    return_trace (true);
  }

  Offset16To<FeatureParams>
		 featureParams;	/* Offset to Feature Parameters table (if one
				 * has been defined for the feature), relative
				 * to the beginning of the Feature Table; = Null
				 * if not required */
  IndexArray	 lookupIndex;	/* Array of LookupList indices */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};

#ifdef ENABLE_ICCARM  // Adaptive compilation: only reorders the code, it does not change behavior
struct RecordListOfFeature : RecordListOf<Feature>
{
  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->feature_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }
};

struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
				    unsigned int *feature_count /* IN/OUT */,
				    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
    return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
  {
    if (reqFeatureIndex != o.reqFeatureIndex)
      return false;

    auto iter =
    + hb_iter (featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    auto o_iter =
    + hb_iter (o.featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    if (iter.len () != o_iter.len ())
      return false;

    for (const auto _ : + hb_zip (iter, o_iter))
      if (_.first != _.second) return false;

    return true;
  }

  void collect_features (hb_prune_langsys_context_t *c) const
  {
    if (!has_required_feature () && !get_feature_count ()) return;
    if (c->visitedLangsys (this)) return;
    if (has_required_feature () &&
        c->duplicate_feature_map->has (reqFeatureIndex))
      c->new_feature_indexes->add (get_required_feature_index ());

    + hb_iter (featureIndex)
    | hb_filter (c->duplicate_feature_map)
    | hb_sink (c->new_feature_indexes)
    ;
  }

  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l,
	       const Tag                  *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;

    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
				 * reordering table) */
  HBUINT16	reqFeatureIndex;/* Index of a feature required for this
				 * language system--if no required features
				 * = 0xFFFFu */
  IndexArray	featureIndex;	/* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);

struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
				  unsigned int *lang_sys_count /* IN/OUT */,
				  hb_tag_t     *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const           { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  void prune_langsys (hb_prune_langsys_context_t *c,
                      unsigned script_index) const
  {
    if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
    if (c->visitedScript (this)) return;

    if (!c->script_langsys_map->has (script_index))
    {
      hb_set_t* empty_set = hb_set_create ();
      if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
      {
	hb_set_destroy (empty_set);
	return;
      }
    }

    unsigned langsys_count = get_lang_sys_count ();
    if (has_default_lang_sys ())
    {
      // only collect features from non-redundant langsys
      const LangSys& d = get_default_lang_sys ();
      d.collect_features (c);

      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        if (l.compare (d, c->duplicate_feature_map)) continue;

        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
    else
    {
      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
  }

  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
      }
      else
      {
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
      }
    }

    const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
    if (active_langsys)
    {
      unsigned count = langSys.len;
      + hb_zip (langSys, hb_range (count))
      | hb_filter (active_langsys, hb_second)
      | hb_map (hb_first)
      | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
      | hb_apply (subset_record_array (l, &(out->langSys), this))
      ;
    }

    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  Offset16To<LangSys>
		defaultLangSys;	/* Offset to DefaultLangSys table--from
				 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
		langSys;	/* Array of LangSysRecords--listed
				 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};
struct RecordListOfScript : RecordListOf<Script>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    for (auto _ : + hb_zip (*this, hb_range (count)))
    {
      auto snap = c->serializer->snapshot ();
      l->cur_script_index = _.second;
      bool ret = _.first.subset (l, this);
      if (!ret) c->serializer->revert (snap);
      else out->len++;
    }

    return_trace (true);
  }
};

struct RangeRecord
{
  int cmp (hb_codepoint_t g) const
  { return g < first ? -1 : g <= last ? 0 : +1; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  bool intersects (const hb_set_t *glyphs) const
  { return glyphs->intersects (first, last); }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  { return glyphs->add_range (first, last); }

  HBGlyphID	first;		/* First GlyphID in the range */
  HBGlyphID	last;		/* Last GlyphID in the range */
  HBUINT16	value;		/* Value */
  public:
  DEFINE_SIZE_STATIC (6);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);

typedef RecordListOfScript ScriptList;
#endif // ENABLE_ICCARM

1531 typedef RecordListOf<Feature> FeatureList;
1532 
1533 
1534 struct LookupFlag : HBUINT16
1535 {
1536   enum Flags {
1537     RightToLeft		= 0x0001u,
1538     IgnoreBaseGlyphs	= 0x0002u,
1539     IgnoreLigatures	= 0x0004u,
1540     IgnoreMarks		= 0x0008u,
1541     IgnoreFlags		= 0x000Eu,
1542     UseMarkFilteringSet	= 0x0010u,
1543     Reserved		= 0x00E0u,
1544     MarkAttachmentType	= 0xFF00u
1545   };
1546   public:
1547   DEFINE_SIZE_STATIC (2);
1548 };
1549 
1550 } /* namespace OT */
1551 /* This has to be outside the namespace. */
1552 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
1553 namespace OT {
1554 
1555 struct Lookup
1556 {
1557   unsigned int get_subtable_count () const { return subTable.len; }
1558 
1559   template <typename TSubTable>
1560   const Array16OfOffset16To<TSubTable>& get_subtables () const
1561   { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
1562   template <typename TSubTable>
1563   Array16OfOffset16To<TSubTable>& get_subtables ()
1564   { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }
1565 
1566   template <typename TSubTable>
1567   const TSubTable& get_subtable (unsigned int i) const
1568   { return this+get_subtables<TSubTable> ()[i]; }
1569   template <typename TSubTable>
1570   TSubTable& get_subtable (unsigned int i)
1571   { return this+get_subtables<TSubTable> ()[i]; }
1572 
1573   unsigned int get_size () const
1574   {
1575     const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
1576     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1577       return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
1578     return (const char *) &markFilteringSet - (const char *) this;
1579   }
1580 
1581   unsigned int get_type () const { return lookupType; }
1582 
1583   /* lookup_props is a 32-bit integer where the lower 16 bits are the LookupFlag
1584    * and the upper 16 bits are the mark-filtering-set index, if the lookup uses one.
1585    * Not to be confused with glyph_props, which is very similar. */
1586   uint32_t get_props () const
1587   {
1588     unsigned int flag = lookupFlag;
1589     if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
1590     {
1591       const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1592       flag += (markFilteringSet << 16);
1593     }
1594     return flag;
1595   }
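  /* Illustrative sketch (not upstream code; variable names are hypothetical):
   * unpacking the value returned by get_props ().
   *
   *   uint32_t props    = lookup.get_props ();
   *   uint16_t flags    = props & 0xFFFFu;  // LookupFlag bits
   *   uint16_t mark_set = props >> 16;      // mark-filtering-set index; only
   *                                         // meaningful when flags has
   *                                         // LookupFlag::UseMarkFilteringSet set
   */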
1596 
1597   template <typename TSubTable, typename context_t, typename ...Ts>
1598   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1599   {
1600     unsigned int lookup_type = get_type ();
1601     TRACE_DISPATCH (this, lookup_type);
1602     unsigned int count = get_subtable_count ();
1603     for (unsigned int i = 0; i < count; i++) {
1604       typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, hb_forward<Ts> (ds)...);
1605       if (c->stop_sublookup_iteration (r))
1606 	return_trace (r);
1607     }
1608     return_trace (c->default_return_value ());
1609   }
1610 
1611   bool serialize (hb_serialize_context_t *c,
1612 		  unsigned int lookup_type,
1613 		  uint32_t lookup_props,
1614 		  unsigned int num_subtables)
1615   {
1616     TRACE_SERIALIZE (this);
1617     if (unlikely (!c->extend_min (*this))) return_trace (false);
1618     lookupType = lookup_type;
1619     lookupFlag = lookup_props & 0xFFFFu;
1620     if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
1621     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1622     {
1623       if (unlikely (!c->extend (*this))) return_trace (false);
1624       HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1625       markFilteringSet = lookup_props >> 16;
1626     }
1627     return_trace (true);
1628   }
1629 
1630   template <typename TSubTable>
1631   bool subset (hb_subset_context_t *c) const
1632   {
1633     TRACE_SUBSET (this);
1634     auto *out = c->serializer->start_embed (*this);
1635     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1636     out->lookupType = lookupType;
1637     out->lookupFlag = lookupFlag;
1638 
1639     const hb_set_t *glyphset = c->plan->glyphset_gsub ();
1640     unsigned int lookup_type = get_type ();
1641     + hb_iter (get_subtables <TSubTable> ())
1642     | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
1643     | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
1644     ;
1645 
1646     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1647     {
1648       if (unlikely (!c->serializer->extend (out))) return_trace (false);
1649       const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1650       HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
1651       outMarkFilteringSet = markFilteringSet;
1652     }
1653 
1654     return_trace (true);
1655   }
1656 
1657   template <typename TSubTable>
1658   bool sanitize (hb_sanitize_context_t *c) const
1659   {
1660     TRACE_SANITIZE (this);
1661     if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
1662 
1663     unsigned subtables = get_subtable_count ();
1664     if (unlikely (!c->visit_subtables (subtables))) return_trace (false);
1665 
1666     if (lookupFlag & LookupFlag::UseMarkFilteringSet)
1667     {
1668       const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
1669       if (!markFilteringSet.sanitize (c)) return_trace (false);
1670     }
1671 
1672     if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
1673       return_trace (false);
1674 
1675     if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
1676     {
1677       /* The spec says all subtables of an Extension lookup should
1678        * have the same type, which shall not be the Extension type
1679        * itself (but we already checked for that).
1680        * This is especially important if one has a reverse type!
1681        *
1682        * We only do this if sanitizer edit_count is zero.  Otherwise,
1683        * some of the subtables might have become insane after they
1684        * were sanity-checked by the edits of subsequent subtables.
1685        * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
1686        */
1687       unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
1688       for (unsigned int i = 1; i < subtables; i++)
1689 	if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
1690 	  return_trace (false);
1691     }
1692     return_trace (true);
1693   }
1694 
1695   private:
1696   HBUINT16	lookupType;		/* Different enumerations for GSUB and GPOS */
1697   HBUINT16	lookupFlag;		/* Lookup qualifiers */
1698   Array16Of<Offset16>
1699 		subTable;		/* Array of SubTables */
1700 /*HBUINT16	markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
1701 					 * structure. This field is only present if bit
1702 					 * UseMarkFilteringSet of lookup flags is set. */
1703   public:
1704   DEFINE_SIZE_ARRAY (6, subTable);
1705 };
1706 
1707 typedef List16OfOffset16To<Lookup> LookupList;
1708 
1709 template <typename TLookup>
1710 struct LookupOffsetList : List16OfOffset16To<TLookup>
1711 {
1712   bool subset (hb_subset_context_t        *c,
1713 	       hb_subset_layout_context_t *l) const
1714   {
1715     TRACE_SUBSET (this);
1716     auto *out = c->serializer->start_embed (this);
1717     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1718 
1719     unsigned count = this->len;
1720     + hb_zip (*this, hb_range (count))
1721     | hb_filter (l->lookup_index_map, hb_second)
1722     | hb_map (hb_first)
1723     | hb_apply (subset_offset_array (c, *out, this))
1724     ;
1725     return_trace (true);
1726   }
1727 
1728   bool sanitize (hb_sanitize_context_t *c) const
1729   {
1730     TRACE_SANITIZE (this);
1731     return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
1732   }
1733 };
1734 
1735 
1736 /*
1737  * Coverage Table
1738  */
1739 
1740 struct CoverageFormat1
1741 {
1742   friend struct Coverage;
1743 
1744   private:
1745   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1746   {
1747     unsigned int i;
1748     glyphArray.bfind (glyph_id, &i, HB_BFIND_NOT_FOUND_STORE, NOT_COVERED);
1749     return i;
1750   }
1751 
1752   template <typename Iterator,
1753       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1754   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1755   {
1756     TRACE_SERIALIZE (this);
1757     return_trace (glyphArray.serialize (c, glyphs));
1758   }
1759 
1760   bool sanitize (hb_sanitize_context_t *c) const
1761   {
1762     TRACE_SANITIZE (this);
1763     return_trace (glyphArray.sanitize (c));
1764   }
1765 
1766   bool intersects (const hb_set_t *glyphs) const
1767   {
1768     /* TODO Speed up, using hb_set_next() and bsearch()? */
1769     for (const auto& g : glyphArray.as_array ())
1770       if (glyphs->has (g))
1771 	return true;
1772     return false;
1773   }
1774   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1775   { return glyphs->has (glyphArray[index]); }
1776 
1777   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1778   {
1779     unsigned count = glyphArray.len;
1780     for (unsigned i = 0; i < count; i++)
1781       if (glyphs->has (glyphArray[i]))
1782         intersect_glyphs->add (glyphArray[i]);
1783   }
1784 
1785   template <typename set_t>
1786   bool collect_coverage (set_t *glyphs) const
1787   { return glyphs->add_sorted_array (glyphArray.as_array ()); }
1788 
1789   public:
1790   /* Older compilers need this to be public. */
1791   struct iter_t
1792   {
1793     void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
1794     void fini () {}
1795     bool more () const { return i < c->glyphArray.len; }
1796     void next () { i++; }
1797     hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
1798     bool operator != (const iter_t& o) const
1799     { return i != o.i || c != o.c; }
1800 
1801     private:
1802     const struct CoverageFormat1 *c;
1803     unsigned int i;
1804   };
1805   private:
1806 
1807   protected:
1808   HBUINT16	coverageFormat;	/* Format identifier--format = 1 */
1809   SortedArray16Of<HBGlyphID>
1810 		glyphArray;	/* Array of GlyphIDs--in numerical order */
1811   public:
1812   DEFINE_SIZE_ARRAY (4, glyphArray);
1813 };
1814 
1815 struct CoverageFormat2
1816 {
1817   friend struct Coverage;
1818 
1819   private:
1820   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1821   {
1822     const RangeRecord &range = rangeRecord.bsearch (glyph_id);
1823     return likely (range.first <= range.last)
1824 	 ? (unsigned int) range.value + (glyph_id - range.first)
1825 	 : NOT_COVERED;
1826   }
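  /* Worked example (illustrative, made-up numbers): with a single range
   * { first = 20, last = 29, value = 5 },
   *
   *   get_coverage (23) == 5 + (23 - 20) == 8
   *   get_coverage (50) == NOT_COVERED     // not inside any range
   *
   * i.e. a range hands out consecutive coverage indices starting at value. */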
1827 
1828   template <typename Iterator,
1829       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
1830   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1831   {
1832     TRACE_SERIALIZE (this);
1833     if (unlikely (!c->extend_min (*this))) return_trace (false);
1834 
1835     if (unlikely (!glyphs))
1836     {
1837       rangeRecord.len = 0;
1838       return_trace (true);
1839     }
1840 
1841     /* TODO(iter) Write more efficiently? */
1842 
1843     unsigned num_ranges = 0;
1844     hb_codepoint_t last = (hb_codepoint_t) -2;
1845     for (auto g: glyphs)
1846     {
1847       if (last + 1 != g)
1848 	num_ranges++;
1849       last = g;
1850     }
1851 
1852     if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
1853 
1854     unsigned count = 0;
1855     unsigned range = (unsigned) -1;
1856     last = (hb_codepoint_t) -2;
1857     for (auto g: glyphs)
1858     {
1859       if (last + 1 != g)
1860       {
1861 	range++;
1862 	rangeRecord[range].first = g;
1863 	rangeRecord[range].value = count;
1864       }
1865       rangeRecord[range].last = g;
1866       last = g;
1867       count++;
1868     }
1869 
1870     return_trace (true);
1871   }
1872 
1873   bool sanitize (hb_sanitize_context_t *c) const
1874   {
1875     TRACE_SANITIZE (this);
1876     return_trace (rangeRecord.sanitize (c));
1877   }
1878 
1879   bool intersects (const hb_set_t *glyphs) const
1880   {
1881     /* TODO Speed up, using hb_set_next() and bsearch()? */
1882     /* TODO(iter) Rewrite as dagger. */
1883     for (const auto& range : rangeRecord.as_array ())
1884       if (range.intersects (glyphs))
1885 	return true;
1886     return false;
1887   }
1888   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1889   {
1890     /* TODO(iter) Rewrite as dagger. */
1891     for (const auto& range : rangeRecord.as_array ())
1892     {
1893       if (range.value <= index &&
1894 	  index < (unsigned int) range.value + (range.last - range.first) &&
1895 	  range.intersects (glyphs))
1896 	return true;
1897       else if (index < range.value)
1898 	return false;
1899     }
1900     return false;
1901   }
1902 
1903   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1904   {
1905     for (const auto& range : rangeRecord.as_array ())
1906     {
1907       if (!range.intersects (glyphs)) continue;
1908       for (hb_codepoint_t g = range.first; g <= range.last; g++)
1909         if (glyphs->has (g)) intersect_glyphs->add (g);
1910     }
1911   }
1912 
1913   template <typename set_t>
1914   bool collect_coverage (set_t *glyphs) const
1915   {
1916     unsigned int count = rangeRecord.len;
1917     for (unsigned int i = 0; i < count; i++)
1918       if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
1919 	return false;
1920     return true;
1921   }
1922 
1923   public:
1924   /* Older compilers need this to be public. */
1925   struct iter_t
1926   {
1927     void init (const CoverageFormat2 &c_)
1928     {
1929       c = &c_;
1930       coverage = 0;
1931       i = 0;
1932       j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
1933       if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
1934       {
1935 	/* Broken table. Skip. */
1936 	i = c->rangeRecord.len;
1937       }
1938     }
1939     void fini () {}
1940     bool more () const { return i < c->rangeRecord.len; }
1941     void next ()
1942     {
1943       if (j >= c->rangeRecord[i].last)
1944       {
1945 	i++;
1946 	if (more ())
1947 	{
1948 	  unsigned int old = coverage;
1949 	  j = c->rangeRecord[i].first;
1950 	  coverage = c->rangeRecord[i].value;
1951 	  if (unlikely (coverage != old + 1))
1952 	  {
1953 	    /* Broken table. Skip. Important to avoid DoS.
1954 	     * Also, our callers depend on coverage being
1955 	     * consecutive and monotonically increasing,
1956 	     * ie. iota(). */
1957 	   i = c->rangeRecord.len;
1958 	   return;
1959 	  }
1960 	}
1961 	return;
1962       }
1963       coverage++;
1964       j++;
1965     }
1966     hb_codepoint_t get_glyph () const { return j; }
1967     bool operator != (const iter_t& o) const
1968     { return i != o.i || j != o.j || c != o.c; }
1969 
1970     private:
1971     const struct CoverageFormat2 *c;
1972     unsigned int i, coverage;
1973     hb_codepoint_t j;
1974   };
1975   private:
1976 
1977   protected:
1978   HBUINT16	coverageFormat;	/* Format identifier--format = 2 */
1979   SortedArray16Of<RangeRecord>
1980 		rangeRecord;	/* Array of glyph ranges--ordered by
1981 				 * Start GlyphID. rangeCount entries
1982 				 * long */
1983   public:
1984   DEFINE_SIZE_ARRAY (4, rangeRecord);
1985 };
1986 
1987 struct Coverage
1988 {
1989   /* Has interface. */
1990   static constexpr unsigned SENTINEL = NOT_COVERED;
1991   typedef unsigned int value_t;
1992   value_t operator [] (hb_codepoint_t k) const { return get (k); }
1993   bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
1994   /* Predicate. */
1995   bool operator () (hb_codepoint_t k) const { return has (k); }
1996 
1997   unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
1998   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1999   {
2000     switch (u.format) {
2001     case 1: return u.format1.get_coverage (glyph_id);
2002     case 2: return u.format2.get_coverage (glyph_id);
2003     default:return NOT_COVERED;
2004     }
2005   }
2006 
2007   template <typename Iterator,
2008       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
2009   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
2010   {
2011     TRACE_SERIALIZE (this);
2012     if (unlikely (!c->extend_min (*this))) return_trace (false);
2013 
2014     unsigned count = 0;
2015     unsigned num_ranges = 0;
2016     hb_codepoint_t last = (hb_codepoint_t) -2;
2017     for (auto g: glyphs)
2018     {
2019       if (last + 1 != g)
2020 	num_ranges++;
2021       last = g;
2022       count++;
2023     }
2024     u.format = count <= num_ranges * 3 ? 1 : 2;
2025 
2026     switch (u.format)
2027     {
2028     case 1: return_trace (u.format1.serialize (c, glyphs));
2029     case 2: return_trace (u.format2.serialize (c, glyphs));
2030     default:return_trace (false);
2031     }
2032   }
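  /* Why `count <= num_ranges * 3` (illustrative reasoning, not upstream
   * commentary): format 1 costs 2 bytes per glyph, format 2 costs 6 bytes per
   * RangeRecord, so format 1 is no larger whenever 2 * count <= 6 * num_ranges.
   * E.g. the glyph set {1, 2, 3, 10} has count = 4 and num_ranges = 2;
   * 4 <= 6, so format 1 is chosen. */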
2033 
2034   bool subset (hb_subset_context_t *c) const
2035   {
2036     TRACE_SUBSET (this);
2037     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2038     const hb_map_t &glyph_map = *c->plan->glyph_map;
2039 
2040     auto it =
2041     + iter ()
2042     | hb_filter (glyphset)
2043     | hb_map_retains_sorting (glyph_map)
2044     ;
2045 
2046     bool ret = bool (it);
2047     Coverage_serialize (c->serializer, it);
2048     return_trace (ret);
2049   }
2050 
2051   bool sanitize (hb_sanitize_context_t *c) const
2052   {
2053     TRACE_SANITIZE (this);
2054     if (!u.format.sanitize (c)) return_trace (false);
2055     switch (u.format)
2056     {
2057     case 1: return_trace (u.format1.sanitize (c));
2058     case 2: return_trace (u.format2.sanitize (c));
2059     default:return_trace (true);
2060     }
2061   }
2062 
2063   bool intersects (const hb_set_t *glyphs) const
2064   {
2065     switch (u.format)
2066     {
2067     case 1: return u.format1.intersects (glyphs);
2068     case 2: return u.format2.intersects (glyphs);
2069     default:return false;
2070     }
2071   }
2072   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
2073   {
2074     switch (u.format)
2075     {
2076     case 1: return u.format1.intersects_coverage (glyphs, index);
2077     case 2: return u.format2.intersects_coverage (glyphs, index);
2078     default:return false;
2079     }
2080   }
2081 
2082   /* Might return false if array looks unsorted.
2083    * Used for faster rejection of corrupt data. */
2084   template <typename set_t>
2085   bool collect_coverage (set_t *glyphs) const
2086   {
2087     switch (u.format)
2088     {
2089     case 1: return u.format1.collect_coverage (glyphs);
2090     case 2: return u.format2.collect_coverage (glyphs);
2091     default:return false;
2092     }
2093   }
2094 
2095   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
2096   {
2097     switch (u.format)
2098     {
2099     case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
2100     case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
2101     default:return ;
2102     }
2103   }
2104 
2105   struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
2106   {
2107     static constexpr bool is_sorted_iterator = true;
2108     iter_t (const Coverage &c_ = Null (Coverage))
2109     {
2110       memset (this, 0, sizeof (*this));
2111       format = c_.u.format;
2112       switch (format)
2113       {
2114       case 1: u.format1.init (c_.u.format1); return;
2115       case 2: u.format2.init (c_.u.format2); return;
2116       default:				     return;
2117       }
2118     }
2119     bool __more__ () const
2120     {
2121       switch (format)
2122       {
2123       case 1: return u.format1.more ();
2124       case 2: return u.format2.more ();
2125       default:return false;
2126       }
2127     }
2128     void __next__ ()
2129     {
2130       switch (format)
2131       {
2132       case 1: u.format1.next (); break;
2133       case 2: u.format2.next (); break;
2134       default:			 break;
2135       }
2136     }
2137     typedef hb_codepoint_t __item_t__;
2138     __item_t__ __item__ () const { return get_glyph (); }
2139 
2140     hb_codepoint_t get_glyph () const
2141     {
2142       switch (format)
2143       {
2144       case 1: return u.format1.get_glyph ();
2145       case 2: return u.format2.get_glyph ();
2146       default:return 0;
2147       }
2148     }
2149     bool operator != (const iter_t& o) const
2150     {
2151       if (format != o.format) return true;
2152       switch (format)
2153       {
2154       case 1: return u.format1 != o.u.format1;
2155       case 2: return u.format2 != o.u.format2;
2156       default:return false;
2157       }
2158     }
2159 
2160     private:
2161     unsigned int format;
2162     union {
2163     CoverageFormat2::iter_t	format2; /* Put this one first since it's larger; helps shut up compiler. */
2164     CoverageFormat1::iter_t	format1;
2165     } u;
2166   };
2167   iter_t iter () const { return iter_t (*this); }
2168 
2169   protected:
2170   union {
2171   HBUINT16		format;		/* Format identifier */
2172   CoverageFormat1	format1;
2173   CoverageFormat2	format2;
2174   } u;
2175   public:
2176   DEFINE_SIZE_UNION (2, format);
2177 };
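/* Illustrative sketch (not part of the upstream sources): because iter_t is a
 * sorted iterator, a Coverage can feed the dagger pipelines from hb-iter.hh
 * directly, as Coverage::subset () above does.  For example, to remap the
 * covered glyphs that survive subsetting (names as in subset () above):
 *
 *   + coverage.iter ()
 *   | hb_filter (glyphset)                  // keep only retained glyphs
 *   | hb_map_retains_sorting (glyph_map)    // old gid -> new gid
 *   ;
 */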
2178 
2179 template<typename Iterator>
2180 static inline void
2181 Coverage_serialize (hb_serialize_context_t *c,
2182 		    Iterator it)
2183 { c->start_embed<Coverage> ()->serialize (c, it); }
2184 
2185 static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
2186 					  const hb_map_t &gid_klass_map,
2187 					  hb_sorted_vector_t<HBGlyphID> &glyphs,
2188 					  const hb_set_t &klasses,
2189                                           bool use_class_zero,
2190 					  hb_map_t *klass_map /*INOUT*/)
2191 {
2192   if (!klass_map)
2193   {
2194     ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
2195 						   | hb_map (gid_klass_map)));
2196     return;
2197   }
2198 
2199   /* Any glyph not assigned a class value falls into Class zero (0);
2200    * if any glyph is assigned to class 0, the remapping must start with 0->0. */
2201   if (!use_class_zero)
2202     klass_map->set (0, 0);
2203 
2204   unsigned idx = klass_map->has (0) ? 1 : 0;
2205   for (const unsigned k: klasses.iter ())
2206   {
2207     if (klass_map->has (k)) continue;
2208     klass_map->set (k, idx);
2209     idx++;
2210   }
2211 
2212   auto it =
2213   + glyphs.iter ()
2214   | hb_map_retains_sorting ([&] (const HBGlyphID& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
2215 			    {
2216 			      unsigned new_klass = klass_map->get (gid_klass_map[gid]);
2217 			      return hb_pair ((hb_codepoint_t)gid, new_klass);
2218 			    })
2219   ;
2220 
2221   c->propagate_error (glyphs, klasses);
2222   ClassDef_serialize (c, it);
2223 }
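/* Worked example (illustrative, made-up classes): if the retained glyphs use
 * original classes {5, 9} and use_class_zero is false, the loop above first
 * pins 0 -> 0 and then densifies the remaining classes in iteration order:
 * 5 -> 1, 9 -> 2.  The resulting klass_map lets callers rewrite any tables
 * that referred to the old class numbers. */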
2224 
2225 /*
2226  * Class Definition Table
2227  */
2228 
2229 struct ClassDefFormat1
2230 {
2231   friend struct ClassDef;
2232 
2233   private:
2234   unsigned int get_class (hb_codepoint_t glyph_id) const
2235   {
2236     return classValue[(unsigned int) (glyph_id - startGlyph)];
2237   }
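  /* Worked example (illustrative, made-up numbers): with startGlyph = 10 and
   * classValue = [1, 0, 2],
   *
   *   get_class (10) == 1
   *   get_class (12) == 2
   *
   * Glyphs outside [startGlyph, startGlyph + classValue.len) read the Null
   * value and therefore get class 0. */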
2238 
2239   template<typename Iterator,
2240 	   hb_requires (hb_is_iterator (Iterator))>
2241   bool serialize (hb_serialize_context_t *c,
2242 		  Iterator it)
2243   {
2244     TRACE_SERIALIZE (this);
2245     if (unlikely (!c->extend_min (*this))) return_trace (false);
2246 
2247     if (unlikely (!it))
2248     {
2249       classFormat = 1;
2250       startGlyph = 0;
2251       classValue.len = 0;
2252       return_trace (true);
2253     }
2254 
2255     hb_codepoint_t glyph_min = (*it).first;
2256     hb_codepoint_t glyph_max = + it
2257 			       | hb_map (hb_first)
2258 			       | hb_reduce (hb_max, 0u);
2259     unsigned glyph_count = glyph_max - glyph_min + 1;
2260 
2261     startGlyph = glyph_min;
2262     if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
2263     for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
2264     {
2265       unsigned idx = gid_klass_pair.first - glyph_min;
2266       classValue[idx] = gid_klass_pair.second;
2267     }
2268     return_trace (true);
2269   }
2270 
2271   bool subset (hb_subset_context_t *c,
2272 	       hb_map_t *klass_map = nullptr /*OUT*/,
2273                bool keep_empty_table = true,
2274                bool use_class_zero = true,
2275                const Coverage* glyph_filter = nullptr) const
2276   {
2277     TRACE_SUBSET (this);
2278     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2279     const hb_map_t &glyph_map = *c->plan->glyph_map;
2280 
2281     hb_sorted_vector_t<HBGlyphID> glyphs;
2282     hb_set_t orig_klasses;
2283     hb_map_t gid_org_klass_map;
2284 
2285     hb_codepoint_t start = startGlyph;
2286     hb_codepoint_t end   = start + classValue.len;
2287 
2288     for (const hb_codepoint_t gid : + hb_range (start, end)
2289                                     | hb_filter (glyphset))
2290     {
2291       if (glyph_filter && !glyph_filter->has(gid)) continue;
2292 
2293       unsigned klass = classValue[gid - start];
2294       if (!klass) continue;
2295 
2296       glyphs.push (glyph_map[gid]);
2297       gid_org_klass_map.set (glyph_map[gid], klass);
2298       orig_klasses.add (klass);
2299     }
2300 
2301     unsigned glyph_count = glyph_filter
2302                            ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
2303                            : glyphset.get_population ();
2304     use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
2305     ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
2306 				  glyphs, orig_klasses, use_class_zero, klass_map);
2307     return_trace (keep_empty_table || (bool) glyphs);
2308   }
2309 
2310   bool sanitize (hb_sanitize_context_t *c) const
2311   {
2312     TRACE_SANITIZE (this);
2313     return_trace (c->check_struct (this) && classValue.sanitize (c));
2314   }
2315 
2316   template <typename set_t>
2317   bool collect_coverage (set_t *glyphs) const
2318   {
2319     unsigned int start = 0;
2320     unsigned int count = classValue.len;
2321     for (unsigned int i = 0; i < count; i++)
2322     {
2323       if (classValue[i])
2324 	continue;
2325 
2326       if (start != i)
2327 	if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
2328 	  return false;
2329 
2330       start = i + 1;
2331     }
2332     if (start != count)
2333       if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
2334 	return false;
2335 
2336     return true;
2337   }
2338 
2339   template <typename set_t>
2340   bool collect_class (set_t *glyphs, unsigned klass) const
2341   {
2342     unsigned int count = classValue.len;
2343     for (unsigned int i = 0; i < count; i++)
2344       if (classValue[i] == klass) glyphs->add (startGlyph + i);
2345     return true;
2346   }
2347 
2348   bool intersects (const hb_set_t *glyphs) const
2349   {
2350     /* TODO Speed up, using hb_set_next()? */
2351     hb_codepoint_t start = startGlyph;
2352     hb_codepoint_t end = startGlyph + classValue.len;
2353     for (hb_codepoint_t iter = startGlyph - 1;
2354 	 hb_set_next (glyphs, &iter) && iter < end;)
2355       if (classValue[iter - start]) return true;
2356     return false;
2357   }
2358   bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
2359   {
2360     unsigned int count = classValue.len;
2361     if (klass == 0)
2362     {
2363       /* Match if there's any glyph that is not listed! */
2364       hb_codepoint_t g = HB_SET_VALUE_INVALID;
2365       if (!hb_set_next (glyphs, &g)) return false;
2366       if (g < startGlyph) return true;
2367       g = startGlyph + count - 1;
2368       if (hb_set_next (glyphs, &g)) return true;
2369       /* Fall through. */
2370     }
2371     /* TODO Speed up, using set overlap first? */
2372     /* TODO(iter) Rewrite as dagger. */
2373     HBUINT16 k {klass};
2374     const HBUINT16 *arr = classValue.arrayZ;
2375     for (unsigned int i = 0; i < count; i++)
2376       if (arr[i] == k && glyphs->has (startGlyph + i))
2377 	return true;
2378     return false;
2379   }
2380 
2381   void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2382   {
2383     unsigned count = classValue.len;
2384     if (klass == 0)
2385     {
2386       hb_codepoint_t endGlyph = startGlyph + count -1;
2387       for (hb_codepoint_t g : glyphs->iter ())
2388         if (g < startGlyph || g > endGlyph)
2389           intersect_glyphs->add (g);
2390 
2391       return;
2392     }
2393 
2394     for (unsigned i = 0; i < count; i++)
2395       if (classValue[i] == klass && glyphs->has (startGlyph + i))
2396         intersect_glyphs->add (startGlyph + i);
2397   }
2398 
2399   protected:
2400   HBUINT16	classFormat;	/* Format identifier--format = 1 */
2401   HBGlyphID	startGlyph;	/* First GlyphID of the classValueArray */
2402   Array16Of<HBUINT16>
2403 		classValue;	/* Array of Class Values--one per GlyphID */
2404   public:
2405   DEFINE_SIZE_ARRAY (6, classValue);
2406 };
2407 
2408 struct ClassDefFormat2
2409 {
2410   friend struct ClassDef;
2411 
2412   private:
2413   unsigned int get_class (hb_codepoint_t glyph_id) const
2414   {
2415     return rangeRecord.bsearch (glyph_id).value;
2416   }
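  /* Worked example (illustrative, made-up numbers): with a single range
   * { first = 20, last = 25, value = 3 },
   *
   *   get_class (22) == 3
   *   get_class (40) == 0   // bsearch miss yields the Null record, class 0
   */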
2417 
2418   template<typename Iterator,
2419 	   hb_requires (hb_is_iterator (Iterator))>
2420   bool serialize (hb_serialize_context_t *c,
2421 		  Iterator it)
2422   {
2423     TRACE_SERIALIZE (this);
2424     if (unlikely (!c->extend_min (*this))) return_trace (false);
2425 
2426     if (unlikely (!it))
2427     {
2428       classFormat = 2;
2429       rangeRecord.len = 0;
2430       return_trace (true);
2431     }
2432 
2433     unsigned num_ranges = 1;
2434     hb_codepoint_t prev_gid = (*it).first;
2435     unsigned prev_klass = (*it).second;
2436 
2437     RangeRecord range_rec;
2438     range_rec.first = prev_gid;
2439     range_rec.last = prev_gid;
2440     range_rec.value = prev_klass;
2441 
2442     RangeRecord *record = c->copy (range_rec);
2443     if (unlikely (!record)) return_trace (false);
2444 
2445     for (const auto gid_klass_pair : + (++it))
2446     {
2447       hb_codepoint_t cur_gid = gid_klass_pair.first;
2448       unsigned cur_klass = gid_klass_pair.second;
2449 
2450       if (cur_gid != prev_gid + 1 ||
2451 	  cur_klass != prev_klass)
2452       {
2453 	if (unlikely (!record)) break;
2454 	record->last = prev_gid;
2455 	num_ranges++;
2456 
2457 	range_rec.first = cur_gid;
2458 	range_rec.last = cur_gid;
2459 	range_rec.value = cur_klass;
2460 
2461 	record = c->copy (range_rec);
2462       }
2463 
2464       prev_klass = cur_klass;
2465       prev_gid = cur_gid;
2466     }
2467 
2468     if (likely (record)) record->last = prev_gid;
2469     rangeRecord.len = num_ranges;
2470     return_trace (true);
2471   }
2472 
2473   bool subset (hb_subset_context_t *c,
2474 	       hb_map_t *klass_map = nullptr /*OUT*/,
2475                bool keep_empty_table = true,
2476                bool use_class_zero = true,
2477                const Coverage* glyph_filter = nullptr) const
2478   {
2479     TRACE_SUBSET (this);
2480     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2481     const hb_map_t &glyph_map = *c->plan->glyph_map;
2482 
2483     hb_sorted_vector_t<HBGlyphID> glyphs;
2484     hb_set_t orig_klasses;
2485     hb_map_t gid_org_klass_map;
2486 
2487     unsigned count = rangeRecord.len;
2488     for (unsigned i = 0; i < count; i++)
2489     {
2490       unsigned klass = rangeRecord[i].value;
2491       if (!klass) continue;
2492       hb_codepoint_t start = rangeRecord[i].first;
2493       hb_codepoint_t end   = rangeRecord[i].last + 1;
2494       for (hb_codepoint_t g = start; g < end; g++)
2495       {
2496 	if (!glyphset.has (g)) continue;
2497         if (glyph_filter && !glyph_filter->has (g)) continue;
2498 	glyphs.push (glyph_map[g]);
2499 	gid_org_klass_map.set (glyph_map[g], klass);
2500 	orig_klasses.add (klass);
2501       }
2502     }
2503 
2504     unsigned glyph_count = glyph_filter
2505                            ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
2506                            : glyphset.get_population ();
2507     use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
2508     ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
2509 				  glyphs, orig_klasses, use_class_zero, klass_map);
2510     return_trace (keep_empty_table || (bool) glyphs);
2511   }
2512 
2513   bool sanitize (hb_sanitize_context_t *c) const
2514   {
2515     TRACE_SANITIZE (this);
2516     return_trace (rangeRecord.sanitize (c));
2517   }
2518 
2519   template <typename set_t>
2520   bool collect_coverage (set_t *glyphs) const
2521   {
2522     unsigned int count = rangeRecord.len;
2523     for (unsigned int i = 0; i < count; i++)
2524       if (rangeRecord[i].value)
2525 	if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
2526 	  return false;
2527     return true;
2528   }
2529 
2530   template <typename set_t>
2531   bool collect_class (set_t *glyphs, unsigned int klass) const
2532   {
2533     unsigned int count = rangeRecord.len;
2534     for (unsigned int i = 0; i < count; i++)
2535     {
2536       if (rangeRecord[i].value == klass)
2537 	if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
2538 	  return false;
2539     }
2540     return true;
2541   }
2542 
2543   bool intersects (const hb_set_t *glyphs) const
2544   {
2545     /* TODO Speed up, using hb_set_next() and bsearch()? */
2546     unsigned int count = rangeRecord.len;
2547     for (unsigned int i = 0; i < count; i++)
2548     {
2549       const auto& range = rangeRecord[i];
2550       if (range.intersects (glyphs) && range.value)
2551 	return true;
2552     }
2553     return false;
2554   }
2555   bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
2556   {
2557     unsigned int count = rangeRecord.len;
2558     if (klass == 0)
2559     {
2560       /* Match if there's any glyph that is not listed! */
2561       hb_codepoint_t g = HB_SET_VALUE_INVALID;
2562       for (unsigned int i = 0; i < count; i++)
2563       {
2564 	if (!hb_set_next (glyphs, &g))
2565 	  break;
2566 	if (g < rangeRecord[i].first)
2567 	  return true;
2568 	g = rangeRecord[i].last;
2569       }
2570       if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2571 	return true;
2572       /* Fall through. */
2573     }
2574     /* TODO Speed up, using set overlap first? */
2575     /* TODO(iter) Rewrite as dagger. */
2576     HBUINT16 k {klass};
2577     const RangeRecord *arr = rangeRecord.arrayZ;
2578     for (unsigned int i = 0; i < count; i++)
2579       if (arr[i].value == k && arr[i].intersects (glyphs))
2580 	return true;
2581     return false;
2582   }
2583 
2584   void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2585   {
2586     unsigned count = rangeRecord.len;
2587     if (klass == 0)
2588     {
2589       hb_codepoint_t g = HB_SET_VALUE_INVALID;
2590       for (unsigned int i = 0; i < count; i++)
2591       {
2592         if (!hb_set_next (glyphs, &g))
2593           break;
2594         while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
2595         {
2596           intersect_glyphs->add (g);
2597           hb_set_next (glyphs, &g);
2598         }
2599         g = rangeRecord[i].last;
2600       }
2601       while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
2602         intersect_glyphs->add (g);
2603 
2604       return;
2605     }
2606 
2607     hb_codepoint_t g = HB_SET_VALUE_INVALID;
2608     for (unsigned int i = 0; i < count; i++)
2609     {
2610       if (rangeRecord[i].value != klass) continue;
2611 
2612       if (g != HB_SET_VALUE_INVALID)
2613       {
2614         if (g >= rangeRecord[i].first &&
2615             g <= rangeRecord[i].last)
2616           intersect_glyphs->add (g);
2617         if (g > rangeRecord[i].last)
2618           continue;
2619       }
2620 
2621       g = rangeRecord[i].first - 1;
2622       while (hb_set_next (glyphs, &g))
2623       {
2624         if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
2625           intersect_glyphs->add (g);
2626         else if (g > rangeRecord[i].last)
2627           break;
2628       }
2629     }
2630   }
2631 
2632   protected:
2633   HBUINT16	classFormat;	/* Format identifier--format = 2 */
2634   SortedArray16Of<RangeRecord>
2635 		rangeRecord;	/* Array of glyph ranges--ordered by
2636 				 * Start GlyphID */
2637   public:
2638   DEFINE_SIZE_ARRAY (4, rangeRecord);
2639 };
2640 
2641 struct ClassDef
2642 {
2643   /* Has interface. */
2644   static constexpr unsigned SENTINEL = 0;
2645   typedef unsigned int value_t;
2646   value_t operator [] (hb_codepoint_t k) const { return get (k); }
2647   bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
2648   /* Projection. */
2649   hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
2650 
2651   unsigned int get (hb_codepoint_t k) const { return get_class (k); }
2652   unsigned int get_class (hb_codepoint_t glyph_id) const
2653   {
2654     switch (u.format) {
2655     case 1: return u.format1.get_class (glyph_id);
2656     case 2: return u.format2.get_class (glyph_id);
2657     default:return 0;
2658     }
2659   }
2660 
2661   template<typename Iterator,
2662 	   hb_requires (hb_is_iterator (Iterator))>
2663   bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
2664   {
2665     TRACE_SERIALIZE (this);
2666     if (unlikely (!c->extend_min (*this))) return_trace (false);
2667 
2668     auto it = + it_with_class_zero | hb_filter (hb_second);
2669 
2670     unsigned format = 2;
2671     if (likely (it))
2672     {
2673       hb_codepoint_t glyph_min = (*it).first;
2674       hb_codepoint_t glyph_max = glyph_min;
2675 
2676       unsigned num_glyphs = 0;
2677       unsigned num_ranges = 1;
2678       hb_codepoint_t prev_gid = glyph_min;
2679       unsigned prev_klass = (*it).second;
2680 
2681       for (const auto gid_klass_pair : it)
2682       {
2683 	hb_codepoint_t cur_gid = gid_klass_pair.first;
2684 	unsigned cur_klass = gid_klass_pair.second;
2685         num_glyphs++;
2686 	if (cur_gid == glyph_min) continue;
2687         if (cur_gid > glyph_max) glyph_max = cur_gid;
2688 	if (cur_gid != prev_gid + 1 ||
2689 	    cur_klass != prev_klass)
2690 	  num_ranges++;
2691 
2692 	prev_gid = cur_gid;
2693 	prev_klass = cur_klass;
2694       }
2695 
2696       if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
2697 	format = 1;
2698     }
2699     u.format = format;
2700 
2701     switch (u.format)
2702     {
2703     case 1: return_trace (u.format1.serialize (c, it));
2704     case 2: return_trace (u.format2.serialize (c, it));
2705     default:return_trace (false);
2706     }
2707   }
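  /* Why `1 + (glyph_max - glyph_min + 1) <= num_ranges * 3` (illustrative
   * reasoning, not upstream commentary): a format-1 table stores one 16-bit
   * class value per glyph in [glyph_min, glyph_max], while a format-2 table
   * stores a 6-byte RangeRecord per range, so the dense array wins when it is
   * no larger than the ranges.  E.g. classes spanning glyphs 10..19 split
   * into 4 ranges give 1 + 10 <= 12, so format 1 is used. */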
2708 
2709   bool subset (hb_subset_context_t *c,
2710 	       hb_map_t *klass_map = nullptr /*OUT*/,
2711                bool keep_empty_table = true,
2712                bool use_class_zero = true,
2713                const Coverage* glyph_filter = nullptr) const
2714   {
2715     TRACE_SUBSET (this);
2716     switch (u.format) {
2717     case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2718     case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
2719     default:return_trace (false);
2720     }
2721   }
2722 
2723   bool sanitize (hb_sanitize_context_t *c) const
2724   {
2725     TRACE_SANITIZE (this);
2726     if (!u.format.sanitize (c)) return_trace (false);
2727     switch (u.format) {
2728     case 1: return_trace (u.format1.sanitize (c));
2729     case 2: return_trace (u.format2.sanitize (c));
2730     default:return_trace (true);
2731     }
2732   }
2733 
2734   /* Might return false if array looks unsorted.
2735    * Used for faster rejection of corrupt data. */
2736   template <typename set_t>
2737   bool collect_coverage (set_t *glyphs) const
2738   {
2739     switch (u.format) {
2740     case 1: return u.format1.collect_coverage (glyphs);
2741     case 2: return u.format2.collect_coverage (glyphs);
2742     default:return false;
2743     }
2744   }
2745 
2746   /* Might return false if array looks unsorted.
2747    * Used for faster rejection of corrupt data. */
2748   template <typename set_t>
2749   bool collect_class (set_t *glyphs, unsigned int klass) const
2750   {
2751     switch (u.format) {
2752     case 1: return u.format1.collect_class (glyphs, klass);
2753     case 2: return u.format2.collect_class (glyphs, klass);
2754     default:return false;
2755     }
2756   }
2757 
2758   bool intersects (const hb_set_t *glyphs) const
2759   {
2760     switch (u.format) {
2761     case 1: return u.format1.intersects (glyphs);
2762     case 2: return u.format2.intersects (glyphs);
2763     default:return false;
2764     }
2765   }
2766   bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
2767   {
2768     switch (u.format) {
2769     case 1: return u.format1.intersects_class (glyphs, klass);
2770     case 2: return u.format2.intersects_class (glyphs, klass);
2771     default:return false;
2772     }
2773   }
2774 
2775   void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
2776   {
2777     switch (u.format) {
2778     case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2779     case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
2780     default:return;
2781     }
2782   }
2783 
2784   protected:
2785   union {
2786   HBUINT16		format;		/* Format identifier */
2787   ClassDefFormat1	format1;
2788   ClassDefFormat2	format2;
2789   } u;
2790   public:
2791   DEFINE_SIZE_UNION (2, format);
2792 };
2793 
2794 template<typename Iterator>
2795 static inline void ClassDef_serialize (hb_serialize_context_t *c,
2796 				       Iterator it)
2797 { c->start_embed<ClassDef> ()->serialize (c, it); }
2798 
2799 
2800 /*
2801  * Item Variation Store
2802  */
2803 
2804 struct VarRegionAxis
2805 {
2806   float evaluate (int coord) const
2807   {
2808     int start = startCoord, peak = peakCoord, end = endCoord;
2809 
2810     /* TODO Move these to sanitize(). */
2811     if (unlikely (start > peak || peak > end))
2812       return 1.;
2813     if (unlikely (start < 0 && end > 0 && peak != 0))
2814       return 1.;
2815 
2816     if (peak == 0 || coord == peak)
2817       return 1.;
2818 
2819     if (coord <= start || end <= coord)
2820       return 0.;
2821 
2822     /* Interpolate */
2823     if (coord < peak)
2824       return float (coord - start) / (peak - start);
2825     else
2826       return float (end - coord) / (end - peak);
2827   }
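  /* Worked example (illustrative, made-up numbers): with startCoord = 0,
   * peakCoord = 0.5 and endCoord = 1.0,
   *
   *   evaluate (0.5)  == 1.0                              // at the peak
   *   evaluate (0.25) == (0.25 - 0) / (0.5 - 0)     == 0.5
   *   evaluate (0.75) == (1.0 - 0.75) / (1.0 - 0.5) == 0.5
   *   evaluate (1.25) == 0.0                              // outside [start, end]
   *
   * (Actual coords are 2.14 fixed-point integers; decimals are used here only
   * for readability.) */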
2828 
2829   bool sanitize (hb_sanitize_context_t *c) const
2830   {
2831     TRACE_SANITIZE (this);
2832     return_trace (c->check_struct (this));
2833     /* TODO Handle invalid start/peak/end configs, so we don't
2834      * have to do that at runtime. */
2835   }
2836 
2837   public:
2838   F2DOT14	startCoord;
2839   F2DOT14	peakCoord;
2840   F2DOT14	endCoord;
2841   public:
2842   DEFINE_SIZE_STATIC (6);
2843 };
2844 
2845 struct VarRegionList
2846 {
2847   float evaluate (unsigned int region_index,
2848 		  const int *coords, unsigned int coord_len) const
2849   {
2850     if (unlikely (region_index >= regionCount))
2851       return 0.;
2852 
2853     const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
2854 
2855     float v = 1.;
2856     unsigned int count = axisCount;
2857     for (unsigned int i = 0; i < count; i++)
2858     {
2859       int coord = i < coord_len ? coords[i] : 0;
2860       float factor = axes[i].evaluate (coord);
2861       if (factor == 0.f)
2862 	return 0.;
2863       v *= factor;
2864     }
2865     return v;
2866   }
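  /* Worked example (illustrative): a region's scalar is the product of its
   * per-axis factors, so two axes evaluating to 0.5 and 0.8 give the region a
   * scalar of 0.5 * 0.8 = 0.4, and any axis evaluating to 0 zeroes out the
   * whole region. */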
2867 
2868   bool sanitize (hb_sanitize_context_t *c) const
2869   {
2870     TRACE_SANITIZE (this);
2871     return_trace (c->check_struct (this) &&
2872 		  axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount));
2873   }
2874 
2875   bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
2876   {
2877     TRACE_SERIALIZE (this);
2878     VarRegionList *out = c->allocate_min<VarRegionList> ();
2879     if (unlikely (!out)) return_trace (false);
2880     axisCount = src->axisCount;
2881     regionCount = region_map.get_population ();
2882     if (unlikely (!c->allocate_size<VarRegionList> (get_size () - min_size))) return_trace (false);
2883     unsigned int region_count = src->get_region_count ();
2884     for (unsigned int r = 0; r < regionCount; r++)
2885     {
2886       unsigned int backward = region_map.backward (r);
2887       if (backward >= region_count) return_trace (false);
2888       memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
2889     }
2890 
2891     return_trace (true);
2892   }
2893 
2894   unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
2895   unsigned int get_region_count () const { return regionCount; }
2896 
2897   protected:
2898   HBUINT16	axisCount;
2899   HBUINT16	regionCount;
2900   UnsizedArrayOf<VarRegionAxis>
2901 		axesZ;
2902   public:
2903   DEFINE_SIZE_ARRAY (4, axesZ);
2904 };
2905 
2906 struct VarData
2907 {
2908   unsigned int get_region_index_count () const
2909   { return regionIndices.len; }
2910 
2911   unsigned int get_row_size () const
2912   { return shortCount + regionIndices.len; }
2913 
2914   unsigned int get_size () const
2915   { return itemCount * get_row_size (); }
2916 
2917   float get_delta (unsigned int inner,
2918 		   const int *coords, unsigned int coord_count,
2919 		   const VarRegionList &regions) const
2920   {
2921     if (unlikely (inner >= itemCount))
2922       return 0.;
2923 
2924    unsigned int count = regionIndices.len;
2925    unsigned int scount = shortCount;
2926 
2927    const HBUINT8 *bytes = get_delta_bytes ();
2928    const HBUINT8 *row = bytes + inner * (scount + count);
2929 
2930    float delta = 0.;
2931    unsigned int i = 0;
2932 
2933    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
2934    for (; i < scount; i++)
2935    {
2936      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2937      delta += scalar * *scursor++;
2938    }
2939    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
2940    for (; i < count; i++)
2941    {
2942      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2943      delta += scalar * *bcursor++;
2944    }
2945 
2946    return delta;
2947   }
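  /* Worked example (illustrative, made-up numbers): a row with shortCount = 1
   * and two region indices stores its first delta as a 16-bit int and its
   * second as an 8-bit int.  With raw deltas [100, -3] and region scalars
   * [0.5, 1.0] the interpolated adjustment is
   *
   *   delta = 0.5 * 100 + 1.0 * (-3) = 47
   */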
2948 
2949   void get_scalars (const int *coords, unsigned int coord_count,
2950 		    const VarRegionList &regions,
2951 		    float *scalars /*OUT */,
2952 		    unsigned int num_scalars) const
2953   {
2954     unsigned count = hb_min (num_scalars, regionIndices.len);
2955     for (unsigned int i = 0; i < count; i++)
2956       scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2957     for (unsigned int i = count; i < num_scalars; i++)
2958       scalars[i] = 0.f;
2959   }
2960 
2961   bool sanitize (hb_sanitize_context_t *c) const
2962   {
2963     TRACE_SANITIZE (this);
2964     return_trace (c->check_struct (this) &&
2965 		  regionIndices.sanitize (c) &&
2966 		  shortCount <= regionIndices.len &&
2967 		  c->check_range (get_delta_bytes (),
2968 				  itemCount,
2969 				  get_row_size ()));
2970   }
2971 
2972   bool serialize (hb_serialize_context_t *c,
2973 		  const VarData *src,
2974 		  const hb_inc_bimap_t &inner_map,
2975 		  const hb_bimap_t &region_map)
2976   {
2977     TRACE_SERIALIZE (this);
2978     if (unlikely (!c->extend_min (*this))) return_trace (false);
2979     itemCount = inner_map.get_next_value ();
2980 
2981     /* Optimize short count */
2982     unsigned short ri_count = src->regionIndices.len;
2983     enum delta_size_t { kZero=0, kByte, kShort };
2984     hb_vector_t<delta_size_t> delta_sz;
2985     hb_vector_t<unsigned int> ri_map;	/* maps old index to new index */
2986     delta_sz.resize (ri_count);
2987     ri_map.resize (ri_count);
2988     unsigned int new_short_count = 0;
2989     unsigned int r;
2990     for (r = 0; r < ri_count; r++)
2991     {
2992       delta_sz[r] = kZero;
2993       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2994       {
2995 	unsigned int old = inner_map.backward (i);
2996 	int16_t delta = src->get_item_delta (old, r);
2997 	if (delta < -128 || 127 < delta)
2998 	{
2999 	  delta_sz[r] = kShort;
3000 	  new_short_count++;
3001 	  break;
3002 	}
3003 	else if (delta != 0)
3004 	  delta_sz[r] = kByte;
3005       }
3006     }
3007     unsigned int short_index = 0;
3008     unsigned int byte_index = new_short_count;
3009     unsigned int new_ri_count = 0;
3010     for (r = 0; r < ri_count; r++)
3011       if (delta_sz[r])
3012       {
3013 	ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
3014 	new_ri_count++;
3015       }
3016 
3017     shortCount = new_short_count;
3018     regionIndices.len = new_ri_count;
3019 
3020     unsigned int size = regionIndices.get_size () - HBUINT16::static_size/*regionIndices.len*/ + (get_row_size () * itemCount);
3021     if (unlikely (!c->allocate_size<HBUINT8> (size)))
3022       return_trace (false);
3023 
3024     for (r = 0; r < ri_count; r++)
3025       if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
3026 
3027     for (unsigned int i = 0; i < itemCount; i++)
3028     {
3029       unsigned int	old = inner_map.backward (i);
3030       for (unsigned int r = 0; r < ri_count; r++)
3031 	if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
3032     }
3033 
3034     return_trace (true);
3035   }
3036 
3037   void collect_region_refs (hb_inc_bimap_t &region_map, const hb_inc_bimap_t &inner_map) const
3038   {
3039     for (unsigned int r = 0; r < regionIndices.len; r++)
3040     {
3041       unsigned int region = regionIndices[r];
3042       if (region_map.has (region)) continue;
3043       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
3044 	if (get_item_delta (inner_map.backward (i), r) != 0)
3045 	{
3046 	  region_map.add (region);
3047 	  break;
3048 	}
3049     }
3050   }
3051 
3052   protected:
3053   const HBUINT8 *get_delta_bytes () const
3054   { return &StructAfter<HBUINT8> (regionIndices); }
3055 
3056   HBUINT8 *get_delta_bytes ()
3057   { return &StructAfter<HBUINT8> (regionIndices); }
3058 
3059   int16_t get_item_delta (unsigned int item, unsigned int region) const
3060   {
3061     if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
3062     const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
3063     if (region < shortCount)
3064       return ((const HBINT16 *)p)[region];
3065     else
3066       return (p + HBINT16::static_size * shortCount)[region - shortCount];
3067   }
3068 
3069   void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
3070   {
3071     HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
3072     if (region < shortCount)
3073       ((HBINT16 *)p)[region] = delta;
3074     else
3075       (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
3076   }
3077 
3078   protected:
3079   HBUINT16		itemCount;
3080   HBUINT16		shortCount;
3081   Array16Of<HBUINT16>	regionIndices;
3082 /*UnsizedArrayOf<HBUINT8>bytesX;*/
3083   public:
3084   DEFINE_SIZE_ARRAY (6, regionIndices);
3085 };
3086 
3087 struct VariationStore
3088 {
3089   private:
3090   float get_delta (unsigned int outer, unsigned int inner,
3091 		   const int *coords, unsigned int coord_count) const
3092   {
3093 #ifdef HB_NO_VAR
3094     return 0.f;
3095 #endif
3096 
3097     if (unlikely (outer >= dataSets.len))
3098       return 0.f;
3099 
3100     return (this+dataSets[outer]).get_delta (inner,
3101 					     coords, coord_count,
3102 					     this+regions);
3103   }
3104 
3105   public:
3106   float get_delta (unsigned int index,
3107 		   const int *coords, unsigned int coord_count) const
3108   {
3109     unsigned int outer = index >> 16;
3110     unsigned int inner = index & 0xFFFF;
3111     return get_delta (outer, inner, coords, coord_count);
3112   }
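  /* Illustrative sketch (not upstream code): the packed delta-set index keeps
   * the outer (VarData) index in the high 16 bits and the inner (row) index
   * in the low 16 bits, e.g.
   *
   *   uint32_t index = 0x00030007u;
   *   unsigned outer = index >> 16;      // 3
   *   unsigned inner = index & 0xFFFFu;  // 7
   */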
3113 
3114   bool sanitize (hb_sanitize_context_t *c) const
3115   {
3116 #ifdef HB_NO_VAR
3117     return true;
3118 #endif
3119 
3120     TRACE_SANITIZE (this);
3121     return_trace (c->check_struct (this) &&
3122 		  format == 1 &&
3123 		  regions.sanitize (c, this) &&
3124 		  dataSets.sanitize (c, this));
3125   }
3126 
3127   bool serialize (hb_serialize_context_t *c,
3128 		  const VariationStore *src,
3129 		  const hb_array_t <hb_inc_bimap_t> &inner_maps)
3130   {
3131     TRACE_SERIALIZE (this);
3132     unsigned int set_count = 0;
3133     for (unsigned int i = 0; i < inner_maps.length; i++)
3134       if (inner_maps[i].get_population () > 0) set_count++;
3135 
3136     unsigned int size = min_size + HBUINT32::static_size * set_count;
3137     if (unlikely (!c->allocate_size<HBUINT32> (size))) return_trace (false);
3138     format = 1;
3139 
3140     hb_inc_bimap_t region_map;
3141     for (unsigned int i = 0; i < inner_maps.length; i++)
3142       (src+src->dataSets[i]).collect_region_refs (region_map, inner_maps[i]);
3143     region_map.sort ();
3144 
3145     if (unlikely (!regions.serialize (c, this)
3146 		  .serialize (c, &(src+src->regions), region_map))) return_trace (false);
3147 
3148     /* TODO: The following code could be simplified when
3149      * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize ()
3150      */
3151     dataSets.len = set_count;
3152     unsigned int set_index = 0;
3153     for (unsigned int i = 0; i < inner_maps.length; i++)
3154     {
3155       if (inner_maps[i].get_population () == 0) continue;
3156       if (unlikely (!dataSets[set_index++].serialize (c, this)
3157 		      .serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
3158 	return_trace (false);
3159     }
3160 
3161     return_trace (true);
3162   }
3163 
subsetOT::VariationStore3164   bool subset (hb_subset_context_t *c) const
3165   {
3166     TRACE_SUBSET (this);
3167 
3168     VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
3169     if (unlikely (!varstore_prime)) return_trace (false);
3170 
3171     const hb_set_t *variation_indices = c->plan->layout_variation_indices;
3172     if (variation_indices->is_empty ()) return_trace (false);
3173 
3174     hb_vector_t<hb_inc_bimap_t> inner_maps;
3175     inner_maps.resize ((unsigned) dataSets.len);
3176     for (unsigned i = 0; i < inner_maps.length; i++)
3177       inner_maps[i].init ();
3178 
3179     for (unsigned idx : c->plan->layout_variation_indices->iter ())
3180     {
3181       uint16_t major = idx >> 16;
3182       uint16_t minor = idx & 0xFFFF;
3183 
3184       if (major >= inner_maps.length)
3185       {
3186 	for (unsigned i = 0; i < inner_maps.length; i++)
3187 	  inner_maps[i].fini ();
3188 	return_trace (false);
3189       }
3190       inner_maps[major].add (minor);
3191     }
3192     varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());
3193 
3194     for (unsigned i = 0; i < inner_maps.length; i++)
3195       inner_maps[i].fini ();
3196 
3197     return_trace (
3198         !c->serializer->in_error()
3199         && varstore_prime->dataSets);
3200   }
3201 
get_region_index_countOT::VariationStore3202   unsigned int get_region_index_count (unsigned int ivs) const
3203   { return (this+dataSets[ivs]).get_region_index_count (); }
3204 
get_scalarsOT::VariationStore3205   void get_scalars (unsigned int ivs,
3206 		    const int *coords, unsigned int coord_count,
3207 		    float *scalars /*OUT*/,
3208 		    unsigned int num_scalars) const
3209   {
3210 #ifdef HB_NO_VAR
3211     for (unsigned i = 0; i < num_scalars; i++)
3212       scalars[i] = 0.f;
3213     return;
3214 #endif
3215 
3216     (this+dataSets[ivs]).get_scalars (coords, coord_count, this+regions,
3217 				      &scalars[0], num_scalars);
3218   }
3219 
get_sub_table_countOT::VariationStore3220   unsigned int get_sub_table_count () const { return dataSets.len; }
3221 
3222   protected:
3223   HBUINT16				format;
3224   Offset32To<VarRegionList>		regions;
3225   Array16OfOffset32To<VarData>		dataSets;
3226   public:
3227   DEFINE_SIZE_ARRAY (8, dataSets);
3228 };

/*
 * Feature Variations
 */

struct ConditionFormat1
{
  friend struct Condition;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);
    return_trace (true);
  }

  private:
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
    return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
  }
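
  /* Worked example (editorial addition): coords[] holds normalized axis
   * values in 2.14 fixed point, the same representation F2DOT14 uses.  A
   * condition with axisIndex = 0, filterRangeMinValue = 0.5 (8192) and
   * filterRangeMaxValue = 1.0 (16384) evaluates to true for an instance
   * whose first axis is normalized to 0.75 (12288), and to false at the
   * axis default of 0. */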

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 1 */
  HBUINT16	axisIndex;
  F2DOT14	filterRangeMinValue;
  F2DOT14	filterRangeMaxValue;
  public:
  DEFINE_SIZE_STATIC (8);
};

struct Condition
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    default:return_trace (true);
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ConditionFormat1	format1;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

struct ConditionSet
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
	return false;
    return true;
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    + conditions.iter ()
    | hb_apply (subset_offset_array (c, out->conditions, this))
    ;

    return_trace (bool (out->conditions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, this));
  }

  protected:
  Array16OfOffset32To<Condition>	conditions;
  public:
  DEFINE_SIZE_ARRAY (2, conditions);
};

struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  void closure_features (const void *base,
			 const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    if (!c->feature_index_map->has (featureIndex)) {
      // Feature that is being substituted is not being retained, so we don't
      // need this.
      return_trace (false);
    }

    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->featureIndex = c->feature_index_map->get (featureIndex);
    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16		featureIndex;
  Offset32To<Feature>	feature;
  public:
  DEFINE_SIZE_STATIC (6);
};

struct FeatureTableSubstitution
{
  const Feature *find_substitute (unsigned int feature_index) const
  {
    unsigned int count = substitutions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
      if (record.featureIndex == feature_index)
	return &(this+record.feature);
    }
    return nullptr;
  }

  void collect_lookups (const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    + hb_iter (substitutions)
    | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
    | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
		{ r.collect_lookups (this, lookup_indexes); })
    ;
  }

  void closure_features (const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  bool intersects_features (const hb_map_t *feature_index_map) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
    {
      if (feature_index_map->has (record.featureIndex)) return true;
    }
    return false;
  }

  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    + substitutions.iter ()
    | hb_apply (subset_record_array (l, &(out->substitutions), this))
    ;

    return_trace (bool (out->substitutions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  substitutions.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array16Of<FeatureTableSubstitutionRecord>
			substitutions;
  public:
  DEFINE_SIZE_ARRAY (6, substitutions);
};

struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  void collect_lookups (const void     *base,
			const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
  }

  void closure_features (const void     *base,
			 const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
  {
    return (base+substitutions).intersects_features (feature_index_map);
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->conditions.serialize_subset (c->subset_context, conditions, base);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
		  substitutions.sanitize (c, base));
  }

  protected:
  Offset32To<ConditionSet>
			conditions;
  Offset32To<FeatureTableSubstitution>
			substitutions;
  public:
  DEFINE_SIZE_STATIC (8);
};

struct FeatureVariations
{
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;

  bool find_index (const int *coords, unsigned int coord_len,
		   unsigned int *index) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len))
      {
	*index = i;
	return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }
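
  /* Note (editorial addition): records are evaluated in table order and the
   * first FeatureVariationRecord whose ConditionSet matches the given
   * coordinates wins; later records are not considered for that instance. */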

  const Feature *find_substitute (unsigned int variations_index,
				  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  FeatureVariations* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  void collect_lookups (const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& r : varRecords)
      r.collect_lookups (this, feature_indexes, lookup_indexes);
  }

  void closure_features (const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& record : varRecords)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    int keep_up_to = -1;
    for (int i = varRecords.len - 1; i >= 0; i--) {
      if (varRecords[i].intersects_features (this, l->feature_index_map)) {
        keep_up_to = i;
        break;
      }
    }

    unsigned count = (unsigned) (keep_up_to + 1);
    for (unsigned i = 0; i < count; i++) {
      subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
    }
    return_trace (bool (out->varRecords));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array32Of<FeatureVariationRecord>
			varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};


/*
 * Device Tables
 */

struct HintingDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  public:

  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  HintingDevice* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<HintingDevice> (this));
  }

  private:

  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    return (int) (pixels * (int64_t) scale / ppem);
  }
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }
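
  /* Worked example (editorial addition): with deltaFormat = 2 (signed 4-bit
   * deltas, four per uint16), startSize = 9 and ppem_size = 12, s = 3, so
   * the value lives in deltaValueZ[3 >> 2] == deltaValueZ[0], in that word's
   * lowest nibble (mask = 0xF).  A stored nibble of 0xE is >= 8 and is
   * sign-extended to -2 pixels, which get_delta () then scales to font
   * units as pixels * scale / ppem. */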

  protected:
  HBUINT16	startSize;		/* Smallest size to correct--in ppem */
  HBUINT16	endSize;		/* Largest size to correct--in ppem */
  HBUINT16	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
					 * 1	Signed 2-bit value, 8 values per uint16
					 * 2	Signed 4-bit value, 4 values per uint16
					 * 3	Signed 8-bit value, 2 values per uint16
					 */
  UnsizedArrayOf<HBUINT16>
		deltaValueZ;		/* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValueZ);
};

struct VariationDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_x (get_delta (font, store)); }

  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_y (get_delta (font, store)); }

  VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    auto snap = c->snapshot ();
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);
    if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);

    /* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
    if (!layout_variation_idx_map->has (varIdx))
    {
      c->revert (snap);
      return_trace (nullptr);
    }
    unsigned new_idx = layout_variation_idx_map->get (varIdx);
    out->varIdx = new_idx;
    return_trace (out);
  }

  void record_variation_index (hb_set_t *layout_variation_indices) const
  {
    layout_variation_indices->add (varIdx);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font, const VariationStore &store) const
  {
    return store.get_delta (varIdx, font->coords, font->num_coords);
  }

  protected:
  VarIdx	varIdx;
  HBUINT16	deltaFormat;	/* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct DeviceHeader
{
  protected:
  HBUINT16		reserved1;
  HBUINT16		reserved2;
  public:
  HBUINT16		format;		/* Format identifier */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct Device
{
  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
  {
    switch (u.b.format)
    {
#ifndef HB_NO_HINTING
    case 1: case 2: case 3:
      return u.hinting.get_x_delta (font);
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return u.variation.get_x_delta (font, store);
#endif
    default:
      return 0;
    }
  }
  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
  {
    switch (u.b.format)
    {
#ifndef HB_NO_HINTING
    case 1: case 2: case 3:
      return u.hinting.get_y_delta (font);
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return u.variation.get_y_delta (font, store);
#endif
    default:
      return 0;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.b.format.sanitize (c)) return_trace (false);
    switch (u.b.format) {
#ifndef HB_NO_HINTING
    case 1: case 2: case 3:
      return_trace (u.hinting.sanitize (c));
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return_trace (u.variation.sanitize (c));
#endif
    default:
      return_trace (true);
    }
  }

  Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
  {
    TRACE_SERIALIZE (this);
    switch (u.b.format) {
#ifndef HB_NO_HINTING
    case 1:
    case 2:
    case 3:
      return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
#endif
    default:
      return_trace (nullptr);
    }
  }

  void collect_variation_indices (hb_set_t *layout_variation_indices) const
  {
    switch (u.b.format) {
#ifndef HB_NO_HINTING
    case 1:
    case 2:
    case 3:
      return;
#endif
#ifndef HB_NO_VAR
    case 0x8000:
      u.variation.record_variation_index (layout_variation_indices);
      return;
#endif
    default:
      return;
    }
  }

  protected:
  union {
  DeviceHeader		b;
  HintingDevice		hinting;
#ifndef HB_NO_VAR
  VariationDevice	variation;
#endif
  } u;
  public:
  DEFINE_SIZE_UNION (6, b);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_COMMON_HH */