• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright © 2007,2008,2009  Red Hat, Inc.
3  * Copyright © 2010,2012  Google, Inc.
4  *
5  *  This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
31 
32 #include "hb.hh"
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
35 #include "hb-set.hh"
36 #include "hb-bimap.hh"
37 
38 
39 #ifndef HB_MAX_NESTING_LEVEL
40 #define HB_MAX_NESTING_LEVEL	6
41 #endif
42 #ifndef HB_MAX_CONTEXT_LENGTH
43 #define HB_MAX_CONTEXT_LENGTH	64
44 #endif
45 #ifndef HB_CLOSURE_MAX_STAGES
46 /*
47  * The maximum number of times a lookup can be applied during shaping.
48  * Used to limit the number of iterations of the closure algorithm.
49  * This must be larger than the number of times add_pause() is
50  * called in a collect_features call of any shaper.
51  */
52 #define HB_CLOSURE_MAX_STAGES	32
53 #endif
54 
55 #ifndef HB_MAX_SCRIPTS
56 #define HB_MAX_SCRIPTS	500
57 #endif
58 
59 #ifndef HB_MAX_LANGSYS
60 #define HB_MAX_LANGSYS	2000
61 #endif
62 
63 #ifndef HB_MAX_FEATURES
64 #define HB_MAX_FEATURES 750
65 #endif
66 
67 #ifndef HB_MAX_FEATURE_INDICES
68 #define HB_MAX_FEATURE_INDICES	1500
69 #endif
70 
71 #ifndef HB_MAX_LOOKUP_INDICES
72 #define HB_MAX_LOOKUP_INDICES	20000
73 #endif
74 
75 
76 namespace OT {
77 
78 
79 #define NOT_COVERED		((unsigned int) -1)
80 
81 
/* Forward declarations for serialization helpers defined later in this file. */

template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
				       Iterator it);

template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
				       Iterator it);

/* Remaps glyph classes (filling klass_map) and serializes the resulting
 * ClassDef from the given glyph/class mapping. */
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID16> &glyphs,
					  const hb_set_t &klasses,
					  bool use_class_zero,
					  hb_map_t *klass_map /*INOUT*/);
96 
97 
98 struct hb_prune_langsys_context_t
99 {
hb_prune_langsys_context_tOT::hb_prune_langsys_context_t100   hb_prune_langsys_context_t (const void         *table_,
101                               hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
102                               const hb_map_t     *duplicate_feature_map_,
103                               hb_set_t           *new_collected_feature_indexes_)
104       :table (table_),
105       script_langsys_map (script_langsys_map_),
106       duplicate_feature_map (duplicate_feature_map_),
107       new_feature_indexes (new_collected_feature_indexes_),
108       script_count (0),langsys_count (0) {}
109 
visitedScriptOT::hb_prune_langsys_context_t110   bool visitedScript (const void *s)
111   {
112     if (script_count++ > HB_MAX_SCRIPTS)
113       return true;
114 
115     return visited (s, visited_script);
116   }
117 
visitedLangsysOT::hb_prune_langsys_context_t118   bool visitedLangsys (const void *l)
119   {
120     if (langsys_count++ > HB_MAX_LANGSYS)
121       return true;
122 
123     return visited (l, visited_langsys);
124   }
125 
126   private:
127   template <typename T>
visitedOT::hb_prune_langsys_context_t128   bool visited (const T *p, hb_set_t &visited_set)
129   {
130     hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) p - (uintptr_t) table);
131      if (visited_set.has (delta))
132       return true;
133 
134     visited_set.add (delta);
135     return false;
136   }
137 
138   public:
139   const void *table;
140   hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
141   const hb_map_t     *duplicate_feature_map;
142   hb_set_t           *new_feature_indexes;
143 
144   private:
145   hb_set_t visited_script;
146   hb_set_t visited_langsys;
147   unsigned script_count;
148   unsigned langsys_count;
149 };
150 
151 struct hb_subset_layout_context_t :
152   hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
153 {
get_nameOT::hb_subset_layout_context_t154   const char *get_name () { return "SUBSET_LAYOUT"; }
default_return_valueOT::hb_subset_layout_context_t155   static return_t default_return_value () { return hb_empty_t (); }
156 
visitScriptOT::hb_subset_layout_context_t157   bool visitScript ()
158   {
159     return script_count++ < HB_MAX_SCRIPTS;
160   }
161 
visitLangSysOT::hb_subset_layout_context_t162   bool visitLangSys ()
163   {
164     return langsys_count++ < HB_MAX_LANGSYS;
165   }
166 
visitFeatureIndexOT::hb_subset_layout_context_t167   bool visitFeatureIndex (int count)
168   {
169     feature_index_count += count;
170     return feature_index_count < HB_MAX_FEATURE_INDICES;
171   }
172 
visitLookupIndexOT::hb_subset_layout_context_t173   bool visitLookupIndex()
174   {
175     lookup_index_count++;
176     return lookup_index_count < HB_MAX_LOOKUP_INDICES;
177   }
178 
179   hb_subset_context_t *subset_context;
180   const hb_tag_t table_tag;
181   const hb_map_t *lookup_index_map;
182   const hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map;
183   const hb_map_t *feature_index_map;
184   unsigned cur_script_index;
185 
hb_subset_layout_context_tOT::hb_subset_layout_context_t186   hb_subset_layout_context_t (hb_subset_context_t *c_,
187 			      hb_tag_t tag_,
188 			      hb_map_t *lookup_map_,
189 			      hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map_,
190 			      hb_map_t *feature_index_map_) :
191 				subset_context (c_),
192 				table_tag (tag_),
193 				lookup_index_map (lookup_map_),
194 				script_langsys_map (script_langsys_map_),
195 				feature_index_map (feature_index_map_),
196 				cur_script_index (0xFFFFu),
197 				script_count (0),
198 				langsys_count (0),
199 				feature_index_count (0),
200 				lookup_index_count (0)
201   {}
202 
203   private:
204   unsigned script_count;
205   unsigned langsys_count;
206   unsigned feature_index_count;
207   unsigned lookup_index_count;
208 };
209 
210 struct hb_collect_variation_indices_context_t :
211        hb_dispatch_context_t<hb_collect_variation_indices_context_t>
212 {
213   template <typename T>
dispatchOT::hb_collect_variation_indices_context_t214   return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
default_return_valueOT::hb_collect_variation_indices_context_t215   static return_t default_return_value () { return hb_empty_t (); }
216 
217   hb_set_t *layout_variation_indices;
218   const hb_set_t *glyph_set;
219   const hb_map_t *gpos_lookups;
220 
hb_collect_variation_indices_context_tOT::hb_collect_variation_indices_context_t221   hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
222 					  const hb_set_t *glyph_set_,
223 					  const hb_map_t *gpos_lookups_) :
224 					layout_variation_indices (layout_variation_indices_),
225 					glyph_set (glyph_set_),
226 					gpos_lookups (gpos_lookups_) {}
227 };
228 
229 template<typename OutputArray>
230 struct subset_offset_array_t
231 {
subset_offset_array_tOT::subset_offset_array_t232   subset_offset_array_t (hb_subset_context_t *subset_context_,
233 			 OutputArray& out_,
234 			 const void *base_) : subset_context (subset_context_),
235 					      out (out_), base (base_) {}
236 
237   template <typename T>
operator ()OT::subset_offset_array_t238   bool operator () (T&& offset)
239   {
240     auto snap = subset_context->serializer->snapshot ();
241     auto *o = out.serialize_append (subset_context->serializer);
242     if (unlikely (!o)) return false;
243     bool ret = o->serialize_subset (subset_context, offset, base);
244     if (!ret)
245     {
246       out.pop ();
247       subset_context->serializer->revert (snap);
248     }
249     return ret;
250   }
251 
252   private:
253   hb_subset_context_t *subset_context;
254   OutputArray &out;
255   const void *base;
256 };
257 
258 
259 template<typename OutputArray, typename Arg>
260 struct subset_offset_array_arg_t
261 {
subset_offset_array_arg_tOT::subset_offset_array_arg_t262   subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
263 			     OutputArray& out_,
264 			     const void *base_,
265 			     Arg &&arg_) : subset_context (subset_context_), out (out_),
266 					  base (base_), arg (arg_) {}
267 
268   template <typename T>
operator ()OT::subset_offset_array_arg_t269   bool operator () (T&& offset)
270   {
271     auto snap = subset_context->serializer->snapshot ();
272     auto *o = out.serialize_append (subset_context->serializer);
273     if (unlikely (!o)) return false;
274     bool ret = o->serialize_subset (subset_context, offset, base, arg);
275     if (!ret)
276     {
277       out.pop ();
278       subset_context->serializer->revert (snap);
279     }
280     return ret;
281   }
282 
283   private:
284   hb_subset_context_t *subset_context;
285   OutputArray &out;
286   const void *base;
287   Arg &&arg;
288 };
289 
290 /*
291  * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
292  * and discards the offset in the array if the subset operation results in an empty
293  * thing.
294  */
295 struct
296 {
297   template<typename OutputArray>
298   subset_offset_array_t<OutputArray>
operator ()OT::__anonbce509a30108299   operator () (hb_subset_context_t *subset_context, OutputArray& out,
300 	       const void *base) const
301   { return subset_offset_array_t<OutputArray> (subset_context, out, base); }
302 
303   /* Variant with one extra argument passed to serialize_subset */
304   template<typename OutputArray, typename Arg>
305   subset_offset_array_arg_t<OutputArray, Arg>
operator ()OT::__anonbce509a30108306   operator () (hb_subset_context_t *subset_context, OutputArray& out,
307 	       const void *base, Arg &&arg) const
308   { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
309 }
310 HB_FUNCOBJ (subset_offset_array);
311 
312 template<typename OutputArray>
313 struct subset_record_array_t
314 {
subset_record_array_tOT::subset_record_array_t315   subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
316 			 const void *base_) : subset_layout_context (c_),
317 					      out (out_), base (base_) {}
318 
319   template <typename T>
320   void
operator ()OT::subset_record_array_t321   operator () (T&& record)
322   {
323     auto snap = subset_layout_context->subset_context->serializer->snapshot ();
324     bool ret = record.subset (subset_layout_context, base);
325     if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
326     else out->len++;
327   }
328 
329   private:
330   hb_subset_layout_context_t *subset_layout_context;
331   OutputArray *out;
332   const void *base;
333 };
334 
335 /*
336  * Helper to subset a RecordList/record array. Subsets each Record in the array and
337  * discards the record if the subset operation returns false.
338  */
339 struct
340 {
341   template<typename OutputArray>
342   subset_record_array_t<OutputArray>
operator ()OT::__anonbce509a30208343   operator () (hb_subset_layout_context_t *c, OutputArray* out,
344 	       const void *base) const
345   { return subset_record_array_t<OutputArray> (c, out, base); }
346 }
347 HB_FUNCOBJ (subset_record_array);
348 
349 
350 template<typename OutputArray>
351 struct serialize_math_record_array_t
352 {
serialize_math_record_array_tOT::serialize_math_record_array_t353   serialize_math_record_array_t (hb_serialize_context_t *serialize_context_,
354                          OutputArray& out_,
355                          const void *base_) : serialize_context (serialize_context_),
356                                               out (out_), base (base_) {}
357 
358   template <typename T>
operator ()OT::serialize_math_record_array_t359   bool operator () (T&& record)
360   {
361     if (!serialize_context->copy (record, base)) return false;
362     out.len++;
363     return true;
364   }
365 
366   private:
367   hb_serialize_context_t *serialize_context;
368   OutputArray &out;
369   const void *base;
370 };
371 
372 /*
373  * Helper to serialize an array of MATH records.
374  */
375 struct
376 {
377   template<typename OutputArray>
378   serialize_math_record_array_t<OutputArray>
operator ()OT::__anonbce509a30308379   operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
380                const void *base) const
381   { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }
382 
383 }
384 HB_FUNCOBJ (serialize_math_record_array);
385 
386 /*
387  *
388  * OpenType Layout Common Table Formats
389  *
390  */
391 
392 
393 /*
394  * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
395  */
396 
/* Closure passed through Record::sanitize to the pointed-to table's
 * sanitize(); carries the record's tag and the offset base.
 * NOTE: field order matters — Record::sanitize aggregate-initializes
 * this as {tag, base}. */
struct Record_sanitize_closure_t {
  hb_tag_t tag;
  const void *list_base;
};
401 
402 template <typename Type>
403 struct Record
404 {
cmpOT::Record405   int cmp (hb_tag_t a) const { return tag.cmp (a); }
406 
subsetOT::Record407   bool subset (hb_subset_layout_context_t *c, const void *base) const
408   {
409     TRACE_SUBSET (this);
410     auto *out = c->subset_context->serializer->embed (this);
411     if (unlikely (!out)) return_trace (false);
412     bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
413     return_trace (ret);
414   }
415 
sanitizeOT::Record416   bool sanitize (hb_sanitize_context_t *c, const void *base) const
417   {
418     TRACE_SANITIZE (this);
419     const Record_sanitize_closure_t closure = {tag, base};
420     return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
421   }
422 
423   Tag		tag;		/* 4-byte Tag identifier */
424   Offset16To<Type>
425 		offset;		/* Offset from beginning of object holding
426 				 * the Record */
427   public:
428   DEFINE_SIZE_STATIC (6);
429 };
430 
431 template <typename Type>
432 struct RecordArrayOf : SortedArray16Of<Record<Type>>
433 {
get_offsetOT::RecordArrayOf434   const Offset16To<Type>& get_offset (unsigned int i) const
435   { return (*this)[i].offset; }
get_offsetOT::RecordArrayOf436   Offset16To<Type>& get_offset (unsigned int i)
437   { return (*this)[i].offset; }
get_tagOT::RecordArrayOf438   const Tag& get_tag (unsigned int i) const
439   { return (*this)[i].tag; }
get_tagsOT::RecordArrayOf440   unsigned int get_tags (unsigned int start_offset,
441 			 unsigned int *record_count /* IN/OUT */,
442 			 hb_tag_t     *record_tags /* OUT */) const
443   {
444     if (record_count)
445     {
446       + this->sub_array (start_offset, record_count)
447       | hb_map (&Record<Type>::tag)
448       | hb_sink (hb_array (record_tags, *record_count))
449       ;
450     }
451     return this->len;
452   }
find_indexOT::RecordArrayOf453   bool find_index (hb_tag_t tag, unsigned int *index) const
454   {
455     return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
456   }
457 };
458 
459 template <typename Type>
460 struct RecordListOf : RecordArrayOf<Type>
461 {
operator []OT::RecordListOf462   const Type& operator [] (unsigned int i) const
463   { return this+this->get_offset (i); }
464 
subsetOT::RecordListOf465   bool subset (hb_subset_context_t *c,
466 	       hb_subset_layout_context_t *l) const
467   {
468     TRACE_SUBSET (this);
469     auto *out = c->serializer->start_embed (*this);
470     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
471 
472     + this->iter ()
473     | hb_apply (subset_record_array (l, out, this))
474     ;
475     return_trace (true);
476   }
477 
sanitizeOT::RecordListOf478   bool sanitize (hb_sanitize_context_t *c) const
479   {
480     TRACE_SANITIZE (this);
481     return_trace (RecordArrayOf<Type>::sanitize (c, this));
482   }
483 };
484 
485 struct Feature;
486 
487 struct RecordListOfFeature : RecordListOf<Feature>
488 {
subsetOT::RecordListOfFeature489   bool subset (hb_subset_context_t *c,
490 	       hb_subset_layout_context_t *l) const
491   {
492     TRACE_SUBSET (this);
493     auto *out = c->serializer->start_embed (*this);
494     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
495 
496     unsigned count = this->len;
497     + hb_zip (*this, hb_range (count))
498     | hb_filter (l->feature_index_map, hb_second)
499     | hb_map (hb_first)
500     | hb_apply (subset_record_array (l, out, this))
501     ;
502     return_trace (true);
503   }
504 };
505 
506 struct Script;
507 struct RecordListOfScript : RecordListOf<Script>
508 {
subsetOT::RecordListOfScript509   bool subset (hb_subset_context_t *c,
510                hb_subset_layout_context_t *l) const
511   {
512     TRACE_SUBSET (this);
513     auto *out = c->serializer->start_embed (*this);
514     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
515 
516     unsigned count = this->len;
517     for (auto _ : + hb_zip (*this, hb_range (count)))
518     {
519       auto snap = c->serializer->snapshot ();
520       l->cur_script_index = _.second;
521       bool ret = _.first.subset (l, this);
522       if (!ret) c->serializer->revert (snap);
523       else out->len++;
524     }
525 
526     return_trace (true);
527   }
528 };
529 
530 struct RangeRecord
531 {
cmpOT::RangeRecord532   int cmp (hb_codepoint_t g) const
533   { return g < first ? -1 : g <= last ? 0 : +1; }
534 
sanitizeOT::RangeRecord535   bool sanitize (hb_sanitize_context_t *c) const
536   {
537     TRACE_SANITIZE (this);
538     return_trace (c->check_struct (this));
539   }
540 
intersectsOT::RangeRecord541   bool intersects (const hb_set_t *glyphs) const
542   { return glyphs->intersects (first, last); }
543 
544   template <typename set_t>
collect_coverageOT::RangeRecord545   bool collect_coverage (set_t *glyphs) const
546   { return glyphs->add_range (first, last); }
547 
548   HBGlyphID16	first;		/* First GlyphID in the range */
549   HBGlyphID16	last;		/* Last GlyphID in the range */
550   HBUINT16	value;		/* Value */
551   public:
552   DEFINE_SIZE_STATIC (6);
553 };
554 DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);
555 
556 
557 struct IndexArray : Array16Of<Index>
558 {
intersectsOT::IndexArray559   bool intersects (const hb_map_t *indexes) const
560   { return hb_any (*this, indexes); }
561 
562   template <typename Iterator,
563 	    hb_requires (hb_is_iterator (Iterator))>
serializeOT::IndexArray564   void serialize (hb_serialize_context_t *c,
565 		  hb_subset_layout_context_t *l,
566 		  Iterator it)
567   {
568     if (!it) return;
569     if (unlikely (!c->extend_min ((*this)))) return;
570 
571     for (const auto _ : it)
572     {
573       if (!l->visitLookupIndex()) break;
574 
575       Index i;
576       i = _;
577       c->copy (i);
578       this->len++;
579     }
580   }
581 
get_indexesOT::IndexArray582   unsigned int get_indexes (unsigned int start_offset,
583 			    unsigned int *_count /* IN/OUT */,
584 			    unsigned int *_indexes /* OUT */) const
585   {
586     if (_count)
587     {
588       + this->sub_array (start_offset, _count)
589       | hb_sink (hb_array (_indexes, *_count))
590       ;
591     }
592     return this->len;
593   }
594 
add_indexes_toOT::IndexArray595   void add_indexes_to (hb_set_t* output /* OUT */) const
596   {
597     output->add_array (as_array ());
598   }
599 };
600 
601 
/* OpenType LangSys table: an optional required-feature index plus an array
 * of feature indices into the FeatureList. */
struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
				    unsigned int *feature_count /* IN/OUT */,
				    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  /* 0xFFFFu in reqFeatureIndex means "no required feature". */
  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
   return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  /* Returns true if this langsys is equivalent to `o` after filtering and
   * remapping both feature lists through feature_index_map: same required
   * feature index and the same remapped feature-index sequence. */
  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
  {
    if (reqFeatureIndex != o.reqFeatureIndex)
      return false;

    auto iter =
    + hb_iter (featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    auto o_iter =
    + hb_iter (o.featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    if (iter.len () != o_iter.len ())
      return false;

    /* Element-wise comparison of the two remapped sequences. */
    for (const auto _ : + hb_zip (iter, o_iter))
      if (_.first != _.second) return false;

    return true;
  }

  /* Adds this langsys's non-duplicate feature indices (per the prune
   * context's duplicate_feature_map) to the collected set.  Bails out on
   * revisit or when the langsys limit is hit. */
  void collect_features (hb_prune_langsys_context_t *c) const
  {
    if (!has_required_feature () && !get_feature_count ()) return;
    if (c->visitedLangsys (this)) return;
    if (has_required_feature () &&
        c->duplicate_feature_map->has (reqFeatureIndex))
      c->new_feature_indexes->add (get_required_feature_index ());

    + hb_iter (featureIndex)
    | hb_filter (c->duplicate_feature_map)
    | hb_sink (c->new_feature_indexes)
    ;
  }

  /* Serializes a copy of this langsys with feature indices remapped through
   * the layout context.  Returns false when no feature index survives. */
  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l,
	       const Tag                  *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    /* Remap the required feature, or mark it absent if it was dropped. */
    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;

    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
				 * reordering table) */
  HBUINT16	reqFeatureIndex;/* Index of a feature required for this
				 * language system--if no required features
				 * = 0xFFFFu */
  IndexArray	featureIndex;	/* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
709 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
710 
/* OpenType Script table: an optional default LangSys plus an array of
 * LangSysRecords sorted by tag. */
struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
				  unsigned int *lang_sys_count /* IN/OUT */,
				  hb_tag_t     *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  /* NOT_FOUND_INDEX is routed to the default langsys. */
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const           { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  /* Collects features for this script's langsys entries and records in
   * c->script_langsys_map (keyed by script_index) which langsys indices are
   * non-redundant — i.e. differ from the default langsys, when one exists. */
  void prune_langsys (hb_prune_langsys_context_t *c,
                      unsigned script_index) const
  {
    if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
    if (c->visitedScript (this)) return;

    /* Make sure an (initially empty) entry exists for this script. */
    if (!c->script_langsys_map->has (script_index))
    {
      hb_set_t* empty_set = hb_set_create ();
      if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
      {
	hb_set_destroy (empty_set);
	return;
      }
    }

    unsigned langsys_count = get_lang_sys_count ();
    if (has_default_lang_sys ())
    {
      //only collect features from non-redundant langsys
      const LangSys& d = get_default_lang_sys ();
      d.collect_features (c);

      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        /* Skip langsys that are identical to the default one. */
        if (l.compare (d, c->duplicate_feature_map)) continue;

        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
    else
    {
      /* No default langsys: every langsys entry is kept. */
      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
  }

  /* Subsets this script: the default langsys (kept for DFLT, or when its
   * subset succeeds) plus the langsys entries previously recorded as active
   * for l->cur_script_index.  Returns false when nothing remains — except
   * for GSUB, where an empty script is still retained. */
  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      /* Serialize the default langsys into its own object so it can be
       * discarded if its subset fails (unless this is the DFLT script). */
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
      }
      else
      {
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
      }
    }

    const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
    if (active_langsys)
    {
      unsigned count = langSys.len;
      + hb_zip (langSys, hb_range (count))
      | hb_filter (active_langsys, hb_second)
      | hb_map (hb_first)
      | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
      | hb_apply (subset_record_array (l, &(out->langSys), this))
      ;
    }

    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  Offset16To<LangSys>
		defaultLangSys;	/* Offset to DefaultLangSys table--from
				 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
		langSys;	/* Array of LangSysRecords--listed
				 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};
835 
836 typedef RecordListOfScript ScriptList;
837 
838 
839 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
840 struct FeatureParamsSize
841 {
  /* Validates the 'size' feature params, accepting only data laid out per
   * the spec (see the history note below for why this needs heuristics). */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);

    /* This subtable has some "history", if you will.  Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
     * beginning of the FeatureList table!  Now, that is dealt with in the
     * Feature implementation.  But we still need to be able to tell junk from
     * real data.  Note: We don't check that the nameID actually exists.
     *
     * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
     *
     * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
     * coming out soon, and that the makeotf program will build a font with a
     * 'size' feature that is correct by the specification.
     *
     * The specification for this feature tag is in the "OpenType Layout Tag
     * Registry". You can see a copy of this at:
     * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
     *
     * Here is one set of rules to determine if the 'size' feature is built
     * correctly, or as by the older versions of MakeOTF. You may be able to do
     * better.
     *
     * Assume that the offset to the size feature is according to specification,
     * and make the following value checks. If it fails, assume the size
     * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
     * If this fails, reject the 'size' feature. The older makeOTF's calculated the
     * offset from the beginning of the FeatureList table, rather than from the
     * beginning of the 'size' Feature table.
     *
     * If "design size" == 0:
     *     fails check
     *
     * Else if ("subfamily identifier" == 0 and
     *     "range start" == 0 and
     *     "range end" == 0 and
     *     "range start" == 0 and
     *     "menu name ID" == 0)
     *     passes check: this is the format used when there is a design size
     * specified, but there is no recommended size range.
     *
     * Else if ("design size" <  "range start" or
     *     "design size" >   "range end" or
     *     "range end" <= "range start" or
     *     "menu name ID"  < 256 or
     *     "menu name ID"  > 32767 or
     *     menu name ID is not a name ID which is actually in the name table)
     *     fails test
     * Else
     *     passes test.
     *
     * NOTE(review): the "range end" <= "range start" condition from the rules
     * above is deliberately NOT enforced in the code below — presumably to
     * tolerate real-world fonts; confirm against upstream history.
     */

    if (!designSize)
      return_trace (false);
    else if (subfamilyID == 0 &&
	     subfamilyNameID == 0 &&
	     rangeStart == 0 &&
	     rangeEnd == 0)
      return_trace (true);
    else if (designSize < rangeStart ||
	     designSize > rangeEnd ||
	     subfamilyNameID < 256 ||
	     subfamilyNameID > 32767)
      return_trace (false);
    else
      return_trace (true);
  }
911 
subsetOT::FeatureParamsSize912   bool subset (hb_subset_context_t *c) const
913   {
914     TRACE_SUBSET (this);
915     return_trace ((bool) c->serializer->embed (*this));
916   }
917 
  HBUINT16	designSize;	/* Represents the design size in 720/inch
				 * units (decipoints).  The design size entry
				 * must be non-zero.  When there is a design
				 * size but no recommended size range, the
				 * rest of the array will consist of zeros. */
  HBUINT16	subfamilyID;	/* Has no independent meaning, but serves
				 * as an identifier that associates fonts
				 * in a subfamily. All fonts which share a
				 * Preferred or Font Family name and which
				 * differ only by size range shall have the
				 * same subfamily value, and no fonts which
				 * differ in weight or style shall have the
				 * same subfamily value. If this value is
				 * zero, the remaining fields in the array
				 * will be ignored. */
  NameID	subfamilyNameID;/* If the preceding value is non-zero, this
				 * value must be set in the range 256 - 32767
				 * (inclusive). It records the value of a
				 * field in the name table, which must
				 * contain English-language strings encoded
				 * in Windows Unicode and Macintosh Roman,
				 * and may contain additional strings
				 * localized to other scripts and languages.
				 * Each of these strings is the name an
				 * application should use, in combination
				 * with the family name, to represent the
				 * subfamily in a menu.  Applications will
				 * choose the appropriate version based on
				 * their selection criteria. */
  HBUINT16	rangeStart;	/* Small end of the recommended usage range
				 * (inclusive), stored in 720/inch units
				 * (decipoints).  The sanitize() checks above
				 * require designSize >= rangeStart. */
  HBUINT16	rangeEnd;	/* Large end of the recommended usage range
				 * (inclusive), stored in 720/inch units
				 * (decipoints).  The sanitize() checks above
				 * require designSize <= rangeEnd. */
  public:
  DEFINE_SIZE_STATIC (10);
};
956 
957 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
struct FeatureParamsStylisticSet
{
  /* Feature parameters for the Stylistic Set ('ssXX') features. */

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Right now minorVersion is at zero.  Which means, any table supports
     * the uiNameID field. */
    return_trace (c->check_struct (this));
  }

  /* Subset: no glyph references here; copy the table verbatim. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16	version;	/* (set to 0): This corresponds to a “minor”
				 * version number. Additional data may be
				 * added to the end of this Feature Parameters
				 * table in the future. */

  NameID	uiNameID;	/* The 'name' table name ID that specifies a
				 * string (or strings, for multiple languages)
				 * for a user-interface label for this
				 * feature.  The values of uiLabelNameId and
				 * sampleTextNameId are expected to be in the
				 * font-specific name ID range (256-32767),
				 * though that is not a requirement in this
				 * Feature Parameters specification. The
				 * user-interface label for the feature can
				 * be provided in multiple languages. An
				 * English string should be included as a
				 * fallback. The string should be kept to a
				 * minimal length to fit comfortably with
				 * different application interfaces. */
  public:
  DEFINE_SIZE_STATIC (4);
};
996 
997 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
struct FeatureParamsCharacterVariants
{
  /* Feature parameters for the Character Variant ('cvXX') features. */

  /* Copies up to *char_count characters, starting at start_offset, into
   * the caller-provided chars array; *char_count is clamped by
   * sub_array() to the number actually available.  Returns the total
   * number of characters in the table. */
  unsigned
  get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
  {
    if (char_count)
    {
      + characters.sub_array (start_offset, char_count)
      | hb_sink (hb_array (chars, *char_count))
      ;
    }
    return characters.len;
  }

  /* Byte size of this table including the trailing characters array. */
  unsigned get_size () const
  { return min_size + characters.len * HBUINT24::static_size; }

  /* Subset: copy the table verbatim.  NOTE(review): the characters
   * array is copied as-is, without remapping to the subset glyph space
   * (these are Unicode values, not glyph IDs). */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  characters.sanitize (c));
  }

  HBUINT16	format;			/* Format number is set to 0. */
  NameID	featUILableNameID;	/* The ‘name’ table name ID that
					 * specifies a string (or strings,
					 * for multiple languages) for a
					 * user-interface label for this
					 * feature. (May be NULL.) */
  NameID	featUITooltipTextNameID;/* The ‘name’ table name ID that
					 * specifies a string (or strings,
					 * for multiple languages) that an
					 * application can use for tooltip
					 * text for this feature. (May be
					 * nullptr.) */
  NameID	sampleTextNameID;	/* The ‘name’ table name ID that
					 * specifies sample text that
					 * illustrates the effect of this
					 * feature. (May be NULL.) */
  HBUINT16	numNamedParameters;	/* Number of named parameters. (May
					 * be zero.) */
  NameID	firstParamUILabelNameID;/* The first ‘name’ table name ID
					 * used to specify strings for
					 * user-interface labels for the
					 * feature parameters. (Must be zero
					 * if numParameters is zero.) */
  Array16Of<HBUINT24>
		characters;		/* Array of the Unicode Scalar Value
					 * of the characters for which this
					 * feature provides glyph variants.
					 * (May be zero.) */
  public:
  DEFINE_SIZE_ARRAY (14, characters);
};
1059 
struct FeatureParams
{
  /* Union of the known FeatureParams flavors; which member is valid is
   * determined externally by the owning feature's tag ('size', 'ssXX',
   * or 'cvXX').  An unrecognized tag is treated as having no params. */

  bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return true;
#endif
    TRACE_SANITIZE (this);
    if (tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.sanitize (c));
    return_trace (true);
  }

  /* Subset the member selected by tag.  Returns false (drop the
   * params) for a null tag or a tag with no known params flavor. */
  bool subset (hb_subset_context_t *c, const Tag* tag) const
  {
    TRACE_SUBSET (this);
    if (!tag) return_trace (false);
    if (*tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.subset (c));
    return_trace (false);
  }

#ifndef HB_NO_LAYOUT_FEATURE_PARAMS
  /* Typed accessors: each returns the corresponding union member when
   * the tag matches, else the shared Null object. */
  const FeatureParamsSize& get_size_params (hb_tag_t tag) const
  {
    if (tag == HB_TAG ('s','i','z','e'))
      return u.size;
    return Null (FeatureParamsSize);
  }
  const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return u.stylisticSet;
    return Null (FeatureParamsStylisticSet);
  }
  const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return u.characterVariants;
    return Null (FeatureParamsCharacterVariants);
  }
#endif

  private:
  union {
  FeatureParamsSize			size;
  FeatureParamsStylisticSet		stylisticSet;
  FeatureParamsCharacterVariants	characterVariants;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};
1120 
struct Feature
{
  unsigned int get_lookup_count () const
  { return lookupIndex.len; }
  /* Returns the lookup index at position i (note: the return type is
   * hb_tag_t, but the value is a lookup index from the array). */
  hb_tag_t get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  /* Copies up to *lookup_count lookup indices starting at start_index;
   * returns the total number of indices in this feature. */
  unsigned int get_lookup_indexes (unsigned int start_index,
				   unsigned int *lookup_count /* IN/OUT */,
				   unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
  { lookupIndex.add_indexes_to (lookup_indexes); }

  const FeatureParams &get_feature_params () const
  { return this+featureParams; }

  bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
  { return lookupIndex.intersects (lookup_indexes); }

  /* Subset: subset the FeatureParams (flavor selected by tag, if given)
   * and remap the lookup indices through l->lookup_index_map, dropping
   * indices that were not retained. */
  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->featureParams.serialize_subset (c, featureParams, this, tag);

    auto it =
    + hb_iter (lookupIndex)
    | hb_filter (l->lookup_index_map)
    | hb_map (l->lookup_index_map)
    ;

    out->lookupIndex.serialize (c->serializer, l, it);
    // The decision to keep or drop this feature is already made before we get here
    // so always retain it.
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t *closure = nullptr) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
      return_trace (false);

    /* Some earlier versions of Adobe tools calculated the offset of the
     * FeatureParams subtable from the beginning of the FeatureList table!
     *
     * If sanitizing "failed" for the FeatureParams subtable, try it with the
     * alternative location.  We would know sanitize "failed" if old value
     * of the offset was non-zero, but it's zeroed now.
     *
     * Only do this for the 'size' feature, since at the time of the faulty
     * Adobe tools, only the 'size' feature had FeatureParams defined.
     */

    if (likely (featureParams.is_null ()))
      return_trace (true);

    /* Remember the offset before sanitize may zero it on failure. */
    unsigned int orig_offset = featureParams;
    if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
      return_trace (false);

    if (featureParams == 0 && closure &&
	closure->tag == HB_TAG ('s','i','z','e') &&
	closure->list_base && closure->list_base < this)
    {
      /* Re-base the offset from the FeatureList start to this table. */
      unsigned int new_offset_int = orig_offset -
				    (((char *) this) - ((char *) closure->list_base));

      Offset16To<FeatureParams> new_offset;
      /* Check that it would not overflow. */
      new_offset = new_offset_int;
      if (new_offset == new_offset_int &&
	  c->try_set (&featureParams, new_offset_int) &&
	  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
	return_trace (false);
    }

    return_trace (true);
  }

  Offset16To<FeatureParams>
		 featureParams;	/* Offset to Feature Parameters table (if one
				 * has been defined for the feature), relative
				 * to the beginning of the Feature Table; = Null
				 * if not required */
  IndexArray	 lookupIndex;	/* Array of LookupList indices */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};
1215 
1216 typedef RecordListOf<Feature> FeatureList;
1217 
1218 
struct LookupFlag : HBUINT16
{
  /* Bit values of the lookupFlag word of a Lookup table. */
  enum Flags {
    RightToLeft		= 0x0001u,
    IgnoreBaseGlyphs	= 0x0002u,
    IgnoreLigatures	= 0x0004u,
    IgnoreMarks		= 0x0008u,
    IgnoreFlags		= 0x000Eu, /* Mask of the three Ignore* bits above. */
    UseMarkFilteringSet	= 0x0010u, /* A markFilteringSet field follows the subtable offsets. */
    Reserved		= 0x00E0u,
    MarkAttachmentType	= 0xFF00u  /* Mask for the mark-attachment class filter. */
  };
  public:
  DEFINE_SIZE_STATIC (2);
};
1234 
1235 } /* namespace OT */
1236 /* This has to be outside the namespace. */
1237 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
1238 namespace OT {
1239 
struct Lookup
{
  unsigned int get_subtable_count () const { return subTable.len; }

  /* Reinterpret the generic Offset16 array as offsets to the concrete
   * subtable type (GSUB vs GPOS supply different TSubTable). */
  template <typename TSubTable>
  const Array16OfOffset16To<TSubTable>& get_subtables () const
  { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
  template <typename TSubTable>
  Array16OfOffset16To<TSubTable>& get_subtables ()
  { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }

  template <typename TSubTable>
  const TSubTable& get_subtable (unsigned int i) const
  { return this+get_subtables<TSubTable> ()[i]; }
  template <typename TSubTable>
  TSubTable& get_subtable (unsigned int i)
  { return this+get_subtables<TSubTable> ()[i]; }

  /* Byte size of this Lookup, including the optional trailing
   * markFilteringSet field when UseMarkFilteringSet is set. */
  unsigned int get_size () const
  {
    const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
      return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
    return (const char *) &markFilteringSet - (const char *) this;
  }

  unsigned int get_type () const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
   * higher 16-bit is mark-filtering-set if the lookup uses one.
   * Not to be confused with glyph_props which is very similar. */
  uint32_t get_props () const
  {
    unsigned int flag = lookupFlag;
    if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      flag += (markFilteringSet << 16);
    }
    return flag;
  }

  /* Dispatch the context to each subtable in order, stopping early if
   * the context asks to; returns the stopping (or default) value. */
  template <typename TSubTable, typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    unsigned int lookup_type = get_type ();
    TRACE_DISPATCH (this, lookup_type);
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...);
      if (c->stop_sublookup_iteration (r))
	return_trace (r);
    }
    return_trace (c->default_return_value ());
  }

  /* Serialize the fixed header and an offset array for num_subtables
   * (offsets themselves are filled in by the caller); appends the
   * markFilteringSet field when lookup_props requests it. */
  bool serialize (hb_serialize_context_t *c,
		  unsigned int lookup_type,
		  uint32_t lookup_props,
		  unsigned int num_subtables)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    lookupType = lookup_type;
    lookupFlag = lookup_props & 0xFFFFu;
    if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      if (unlikely (!c->extend (this))) return_trace (false);
      HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      markFilteringSet = lookup_props >> 16;
    }
    return_trace (true);
  }

  /* Subset: keep only subtables that intersect the subset glyph set.
   * Returns non-zero (true) iff any subtable survived. */
  template <typename TSubTable>
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
    out->lookupType = lookupType;
    out->lookupFlag = lookupFlag;

    const hb_set_t *glyphset = c->plan->glyphset_gsub ();
    unsigned int lookup_type = get_type ();
    + hb_iter (get_subtables <TSubTable> ())
    | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
    | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
    ;

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      /* Copy the trailing markFilteringSet field across. */
      if (unlikely (!c->serializer->extend (out))) return_trace (false);
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
      outMarkFilteringSet = markFilteringSet;
    }

    return_trace (out->subTable.len);
  }

  template <typename TSubTable>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);

    unsigned subtables = get_subtable_count ();
    if (unlikely (!c->visit_subtables (subtables))) return_trace (false);

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      if (!markFilteringSet.sanitize (c)) return_trace (false);
    }

    if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
      return_trace (false);

    if (unlikely (get_type () == TSubTable::Extension && subtables && !c->get_edit_count ()))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type, which shall not be the Extension type
       * itself (but we already checked for that).
       * This is specially important if one has a reverse type!
       *
       * We only do this if sanitizer edit_count is zero.  Otherwise,
       * some of the subtables might have become insane after they
       * were sanity-checked by the edits of subsequent subtables.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
       */
      unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
      for (unsigned int i = 1; i < subtables; i++)
	if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
	  return_trace (false);
    }
    return_trace (true);
  }

  private:
  HBUINT16	lookupType;		/* Different enumerations for GSUB and GPOS */
  HBUINT16	lookupFlag;		/* Lookup qualifiers */
  Array16Of<Offset16>
		subTable;		/* Array of SubTables */
/*HBUINT16	markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
					 * structure. This field is only present if bit
					 * UseMarkFilteringSet of lookup flags is set. */
  public:
  DEFINE_SIZE_ARRAY (6, subTable);
};
1391 
1392 typedef List16OfOffset16To<Lookup> LookupList;
1393 
template <typename TLookup>
struct LookupOffsetList : List16OfOffset16To<TLookup>
{
  /* Subset: keep only the lookups whose index appears in
   * l->lookup_index_map, serializing each retained lookup through its
   * offset.  The offset array itself is compacted accordingly. */
  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->lookup_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_offset_array (c, *out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
  }
};
1419 
1420 
1421 /*
1422  * Coverage Table
1423  */
1424 
1425 struct CoverageFormat1
1426 {
1427   friend struct Coverage;
1428 
1429   private:
get_coverageOT::CoverageFormat11430   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1431   {
1432     unsigned int i;
1433     glyphArray.bfind (glyph_id, &i, HB_NOT_FOUND_STORE, NOT_COVERED);
1434     return i;
1435   }
1436 
1437   template <typename Iterator,
1438       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
serializeOT::CoverageFormat11439   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1440   {
1441     TRACE_SERIALIZE (this);
1442     return_trace (glyphArray.serialize (c, glyphs));
1443   }
1444 
sanitizeOT::CoverageFormat11445   bool sanitize (hb_sanitize_context_t *c) const
1446   {
1447     TRACE_SANITIZE (this);
1448     return_trace (glyphArray.sanitize (c));
1449   }
1450 
intersectsOT::CoverageFormat11451   bool intersects (const hb_set_t *glyphs) const
1452   {
1453     /* TODO Speed up, using hb_set_next() and bsearch()? */
1454     for (const auto& g : glyphArray.as_array ())
1455       if (glyphs->has (g))
1456 	return true;
1457     return false;
1458   }
intersects_coverageOT::CoverageFormat11459   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1460   { return glyphs->has (glyphArray[index]); }
1461 
intersected_coverage_glyphsOT::CoverageFormat11462   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1463   {
1464     unsigned count = glyphArray.len;
1465     for (unsigned i = 0; i < count; i++)
1466       if (glyphs->has (glyphArray[i]))
1467         intersect_glyphs->add (glyphArray[i]);
1468   }
1469 
1470   template <typename set_t>
collect_coverageOT::CoverageFormat11471   bool collect_coverage (set_t *glyphs) const
1472   { return glyphs->add_sorted_array (glyphArray.as_array ()); }
1473 
1474   public:
1475   /* Older compilers need this to be public. */
1476   struct iter_t
1477   {
initOT::CoverageFormat1::iter_t1478     void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
finiOT::CoverageFormat1::iter_t1479     void fini () {}
moreOT::CoverageFormat1::iter_t1480     bool more () const { return i < c->glyphArray.len; }
nextOT::CoverageFormat1::iter_t1481     void next () { i++; }
get_glyphOT::CoverageFormat1::iter_t1482     hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
operator !=OT::CoverageFormat1::iter_t1483     bool operator != (const iter_t& o) const
1484     { return i != o.i || c != o.c; }
1485 
1486     private:
1487     const struct CoverageFormat1 *c;
1488     unsigned int i;
1489   };
1490   private:
1491 
1492   protected:
1493   HBUINT16	coverageFormat;	/* Format identifier--format = 1 */
1494   SortedArray16Of<HBGlyphID16>
1495 		glyphArray;	/* Array of GlyphIDs--in numerical order */
1496   public:
1497   DEFINE_SIZE_ARRAY (4, glyphArray);
1498 };
1499 
1500 struct CoverageFormat2
1501 {
1502   friend struct Coverage;
1503 
1504   private:
get_coverageOT::CoverageFormat21505   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1506   {
1507     const RangeRecord &range = rangeRecord.bsearch (glyph_id);
1508     return likely (range.first <= range.last)
1509 	 ? (unsigned int) range.value + (glyph_id - range.first)
1510 	 : NOT_COVERED;
1511   }
1512 
1513   template <typename Iterator,
1514       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
serializeOT::CoverageFormat21515   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1516   {
1517     TRACE_SERIALIZE (this);
1518     if (unlikely (!c->extend_min (this))) return_trace (false);
1519 
1520     if (unlikely (!glyphs))
1521     {
1522       rangeRecord.len = 0;
1523       return_trace (true);
1524     }
1525 
1526     /* TODO(iter) Write more efficiently? */
1527 
1528     unsigned num_ranges = 0;
1529     hb_codepoint_t last = (hb_codepoint_t) -2;
1530     for (auto g: glyphs)
1531     {
1532       if (last + 1 != g)
1533 	num_ranges++;
1534       last = g;
1535     }
1536 
1537     if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
1538 
1539     unsigned count = 0;
1540     unsigned range = (unsigned) -1;
1541     last = (hb_codepoint_t) -2;
1542     for (auto g: glyphs)
1543     {
1544       if (last + 1 != g)
1545       {
1546 	range++;
1547 	rangeRecord[range].first = g;
1548 	rangeRecord[range].value = count;
1549       }
1550       rangeRecord[range].last = g;
1551       last = g;
1552       count++;
1553     }
1554 
1555     return_trace (true);
1556   }
1557 
sanitizeOT::CoverageFormat21558   bool sanitize (hb_sanitize_context_t *c) const
1559   {
1560     TRACE_SANITIZE (this);
1561     return_trace (rangeRecord.sanitize (c));
1562   }
1563 
intersectsOT::CoverageFormat21564   bool intersects (const hb_set_t *glyphs) const
1565   {
1566     /* TODO Speed up, using hb_set_next() and bsearch()? */
1567     /* TODO(iter) Rewrite as dagger. */
1568     for (const auto& range : rangeRecord.as_array ())
1569       if (range.intersects (glyphs))
1570 	return true;
1571     return false;
1572   }
intersects_coverageOT::CoverageFormat21573   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1574   {
1575     /* TODO(iter) Rewrite as dagger. */
1576     for (const auto& range : rangeRecord.as_array ())
1577     {
1578       if (range.value <= index &&
1579 	  index < (unsigned int) range.value + (range.last - range.first) &&
1580 	  range.intersects (glyphs))
1581 	return true;
1582       else if (index < range.value)
1583 	return false;
1584     }
1585     return false;
1586   }
1587 
intersected_coverage_glyphsOT::CoverageFormat21588   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1589   {
1590     for (const auto& range : rangeRecord.as_array ())
1591     {
1592       if (!range.intersects (glyphs)) continue;
1593       for (hb_codepoint_t g = range.first; g <= range.last; g++)
1594         if (glyphs->has (g)) intersect_glyphs->add (g);
1595     }
1596   }
1597 
1598   template <typename set_t>
collect_coverageOT::CoverageFormat21599   bool collect_coverage (set_t *glyphs) const
1600   {
1601     unsigned int count = rangeRecord.len;
1602     for (unsigned int i = 0; i < count; i++)
1603       if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
1604 	return false;
1605     return true;
1606   }
1607 
1608   public:
1609   /* Older compilers need this to be public. */
1610   struct iter_t
1611   {
initOT::CoverageFormat2::iter_t1612     void init (const CoverageFormat2 &c_)
1613     {
1614       c = &c_;
1615       coverage = 0;
1616       i = 0;
1617       j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
1618       if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
1619       {
1620 	/* Broken table. Skip. */
1621 	i = c->rangeRecord.len;
1622       }
1623     }
finiOT::CoverageFormat2::iter_t1624     void fini () {}
moreOT::CoverageFormat2::iter_t1625     bool more () const { return i < c->rangeRecord.len; }
nextOT::CoverageFormat2::iter_t1626     void next ()
1627     {
1628       if (j >= c->rangeRecord[i].last)
1629       {
1630 	i++;
1631 	if (more ())
1632 	{
1633 	  unsigned int old = coverage;
1634 	  j = c->rangeRecord[i].first;
1635 	  coverage = c->rangeRecord[i].value;
1636 	  if (unlikely (coverage != old + 1))
1637 	  {
1638 	    /* Broken table. Skip. Important to avoid DoS.
1639 	     * Also, our callers depend on coverage being
1640 	     * consecutive and monotonically increasing,
1641 	     * ie. iota(). */
1642 	   i = c->rangeRecord.len;
1643 	   return;
1644 	  }
1645 	}
1646 	return;
1647       }
1648       coverage++;
1649       j++;
1650     }
get_glyphOT::CoverageFormat2::iter_t1651     hb_codepoint_t get_glyph () const { return j; }
operator !=OT::CoverageFormat2::iter_t1652     bool operator != (const iter_t& o) const
1653     { return i != o.i || j != o.j || c != o.c; }
1654 
1655     private:
1656     const struct CoverageFormat2 *c;
1657     unsigned int i, coverage;
1658     hb_codepoint_t j;
1659   };
1660   private:
1661 
1662   protected:
1663   HBUINT16	coverageFormat;	/* Format identifier--format = 2 */
1664   SortedArray16Of<RangeRecord>
1665 		rangeRecord;	/* Array of glyph ranges--ordered by
1666 				 * Start GlyphID. rangeCount entries
1667 				 * long */
1668   public:
1669   DEFINE_SIZE_ARRAY (4, rangeRecord);
1670 };
1671 
1672 struct Coverage
1673 {
  /* Has interface. */
  static constexpr unsigned SENTINEL = NOT_COVERED;
  typedef unsigned int value_t;
  value_t operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
  /* Predicate. */
  bool operator () (hb_codepoint_t k) const { return has (k); }

  /* Coverage index of k, or NOT_COVERED. */
  unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
get_coverageOT::Coverage1683   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1684   {
1685     switch (u.format) {
1686     case 1: return u.format1.get_coverage (glyph_id);
1687     case 2: return u.format2.get_coverage (glyph_id);
1688     default:return NOT_COVERED;
1689     }
1690   }
1691 
  /* Serialize the sorted glyph iterator, choosing the smaller encoding:
   * format 1 (flat glyph array) vs format 2 (range records). */
  template <typename Iterator,
      hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    /* Count glyphs and maximal consecutive runs. */
    unsigned count = 0;
    unsigned num_ranges = 0;
    hb_codepoint_t last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
	num_ranges++;
      last = g;
      count++;
    }
    /* A range record costs three 16-bit words; format 1 costs one per glyph. */
    u.format = count <= num_ranges * 3 ? 1 : 2;

    switch (u.format)
    {
    case 1: return_trace (u.format1.serialize (c, glyphs));
    case 2: return_trace (u.format2.serialize (c, glyphs));
    default:return_trace (false);
    }
  }
1718 
subsetOT::Coverage1719   bool subset (hb_subset_context_t *c) const
1720   {
1721     TRACE_SUBSET (this);
1722     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1723     const hb_map_t &glyph_map = *c->plan->glyph_map;
1724 
1725     auto it =
1726     + iter ()
1727     | hb_filter (glyphset)
1728     | hb_map_retains_sorting (glyph_map)
1729     ;
1730 
1731     bool ret = bool (it);
1732     Coverage_serialize (c->serializer, it);
1733     return_trace (ret);
1734   }
1735 
sanitizeOT::Coverage1736   bool sanitize (hb_sanitize_context_t *c) const
1737   {
1738     TRACE_SANITIZE (this);
1739     if (!u.format.sanitize (c)) return_trace (false);
1740     switch (u.format)
1741     {
1742     case 1: return_trace (u.format1.sanitize (c));
1743     case 2: return_trace (u.format2.sanitize (c));
1744     default:return_trace (true);
1745     }
1746   }
1747 
intersectsOT::Coverage1748   bool intersects (const hb_set_t *glyphs) const
1749   {
1750     switch (u.format)
1751     {
1752     case 1: return u.format1.intersects (glyphs);
1753     case 2: return u.format2.intersects (glyphs);
1754     default:return false;
1755     }
1756   }
intersects_coverageOT::Coverage1757   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1758   {
1759     switch (u.format)
1760     {
1761     case 1: return u.format1.intersects_coverage (glyphs, index);
1762     case 2: return u.format2.intersects_coverage (glyphs, index);
1763     default:return false;
1764     }
1765   }
1766 
  /* Adds all covered glyphs to `glyphs`.
   * Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format)
    {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
    default:return false;
    }
  }
1779 
intersected_coverage_glyphsOT::Coverage1780   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1781   {
1782     switch (u.format)
1783     {
1784     case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1785     case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1786     default:return ;
1787     }
1788   }
1789 
  /* Sorted iterator over all glyphs of a Coverage table.  Wraps the
   * per-format iterators in a format-tagged union. */
  struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
  {
    static constexpr bool is_sorted_iterator = true;
    iter_t (const Coverage &c_ = Null (Coverage))
    {
      /* Zero everything first so the inactive union member is in a
       * deterministic state regardless of format. */
      memset (this, 0, sizeof (*this));
      format = c_.u.format;
      switch (format)
      {
      case 1: u.format1.init (c_.u.format1); return;
      case 2: u.format2.init (c_.u.format2); return;
      default:				     return;
      }
    }
    /* Whether more glyphs remain; unknown formats iterate as empty. */
    bool __more__ () const
    {
      switch (format)
      {
      case 1: return u.format1.more ();
      case 2: return u.format2.more ();
      default:return false;
      }
    }
    void __next__ ()
    {
      switch (format)
      {
      case 1: u.format1.next (); break;
      case 2: u.format2.next (); break;
      default:			 break;
      }
    }
    typedef hb_codepoint_t __item_t__;
    __item_t__ __item__ () const { return get_glyph (); }

    /* Current glyph id; 0 for an unrecognized format. */
    hb_codepoint_t get_glyph () const
    {
      switch (format)
      {
      case 1: return u.format1.get_glyph ();
      case 2: return u.format2.get_glyph ();
      default:return 0;
      }
    }
    bool operator != (const iter_t& o) const
    {
      /* Iterators over different formats never compare equal. */
      if (format != o.format) return true;
      switch (format)
      {
      case 1: return u.format1 != o.u.format1;
      case 2: return u.format2 != o.u.format2;
      default:return false;
      }
    }

    private:
    unsigned int format;
    union {
    CoverageFormat2::iter_t	format2; /* Put this one first since it's larger; helps shut up compiler. */
    CoverageFormat1::iter_t	format1;
    } u;
  };
iterOT::Coverage1852   iter_t iter () const { return iter_t (*this); }
1853 
  protected:
  /* Format-tagged union; `format` aliases the leading 16-bit field of
   * each subtable (presumably each begins with its format id -- see the
   * CoverageFormat1/2 definitions earlier in this file). */
  union {
  HBUINT16		format;		/* Format identifier */
  CoverageFormat1	format1;
  CoverageFormat2	format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
1863 
1864 template<typename Iterator>
1865 static inline void
Coverage_serialize(hb_serialize_context_t * c,Iterator it)1866 Coverage_serialize (hb_serialize_context_t *c,
1867 		    Iterator it)
1868 { c->start_embed<Coverage> ()->serialize (c, it); }
1869 
/* Remaps the class values in `klasses` to a dense range (recorded in
 * `klass_map`), then serializes a ClassDef for `glyphs` with the remapped
 * values.  With a null `klass_map`, serializes the original class values
 * unchanged. */
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID16> &glyphs,
					  const hb_set_t &klasses,
                                          bool use_class_zero,
					  hb_map_t *klass_map /*INOUT*/)
{
  if (!klass_map)
  {
    ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
						   | hb_map (gid_klass_map)));
    return;
  }

  /* any glyph not assigned a class value falls into Class zero (0),
   * if any glyph assigned to class 0, remapping must start with 0->0*/
  if (!use_class_zero)
    klass_map->set (0, 0);

  /* Assign new dense ids in the iteration order of `klasses`; id 0 is
   * skipped when it is already taken by the 0->0 mapping above. */
  unsigned idx = klass_map->has (0) ? 1 : 0;
  for (const unsigned k: klasses.iter ())
  {
    if (klass_map->has (k)) continue;
    klass_map->set (k, idx);
    idx++;
  }

  auto it =
  + glyphs.iter ()
  | hb_map_retains_sorting ([&] (const HBGlyphID16& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
			    {
			      unsigned new_klass = klass_map->get (gid_klass_map[gid]);
			      return hb_pair ((hb_codepoint_t)gid, new_klass);
			    })
  ;

  /* Propagate any allocation failure in the scratch containers into the
   * serializer before emitting. */
  c->propagate_error (glyphs, klasses);
  ClassDef_serialize (c, it);
}
1909 
1910 /*
1911  * Class Definition Table
1912  */
1913 
/* ClassDefFormat1: class values stored as a flat array, one entry per
 * glyph, for the contiguous glyph range starting at `startGlyph`. */
struct ClassDefFormat1
{
  friend struct ClassDef;

  private:
  /* Class of glyph_id.  Glyphs outside [startGlyph, startGlyph +
   * classValue.len) index out of range and rely on the array's checked
   * operator[] -- presumably yielding 0; confirm against Array16Of. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return classValue[(unsigned int) (glyph_id - startGlyph)];
  }

  /* Serializes from (glyph, class) pairs sorted by glyph id.  Note:
   * classFormat is already written by ClassDef::serialize through the
   * shared union format field; the empty case sets it explicitly. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
		  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    if (unlikely (!it))
    {
      classFormat = 1;
      startGlyph = 0;
      classValue.len = 0;
      return_trace (true);
    }

    hb_codepoint_t glyph_min = (*it).first;
    hb_codepoint_t glyph_max = + it
			       | hb_map (hb_first)
			       | hb_reduce (hb_max, 0u);
    unsigned glyph_count = glyph_max - glyph_min + 1;

    startGlyph = glyph_min;
    if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
    /* Glyphs absent from the input keep class 0. */
    for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
    {
      unsigned idx = gid_klass_pair.first - glyph_min;
      classValue[idx] = gid_klass_pair.second;
    }
    return_trace (true);
  }

  /* Subsets this ClassDef: keeps retained glyphs with non-zero classes
   * (optionally filtered by glyph_filter), remaps glyph ids through the
   * plan and class values densely into klass_map.
   * Returns whether the resulting (possibly empty) table should be kept. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_sorted_vector_t<HBGlyphID16> glyphs;
    hb_set_t orig_klasses;
    hb_map_t gid_org_klass_map;

    hb_codepoint_t start = startGlyph;
    hb_codepoint_t end   = start + classValue.len;

    for (const hb_codepoint_t gid : + hb_range (start, end)
                                    | hb_filter (glyphset))
    {
      if (glyph_filter && !glyph_filter->has(gid)) continue;

      unsigned klass = classValue[gid - start];
      if (!klass) continue;

      glyphs.push (glyph_map[gid]);
      gid_org_klass_map.set (glyph_map[gid], klass);
      orig_klasses.add (klass);
    }

    /* Class 0 can only be elided if every retained glyph got an explicit
     * class; otherwise some glyph would silently change class. */
    unsigned glyph_count = glyph_filter
                           ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
                           : glyphset.get_population ();
    use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
    ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
				  glyphs, orig_klasses, use_class_zero, klass_map);
    return_trace (keep_empty_table || (bool) glyphs);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && classValue.sanitize (c));
  }

  /* Adds the glyphs with a non-zero class to `glyphs`, as ranges.
   * NOTE(review): each flushed range ends at the class-0 boundary index
   * itself (add_range is inclusive), so the result can be a superset of
   * the non-zero-class glyphs -- verify before relying on exactness. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int start = 0;
    unsigned int count = classValue.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (classValue[i])
	continue;

      if (start != i)
	if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
	  return false;

      start = i + 1;
    }
    if (start != count)
      if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
	return false;

    return true;
  }

  /* Adds to `glyphs` every glyph whose class equals `klass`. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned klass) const
  {
    unsigned int count = classValue.len;
    for (unsigned int i = 0; i < count; i++)
      if (classValue[i] == klass) glyphs->add (startGlyph + i);
    return true;
  }

  /* Whether any glyph in `glyphs` has a non-zero class. */
  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next()? */
    hb_codepoint_t start = startGlyph;
    hb_codepoint_t end = startGlyph + classValue.len;
    /* startGlyph - 1 wraps to HB_SET_VALUE_INVALID when startGlyph is 0,
     * which makes hb_set_next start from the set's first member. */
    for (hb_codepoint_t iter = startGlyph - 1;
	 hb_set_next (glyphs, &iter) && iter < end;)
      if (classValue[iter - start]) return true;
    return false;
  }
  /* Whether any glyph in `glyphs` has class `klass`.  Class 0 also matches
   * glyphs outside the covered range entirely. */
  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
  {
    unsigned int count = classValue.len;
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      if (!hb_set_next (glyphs, &g)) return false;
      if (g < startGlyph) return true;
      g = startGlyph + count - 1;
      if (hb_set_next (glyphs, &g)) return true;
      /* Fall through. */
    }
    /* TODO Speed up, using set overlap first? */
    /* TODO(iter) Rewrite as dagger. */
    HBUINT16 k {klass};
    const HBUINT16 *arr = classValue.arrayZ;
    for (unsigned int i = 0; i < count; i++)
      if (arr[i] == k && glyphs->has (startGlyph + i))
	return true;
    return false;
  }

  /* Adds to intersect_glyphs the members of `glyphs` with class `klass`;
   * for class 0, the members lying outside the covered range. */
  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    unsigned count = classValue.len;
    if (klass == 0)
    {
      hb_codepoint_t endGlyph = startGlyph + count -1;
      for (hb_codepoint_t g : glyphs->iter ())
        if (g < startGlyph || g > endGlyph)
          intersect_glyphs->add (g);

      return;
    }

    for (unsigned i = 0; i < count; i++)
      if (classValue[i] == klass && glyphs->has (startGlyph + i))
        intersect_glyphs->add (startGlyph + i);
  }

  /* Adds to intersect_classes every class attained by a member of
   * `glyphs`, including 0 when a member falls outside the range. */
  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    if (glyphs->is_empty ()) return;
    hb_codepoint_t end_glyph = startGlyph + classValue.len - 1;
    if (glyphs->get_min () < startGlyph ||
        glyphs->get_max () > end_glyph)
      intersect_classes->add (0);

    for (const auto& _ : + hb_enumerate (classValue))
    {
      hb_codepoint_t g = startGlyph + _.first;
      if (glyphs->has (g))
        intersect_classes->add (_.second);
    }
  }

  protected:
  HBUINT16	classFormat;	/* Format identifier--format = 1 */
  HBGlyphID16	startGlyph;	/* First GlyphID of the classValueArray */
  Array16Of<HBUINT16>
		classValue;	/* Array of Class Values--one per GlyphID */
  public:
  DEFINE_SIZE_ARRAY (6, classValue);
};
2108 
/* ClassDefFormat2: class values stored as sorted, non-overlapping glyph
 * ranges, each carrying one class value. */
struct ClassDefFormat2
{
  friend struct ClassDef;

  private:
  /* Class of glyph_id via binary search; a glyph in no range gets the
   * not-found record's value -- presumably the Null (0) record; confirm
   * against SortedArray16Of::bsearch. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return rangeRecord.bsearch (glyph_id).value;
  }

  /* Serializes from (glyph, class) pairs sorted by glyph id, merging runs
   * of consecutive glyphs with equal class into a single RangeRecord.
   * Records are appended via c->copy and rangeRecord.len patched at the
   * end.  classFormat is already written by ClassDef::serialize through
   * the shared union format field; the empty case sets it explicitly. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
		  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    if (unlikely (!it))
    {
      classFormat = 2;
      rangeRecord.len = 0;
      return_trace (true);
    }

    unsigned num_ranges = 1;
    hb_codepoint_t prev_gid = (*it).first;
    unsigned prev_klass = (*it).second;

    RangeRecord range_rec;
    range_rec.first = prev_gid;
    range_rec.last = prev_gid;
    range_rec.value = prev_klass;

    RangeRecord *record = c->copy (range_rec);
    if (unlikely (!record)) return_trace (false);

    for (const auto gid_klass_pair : + (++it))
    {
      hb_codepoint_t cur_gid = gid_klass_pair.first;
      unsigned cur_klass = gid_klass_pair.second;

      if (cur_gid != prev_gid + 1 ||
	  cur_klass != prev_klass)
      {
	/* Close the current range and open a new one. */
	if (unlikely (!record)) break;	/* A previous copy failed; serializer is in error. */
	record->last = prev_gid;
	num_ranges++;

	range_rec.first = cur_gid;
	range_rec.last = cur_gid;
	range_rec.value = cur_klass;

	record = c->copy (range_rec);
      }

      prev_klass = cur_klass;
      prev_gid = cur_gid;
    }

    if (likely (record)) record->last = prev_gid;	/* Close the final range. */
    rangeRecord.len = num_ranges;
    return_trace (true);
  }

  /* Subsets this ClassDef: keeps retained glyphs with non-zero classes
   * (optionally filtered by glyph_filter), remaps glyph ids through the
   * plan and class values densely into klass_map.
   * Returns whether the resulting (possibly empty) table should be kept. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_sorted_vector_t<HBGlyphID16> glyphs;
    hb_set_t orig_klasses;
    hb_map_t gid_org_klass_map;

    unsigned count = rangeRecord.len;
    for (unsigned i = 0; i < count; i++)
    {
      unsigned klass = rangeRecord[i].value;
      if (!klass) continue;
      hb_codepoint_t start = rangeRecord[i].first;
      hb_codepoint_t end   = rangeRecord[i].last + 1;
      for (hb_codepoint_t g = start; g < end; g++)
      {
	if (!glyphset.has (g)) continue;
        if (glyph_filter && !glyph_filter->has (g)) continue;
	glyphs.push (glyph_map[g]);
	gid_org_klass_map.set (glyph_map[g], klass);
	orig_klasses.add (klass);
      }
    }

    /* Class 0 can only be elided if every retained glyph got an explicit
     * class; otherwise some glyph would silently change class. */
    unsigned glyph_count = glyph_filter
                           ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
                           : glyphset.get_population ();
    use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
    ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
				  glyphs, orig_klasses, use_class_zero, klass_map);
    return_trace (keep_empty_table || (bool) glyphs);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  /* Adds the glyphs of every range with a non-zero class to `glyphs`. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].value)
	if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
	  return false;
    return true;
  }

  /* Adds the glyphs of every range whose class equals `klass`. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (rangeRecord[i].value == klass)
	if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
	  return false;
    }
    return true;
  }

  /* Whether any glyph in `glyphs` has a non-zero class. */
  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next() and bsearch()? */
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const auto& range = rangeRecord[i];
      if (range.intersects (glyphs) && range.value)
	return true;
    }
    return false;
  }
  /* Whether any glyph in `glyphs` has class `klass`.  Class 0 also matches
   * glyphs lying in the gaps between (or outside) the ranges. */
  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
  {
    unsigned int count = rangeRecord.len;
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (unsigned int i = 0; i < count; i++)
      {
	if (!hb_set_next (glyphs, &g))
	  break;
	if (g < rangeRecord[i].first)
	  return true;
	g = rangeRecord[i].last;
      }
      if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
	return true;
      /* Fall through. */
    }
    /* TODO Speed up, using set overlap first? */
    /* TODO(iter) Rewrite as dagger. */
    HBUINT16 k {klass};
    const RangeRecord *arr = rangeRecord.arrayZ;
    for (unsigned int i = 0; i < count; i++)
      if (arr[i].value == k && arr[i].intersects (glyphs))
	return true;
    return false;
  }

  /* Adds to intersect_glyphs the members of `glyphs` with class `klass`;
   * for class 0, the members that fall in no range. */
  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    unsigned count = rangeRecord.len;
    if (klass == 0)
    {
      /* Walk set members and ranges in parallel; members before each
       * range's start are uncovered, as are members after the last range. */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (unsigned int i = 0; i < count; i++)
      {
        if (!hb_set_next (glyphs, &g))
          break;
        while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
        {
          intersect_glyphs->add (g);
          hb_set_next (glyphs, &g);
        }
        g = rangeRecord[i].last;
      }
      while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
        intersect_glyphs->add (g);

      return;
    }

    hb_codepoint_t g = HB_SET_VALUE_INVALID;
    for (unsigned int i = 0; i < count; i++)
    {
      if (rangeRecord[i].value != klass) continue;

      /* Resume from the previous cursor when it already reaches into or
       * past this range, to avoid rescanning the set from its start. */
      if (g != HB_SET_VALUE_INVALID)
      {
        if (g >= rangeRecord[i].first &&
            g <= rangeRecord[i].last)
          intersect_glyphs->add (g);
        if (g > rangeRecord[i].last)
          continue;
      }

      g = rangeRecord[i].first - 1;
      while (hb_set_next (glyphs, &g))
      {
        if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
          intersect_glyphs->add (g);
        else if (g > rangeRecord[i].last)
          break;
      }
    }
  }

  /* Adds to intersect_classes every class attained by a member of
   * `glyphs`, including 0 when a member falls in no range. */
  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    if (glyphs->is_empty ()) return;

    unsigned count = rangeRecord.len;
    hb_codepoint_t g = HB_SET_VALUE_INVALID;
    for (unsigned int i = 0; i < count; i++)
    {
      if (!hb_set_next (glyphs, &g))
        break;
      if (g < rangeRecord[i].first)
      {
        intersect_classes->add (0);
        break;
      }
      g = rangeRecord[i].last;
    }
    if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
      intersect_classes->add (0);

    for (const RangeRecord& record : rangeRecord.iter ())
      if (record.intersects (glyphs))
        intersect_classes->add (record.value);
  }

  protected:
  HBUINT16	classFormat;	/* Format identifier--format = 2 */
  SortedArray16Of<RangeRecord>
		rangeRecord;	/* Array of glyph ranges--ordered by
				 * Start GlyphID */
  public:
  DEFINE_SIZE_ARRAY (4, rangeRecord);
};
2366 
/* Class Definition Table wrapper: dispatches on the stored format to one
 * of the two subtable layouts above. */
struct ClassDef
{
  /* Has interface. */
  static constexpr unsigned SENTINEL = 0;
  typedef unsigned int value_t;
  value_t operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
  /* Projection. */
  hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }

  unsigned int get (hb_codepoint_t k) const { return get_class (k); }
  /* Class of glyph_id; 0 (the default class) for unknown formats. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_class (glyph_id);
    case 2: return u.format2.get_class (glyph_id);
    default:return 0;
    }
  }

  /* Serializes from (glyph, class) pairs; pairs with class 0 are dropped
   * up front since class 0 is implicit.  Chooses format 1 (flat array)
   * over format 2 (ranges) when its estimated size in 16-bit units --
   * 1 + glyph span vs 3 per range -- is no larger. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    auto it = + it_with_class_zero | hb_filter (hb_second);

    unsigned format = 2;
    if (likely (it))
    {
      hb_codepoint_t glyph_min = (*it).first;
      hb_codepoint_t glyph_max = glyph_min;

      unsigned num_glyphs = 0;
      unsigned num_ranges = 1;
      hb_codepoint_t prev_gid = glyph_min;
      unsigned prev_klass = (*it).second;

      for (const auto gid_klass_pair : it)
      {
	hb_codepoint_t cur_gid = gid_klass_pair.first;
	unsigned cur_klass = gid_klass_pair.second;
        num_glyphs++;
	if (cur_gid == glyph_min) continue;	/* First pair seeded prev_gid/prev_klass above. */
        if (cur_gid > glyph_max) glyph_max = cur_gid;
	if (cur_gid != prev_gid + 1 ||
	    cur_klass != prev_klass)
	  num_ranges++;

	prev_gid = cur_gid;
	prev_klass = cur_klass;
      }

      if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
	format = 1;
    }
    /* Writing u.format also sets the chosen subtable's classFormat,
     * since they share the union's leading 16 bits. */
    u.format = format;

    switch (u.format)
    {
    case 1: return_trace (u.format1.serialize (c, it));
    case 2: return_trace (u.format2.serialize (c, it));
    default:return_trace (false);
    }
  }

  /* The remaining methods simply dispatch to the active subtable; see the
   * format structs for semantics. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    switch (u.format) {
    case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
    case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
    default:return_trace (false);
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    default:return_trace (true);
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
    default:return false;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_class (glyphs, klass);
    case 2: return u.format2.collect_class (glyphs, klass);
    default:return false;
    }
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects (glyphs);
    case 2: return u.format2.intersects (glyphs);
    default:return false;
    }
  }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects_class (glyphs, klass);
    case 2: return u.format2.intersects_class (glyphs, klass);
    default:return false;
    }
  }

  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    default:return;
    }
  }

  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    switch (u.format) {
    case 1: return u.format1.intersected_classes (glyphs, intersect_classes);
    case 2: return u.format2.intersected_classes (glyphs, intersect_classes);
    default:return;
    }
  }


  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ClassDefFormat1	format1;
  ClassDefFormat2	format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
2529 
2530 template<typename Iterator>
ClassDef_serialize(hb_serialize_context_t * c,Iterator it)2531 static inline void ClassDef_serialize (hb_serialize_context_t *c,
2532 				       Iterator it)
2533 { c->start_embed<ClassDef> ()->serialize (c, it); }
2534 
2535 
2536 /*
2537  * Item Variation Store
2538  */
2539 
struct VarRegionAxis
{
  /* Scalar in [0, 1] for this axis at normalized coordinate `coord`
   * (2.14 fixed point carried in an int): 1 at the peak or when the axis
   * does not participate (peak == 0), 0 at/outside start and end, linear
   * in between. */
  float evaluate (int coord) const
  {
    int start = startCoord, peak = peakCoord, end = endCoord;

    /* TODO Move these to sanitize(). */
    /* Malformed regions (unordered, or straddling zero with a non-zero
     * peak) are treated as not participating. */
    if (unlikely (start > peak || peak > end))
      return 1.;
    if (unlikely (start < 0 && end > 0 && peak != 0))
      return 1.;

    if (peak == 0 || coord == peak)
      return 1.;

    if (coord <= start || end <= coord)
      return 0.;

    /* Interpolate */
    if (coord < peak)
      return float (coord - start) / (peak - start);
    else
      return float (end - coord) / (end - peak);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
    /* TODO Handle invalid start/peak/end configs, so we don't
     * have to do that at runtime. */
  }

  public:
  F2DOT14	startCoord;
  F2DOT14	peakCoord;
  F2DOT14	endCoord;
  public:
  DEFINE_SIZE_STATIC (6);
};
2580 
struct VarRegionList
{
  /* Product of the per-axis scalars of region `region_index` at `coords`
   * (missing coordinates default to 0).  Returns 0 for an out-of-bounds
   * region index, or as soon as any axis contributes 0. */
  float evaluate (unsigned int region_index,
		  const int *coords, unsigned int coord_len) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);

    float v = 1.;
    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
	return 0.;	/* Short-circuit: the whole product is 0. */
      v *= factor;
    }
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && axesZ.sanitize (c, axisCount * regionCount));
  }

  /* Serializes a copy of `src` containing only the regions present in
   * region_map, laid out in remapped order. */
  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    /* NOTE(review): `axisCount * regionCount` is evaluated before the
     * overflow check sees it -- verify hb_unsigned_mul_overflows guards
     * the intended multiplication. */
    if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
					     VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend (this))) return_trace (false);
    unsigned int region_count = src->regionCount;
    for (unsigned int r = 0; r < regionCount; r++)
    {
      unsigned int backward = region_map.backward (r);
      if (backward >= region_count) return_trace (false);
      memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  /* Total byte size of this region list. */
  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }

  public:
  HBUINT16	axisCount;
  HBUINT15	regionCount;
  protected:
  UnsizedArrayOf<VarRegionAxis>
		axesZ;
  public:
  DEFINE_SIZE_ARRAY (4, axesZ);
};
2641 
2642 struct VarData
2643 {
  /* Number of regions each delta-set row references. */
  unsigned int get_region_index_count () const
  { return regionIndices.len; }

  /* Bytes per delta-set row: 2*shortCount + (regionIndices.len -
   * shortCount), i.e. one HBINT16 per short delta plus one HBINT8 for
   * each remaining region (sanitize() enforces shortCount <= len). */
  unsigned int get_row_size () const
  { return shortCount + regionIndices.len; }

  /* Total byte size of this VarData subtable. */
  unsigned int get_size () const
  { return min_size
	 - regionIndices.min_size + regionIndices.get_size ()
	 + itemCount * get_row_size ();
  }
2655 
get_deltaOT::VarData2656   float get_delta (unsigned int inner,
2657 		   const int *coords, unsigned int coord_count,
2658 		   const VarRegionList &regions) const
2659   {
2660     if (unlikely (inner >= itemCount))
2661       return 0.;
2662 
2663    unsigned int count = regionIndices.len;
2664    unsigned int scount = shortCount;
2665 
2666    const HBUINT8 *bytes = get_delta_bytes ();
2667    const HBUINT8 *row = bytes + inner * (scount + count);
2668 
2669    float delta = 0.;
2670    unsigned int i = 0;
2671 
2672    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
2673    for (; i < scount; i++)
2674    {
2675      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2676      delta += scalar * *scursor++;
2677    }
2678    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
2679    for (; i < count; i++)
2680    {
2681      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2682      delta += scalar * *bcursor++;
2683    }
2684 
2685    return delta;
2686   }
2687 
get_region_scalarsOT::VarData2688   void get_region_scalars (const int *coords, unsigned int coord_count,
2689 			   const VarRegionList &regions,
2690 			   float *scalars /*OUT */,
2691 			   unsigned int num_scalars) const
2692   {
2693     unsigned count = hb_min (num_scalars, regionIndices.len);
2694     for (unsigned int i = 0; i < count; i++)
2695       scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2696     for (unsigned int i = count; i < num_scalars; i++)
2697       scalars[i] = 0.f;
2698   }
2699 
sanitizeOT::VarData2700   bool sanitize (hb_sanitize_context_t *c) const
2701   {
2702     TRACE_SANITIZE (this);
2703     return_trace (c->check_struct (this) &&
2704 		  regionIndices.sanitize (c) &&
2705 		  shortCount <= regionIndices.len &&
2706 		  c->check_range (get_delta_bytes (),
2707 				  itemCount,
2708 				  get_row_size ()));
2709   }
2710 
serializeOT::VarData2711   bool serialize (hb_serialize_context_t *c,
2712 		  const VarData *src,
2713 		  const hb_inc_bimap_t &inner_map,
2714 		  const hb_bimap_t &region_map)
2715   {
2716     TRACE_SERIALIZE (this);
2717     if (unlikely (!c->extend_min (this))) return_trace (false);
2718     itemCount = inner_map.get_next_value ();
2719 
2720     /* Optimize short count */
2721     unsigned short ri_count = src->regionIndices.len;
2722     enum delta_size_t { kZero=0, kByte, kShort };
2723     hb_vector_t<delta_size_t> delta_sz;
2724     hb_vector_t<unsigned int> ri_map;	/* maps old index to new index */
2725     delta_sz.resize (ri_count);
2726     ri_map.resize (ri_count);
2727     unsigned int new_short_count = 0;
2728     unsigned int r;
2729     for (r = 0; r < ri_count; r++)
2730     {
2731       delta_sz[r] = kZero;
2732       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2733       {
2734 	unsigned int old = inner_map.backward (i);
2735 	int16_t delta = src->get_item_delta (old, r);
2736 	if (delta < -128 || 127 < delta)
2737 	{
2738 	  delta_sz[r] = kShort;
2739 	  new_short_count++;
2740 	  break;
2741 	}
2742 	else if (delta != 0)
2743 	  delta_sz[r] = kByte;
2744       }
2745     }
2746     unsigned int short_index = 0;
2747     unsigned int byte_index = new_short_count;
2748     unsigned int new_ri_count = 0;
2749     for (r = 0; r < ri_count; r++)
2750       if (delta_sz[r])
2751       {
2752 	ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
2753 	new_ri_count++;
2754       }
2755 
2756     shortCount = new_short_count;
2757     regionIndices.len = new_ri_count;
2758 
2759     if (unlikely (!c->extend (this))) return_trace (false);
2760 
2761     for (r = 0; r < ri_count; r++)
2762       if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
2763 
2764     for (unsigned int i = 0; i < itemCount; i++)
2765     {
2766       unsigned int	old = inner_map.backward (i);
2767       for (unsigned int r = 0; r < ri_count; r++)
2768 	if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
2769     }
2770 
2771     return_trace (true);
2772   }
2773 
collect_region_refsOT::VarData2774   void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
2775   {
2776     for (unsigned int r = 0; r < regionIndices.len; r++)
2777     {
2778       unsigned int region = regionIndices[r];
2779       if (region_indices.has (region)) continue;
2780       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2781 	if (get_item_delta (inner_map.backward (i), r) != 0)
2782 	{
2783 	  region_indices.add (region);
2784 	  break;
2785 	}
2786     }
2787   }
2788 
2789   protected:
get_delta_bytesOT::VarData2790   const HBUINT8 *get_delta_bytes () const
2791   { return &StructAfter<HBUINT8> (regionIndices); }
2792 
get_delta_bytesOT::VarData2793   HBUINT8 *get_delta_bytes ()
2794   { return &StructAfter<HBUINT8> (regionIndices); }
2795 
get_item_deltaOT::VarData2796   int16_t get_item_delta (unsigned int item, unsigned int region) const
2797   {
2798     if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
2799     const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
2800     if (region < shortCount)
2801       return ((const HBINT16 *)p)[region];
2802     else
2803       return (p + HBINT16::static_size * shortCount)[region - shortCount];
2804   }
2805 
set_item_deltaOT::VarData2806   void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
2807   {
2808     HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
2809     if (region < shortCount)
2810       ((HBINT16 *)p)[region] = delta;
2811     else
2812       (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
2813   }
2814 
2815   protected:
2816   HBUINT16		itemCount;
2817   HBUINT16		shortCount;
2818   Array16Of<HBUINT16>	regionIndices;
2819 /*UnsizedArrayOf<HBUINT8>bytesX;*/
2820   public:
2821   DEFINE_SIZE_ARRAY (6, regionIndices);
2822 };
2823 
/* OpenType Item Variation Store: a shared VarRegionList plus a list of
 * VarData subtables.  Deltas are addressed by a 32-bit index whose high
 * 16 bits select the VarData (outer) and low 16 bits the item (inner). */
struct VariationStore
{
  private:
  float get_delta (unsigned int outer, unsigned int inner,
		   const int *coords, unsigned int coord_count) const
  {
#ifdef HB_NO_VAR
    return 0.f;	/* Variations compiled out; code below is intentionally unreachable. */
#endif

    if (unlikely (outer >= dataSets.len))
      return 0.f;

    return (this+dataSets[outer]).get_delta (inner,
					     coords, coord_count,
					     this+regions);
  }

  public:
  /* Split a packed 32-bit variation index into outer/inner parts. */
  float get_delta (unsigned int index,
		   const int *coords, unsigned int coord_count) const
  {
    unsigned int outer = index >> 16;
    unsigned int inner = index & 0xFFFF;
    return get_delta (outer, inner, coords, coord_count);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
#ifdef HB_NO_VAR
    return true;	/* Nothing will be read from this table. */
#endif

    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  format == 1 &&
		  regions.sanitize (c, this) &&
		  dataSets.sanitize (c, this));
  }

  /* Serialize a pruned copy of src: one inner_map per source VarData
   * selects the items to keep; empty VarDatas are dropped, and the
   * region list is rebuilt to contain only referenced regions. */
  bool serialize (hb_serialize_context_t *c,
		  const VariationStore *src,
		  const hb_array_t <hb_inc_bimap_t> &inner_maps)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    unsigned int set_count = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      if (inner_maps[i].get_population ())
	set_count++;

    format = 1;

    const auto &src_regions = src+src->regions;

    /* Collect the regions actually used by any kept item. */
    hb_set_t region_indices;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]);

    if (region_indices.in_error ())
      return_trace (false);

    /* Drop any out-of-range region references. */
    region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID);

    /* TODO use constructor when our data-structures support that. */
    hb_inc_bimap_t region_map;
    + hb_iter (region_indices)
    | hb_apply ([&region_map] (unsigned _) { region_map.add(_); })
    ;
    if (region_map.in_error())
      return_trace (false);

    if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map)))
      return_trace (false);

    dataSets.len = set_count;
    if (unlikely (!c->extend (dataSets))) return_trace (false);

    /* TODO: The following code could be simplified when
     * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */
    unsigned int set_index = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
    {
      if (!inner_maps[i].get_population ()) continue;
      if (unlikely (!dataSets[set_index++]
		     .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
	return_trace (false);
    }

    return_trace (true);
  }

  /* Subset keeping only the variation indices recorded in the plan. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
    if (unlikely (!varstore_prime)) return_trace (false);

    const hb_set_t *variation_indices = c->plan->layout_variation_indices;
    if (variation_indices->is_empty ()) return_trace (false);

    hb_vector_t<hb_inc_bimap_t> inner_maps;
    inner_maps.resize ((unsigned) dataSets.len);
    for (unsigned i = 0; i < inner_maps.length; i++)
      inner_maps[i].init ();

    /* Bucket each kept (major, minor) index into its VarData's inner map. */
    for (unsigned idx : c->plan->layout_variation_indices->iter ())
    {
      uint16_t major = idx >> 16;
      uint16_t minor = idx & 0xFFFF;

      if (major >= inner_maps.length)
      {
	for (unsigned i = 0; i < inner_maps.length; i++)
	  inner_maps[i].fini ();
	return_trace (false);
      }
      inner_maps[major].add (minor);
    }
    varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());

    for (unsigned i = 0; i < inner_maps.length; i++)
      inner_maps[i].fini ();

    return_trace (
        !c->serializer->in_error()
        && varstore_prime->dataSets);
  }

  unsigned int get_region_index_count (unsigned int major) const
  { return (this+dataSets[major]).get_region_index_count (); }

  void get_region_scalars (unsigned int major,
			   const int *coords, unsigned int coord_count,
			   float *scalars /*OUT*/,
			   unsigned int num_scalars) const
  {
#ifdef HB_NO_VAR
    for (unsigned i = 0; i < num_scalars; i++)
      scalars[i] = 0.f;
    return;
#endif

    (this+dataSets[major]).get_region_scalars (coords, coord_count,
					       this+regions,
					       &scalars[0], num_scalars);
  }

  unsigned int get_sub_table_count () const { return dataSets.len; }

  protected:
  HBUINT16				format;		/* Set to 1. */
  Offset32To<VarRegionList>		regions;	/* Shared region list. */
  Array16OfOffset32To<VarData>		dataSets;	/* Outer-indexed subtables. */
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
};
2983 
2984 /*
2985  * Feature Variations
2986  */
2987 
2988 struct ConditionFormat1
2989 {
2990   friend struct Condition;
2991 
subsetOT::ConditionFormat12992   bool subset (hb_subset_context_t *c) const
2993   {
2994     TRACE_SUBSET (this);
2995     auto *out = c->serializer->embed (this);
2996     if (unlikely (!out)) return_trace (false);
2997     return_trace (true);
2998   }
2999 
3000   private:
evaluateOT::ConditionFormat13001   bool evaluate (const int *coords, unsigned int coord_len) const
3002   {
3003     int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
3004     return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
3005   }
3006 
sanitizeOT::ConditionFormat13007   bool sanitize (hb_sanitize_context_t *c) const
3008   {
3009     TRACE_SANITIZE (this);
3010     return_trace (c->check_struct (this));
3011   }
3012 
3013   protected:
3014   HBUINT16	format;		/* Format identifier--format = 1 */
3015   HBUINT16	axisIndex;
3016   F2DOT14	filterRangeMinValue;
3017   F2DOT14	filterRangeMaxValue;
3018   public:
3019   DEFINE_SIZE_STATIC (8);
3020 };
3021 
/* Wrapper over the per-format condition tables; currently only
 * format 1 (axis range) exists.  Unknown formats evaluate to false. */
struct Condition
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;	/* Unknown format: condition never holds. */
    }
  }

  /* Standard format-dispatch boilerplate used by subset/sanitize contexts. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    default:return_trace (true);	/* Unknown formats are ignored, not rejected. */
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ConditionFormat1	format1;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
3061 
3062 struct ConditionSet
3063 {
evaluateOT::ConditionSet3064   bool evaluate (const int *coords, unsigned int coord_len) const
3065   {
3066     unsigned int count = conditions.len;
3067     for (unsigned int i = 0; i < count; i++)
3068       if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
3069 	return false;
3070     return true;
3071   }
3072 
subsetOT::ConditionSet3073   bool subset (hb_subset_context_t *c) const
3074   {
3075     TRACE_SUBSET (this);
3076     auto *out = c->serializer->start_embed (this);
3077     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3078 
3079     + conditions.iter ()
3080     | hb_apply (subset_offset_array (c, out->conditions, this))
3081     ;
3082 
3083     return_trace (bool (out->conditions));
3084   }
3085 
sanitizeOT::ConditionSet3086   bool sanitize (hb_sanitize_context_t *c) const
3087   {
3088     TRACE_SANITIZE (this);
3089     return_trace (conditions.sanitize (c, this));
3090   }
3091 
3092   protected:
3093   Array16OfOffset32To<Condition>	conditions;
3094   public:
3095   DEFINE_SIZE_ARRAY (2, conditions);
3096 };
3097 
/* One (featureIndex -> alternate Feature table) substitution entry. */
struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  /* Add the lookups of the substituting feature to lookup_indexes. */
  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  /* Mark featureIndex live if its substitute feature uses any retained lookup. */
  void closure_features (const void *base,
			 const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    if (!c->feature_index_map->has (featureIndex)) {
      // Feature that is being substituted is not being retained, so we don't
      // need this.
      return_trace (false);
    }

    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* Renumber the feature index into the subset plan's index space. */
    out->featureIndex = c->feature_index_map->get (featureIndex);
    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16		featureIndex;	/* Feature being substituted. */
  Offset32To<Feature>	feature;	/* Replacement Feature table. */
  public:
  DEFINE_SIZE_STATIC (6);
};
3144 
/* A list of feature substitutions applied when the owning record's
 * condition set matches. */
struct FeatureTableSubstitution
{
  /* Return the replacement Feature for feature_index, or nullptr if
   * this substitution does not touch that feature. */
  const Feature *find_substitute (unsigned int feature_index) const
  {
    unsigned int count = substitutions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
      if (record.featureIndex == feature_index)
	return &(this+record.feature);
    }
    return nullptr;
  }

  /* Collect lookups for all substitutions of retained features. */
  void collect_lookups (const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    + hb_iter (substitutions)
    | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
    | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
		{ r.collect_lookups (this, lookup_indexes); })
    ;
  }

  void closure_features (const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  /* True if any substituted feature survives the subset plan. */
  bool intersects_features (const hb_map_t *feature_index_map) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
    {
      if (feature_index_map->has (record.featureIndex)) return true;
    }
    return false;
  }

  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    + substitutions.iter ()
    | hb_apply (subset_record_array (l, &(out->substitutions), this))
    ;

    /* Fail (and let the caller drop this table) if nothing survived. */
    return_trace (bool (out->substitutions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  substitutions.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array16Of<FeatureTableSubstitutionRecord>
			substitutions;
  public:
  DEFINE_SIZE_ARRAY (6, substitutions);
};
3217 
/* Pairs a ConditionSet with the FeatureTableSubstitution to apply when
 * the conditions hold. */
struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  void collect_lookups (const void     *base,
			const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
  }

  void closure_features (const void     *base,
			 const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
  {
    return (base+substitutions).intersects_features (feature_index_map);
  }

  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    /* NOTE(review): the serialize_subset return values are ignored here;
     * failures surface later through the serializer's error state —
     * confirm that is the intended contract. */
    out->conditions.serialize_subset (c->subset_context, conditions, base);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
		  substitutions.sanitize (c, base));
  }

  protected:
  Offset32To<ConditionSet>
			conditions;
  Offset32To<FeatureTableSubstitution>
			substitutions;
  public:
  DEFINE_SIZE_STATIC (8);
};
3268 
/* Top-level FeatureVariations table: an ordered list of records; the
 * first record whose condition set matches the current coordinates wins. */
struct FeatureVariations
{
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;

  /* Find the first matching record; *index gets NOT_FOUND_INDEX on miss. */
  bool find_index (const int *coords, unsigned int coord_len,
		   unsigned int *index) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len))
      {
	*index = i;
	return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }

  const Feature *find_substitute (unsigned int variations_index,
				  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  FeatureVariations* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  void collect_lookups (const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& r : varRecords)
      r.collect_lookups (this, feature_indexes, lookup_indexes);
  }

  void closure_features (const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& record : varRecords)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    /* Find the last record that intersects the retained features; records
     * after it can be dropped, but earlier non-intersecting records must
     * be kept so record order (and first-match semantics) is preserved. */
    int keep_up_to = -1;
    for (int i = varRecords.len - 1; i >= 0; i--) {
      if (varRecords[i].intersects_features (this, l->feature_index_map)) {
        keep_up_to = i;
        break;
      }
    }

    unsigned count = (unsigned) (keep_up_to + 1);
    for (unsigned i = 0; i < count; i++) {
      subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
    }
    return_trace (bool (out->varRecords));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array32Of<FeatureVariationRecord>
			varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};
3357 
3358 
3359 /*
3360  * Device Tables
3361  */
3362 
/* Classic (hinted) Device table: per-ppem pixel adjustments packed as
 * 2-, 4-, or 8-bit signed values, one per ppem size in [startSize, endSize]. */
struct HintingDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  public:

  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    /* Malformed table: report just the three header words. */
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    /* Header (3 words) + one length word's worth of packing math:
     * (16 >> f) delta values fit per 16-bit word. */
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  HintingDevice* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<HintingDevice> (this));
  }

  private:

  /* Scale the per-ppem pixel delta into font units of position. */
  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    return (int) (pixels * (int64_t) scale / ppem);
  }
  /* Extract the signed packed delta for ppem_size; 0 when the size is
   * outside [startSize, endSize] or the format is invalid. */
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    /* Each value is (1 << f) bits wide; (16 >> f) values per word. */
    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    /* Sign-extend the (1 << f)-bit two's-complement value. */
    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }

  protected:
  HBUINT16	startSize;		/* Smallest size to correct--in ppem */
  HBUINT16	endSize;		/* Largest size to correct--in ppem */
  HBUINT16	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
					 * 1	Signed 2-bit value, 8 values per uint16
					 * 2	Signed 4-bit value, 4 values per uint16
					 * 3	Signed 8-bit value, 2 values per uint16
					 */
  UnsizedArrayOf<HBUINT16>
		deltaValueZ;		/* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValueZ);
};
3444 
/* Variation-font Device table (deltaFormat 0x8000): the delta comes
 * from the font's VariationStore, addressed by varIdx. */
struct VariationDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_x (get_delta (font, store)); }

  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_y (get_delta (font, store)); }

  /* Copy, remapping varIdx through layout_variation_idx_map; returns
   * nullptr (and reverts the serializer) if the index was not retained. */
  VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    auto snap = c->snapshot ();
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);
    if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);

    /* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
    if (!layout_variation_idx_map->has (varIdx))
    {
      c->revert (snap);
      return_trace (nullptr);
    }
    unsigned new_idx = layout_variation_idx_map->get (varIdx);
    out->varIdx = new_idx;
    return_trace (out);
  }

  void record_variation_index (hb_set_t *layout_variation_indices) const
  {
    layout_variation_indices->add (varIdx);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font, const VariationStore &store) const
  {
    return store.get_delta (varIdx, font->coords, font->num_coords);
  }

  protected:
  VarIdx	varIdx;		/* Packed outer/inner variation index. */
  HBUINT16	deltaFormat;	/* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};
3500 
/* Common prefix of all Device variants: only the third word (format)
 * is meaningful for telling HintingDevice and VariationDevice apart. */
struct DeviceHeader
{
  protected:
  HBUINT16		reserved1;
  HBUINT16		reserved2;
  public:
  HBUINT16		format;		/* Format identifier */
  public:
  DEFINE_SIZE_STATIC (6);
};
3511 
3512 struct Device
3513 {
get_x_deltaOT::Device3514   hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
3515   {
3516     switch (u.b.format)
3517     {
3518 #ifndef HB_NO_HINTING
3519     case 1: case 2: case 3:
3520       return u.hinting.get_x_delta (font);
3521 #endif
3522 #ifndef HB_NO_VAR
3523     case 0x8000:
3524       return u.variation.get_x_delta (font, store);
3525 #endif
3526     default:
3527       return 0;
3528     }
3529   }
get_y_deltaOT::Device3530   hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
3531   {
3532     switch (u.b.format)
3533     {
3534     case 1: case 2: case 3:
3535 #ifndef HB_NO_HINTING
3536       return u.hinting.get_y_delta (font);
3537 #endif
3538 #ifndef HB_NO_VAR
3539     case 0x8000:
3540       return u.variation.get_y_delta (font, store);
3541 #endif
3542     default:
3543       return 0;
3544     }
3545   }
3546 
sanitizeOT::Device3547   bool sanitize (hb_sanitize_context_t *c) const
3548   {
3549     TRACE_SANITIZE (this);
3550     if (!u.b.format.sanitize (c)) return_trace (false);
3551     switch (u.b.format) {
3552 #ifndef HB_NO_HINTING
3553     case 1: case 2: case 3:
3554       return_trace (u.hinting.sanitize (c));
3555 #endif
3556 #ifndef HB_NO_VAR
3557     case 0x8000:
3558       return_trace (u.variation.sanitize (c));
3559 #endif
3560     default:
3561       return_trace (true);
3562     }
3563   }
3564 
copyOT::Device3565   Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
3566   {
3567     TRACE_SERIALIZE (this);
3568     switch (u.b.format) {
3569 #ifndef HB_NO_HINTING
3570     case 1:
3571     case 2:
3572     case 3:
3573       return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
3574 #endif
3575 #ifndef HB_NO_VAR
3576     case 0x8000:
3577       return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
3578 #endif
3579     default:
3580       return_trace (nullptr);
3581     }
3582   }
3583 
collect_variation_indicesOT::Device3584   void collect_variation_indices (hb_set_t *layout_variation_indices) const
3585   {
3586     switch (u.b.format) {
3587 #ifndef HB_NO_HINTING
3588     case 1:
3589     case 2:
3590     case 3:
3591       return;
3592 #endif
3593 #ifndef HB_NO_VAR
3594     case 0x8000:
3595       u.variation.record_variation_index (layout_variation_indices);
3596       return;
3597 #endif
3598     default:
3599       return;
3600     }
3601   }
3602 
3603   protected:
3604   union {
3605   DeviceHeader		b;
3606   HintingDevice		hinting;
3607 #ifndef HB_NO_VAR
3608   VariationDevice	variation;
3609 #endif
3610   } u;
3611   public:
3612   DEFINE_SIZE_UNION (6, b);
3613 };
3614 
3615 
3616 } /* namespace OT */
3617 
3618 
3619 #endif /* HB_OT_LAYOUT_COMMON_HH */
3620