1 /*
2 * Copyright © 2007,2008,2009 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
31
32 #include "hb.hh"
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
35 #include "hb-set.hh"
36
37
38 #ifndef HB_MAX_NESTING_LEVEL
39 #define HB_MAX_NESTING_LEVEL 6
40 #endif
41 #ifndef HB_MAX_CONTEXT_LENGTH
42 #define HB_MAX_CONTEXT_LENGTH 64
43 #endif
44 #ifndef HB_CLOSURE_MAX_STAGES
45 /*
46 * The maximum number of times a lookup can be applied during shaping.
47 * Used to limit the number of iterations of the closure algorithm.
48 * This must be larger than the number of times add_pause() is
49 * called in a collect_features call of any shaper.
50 */
51 #define HB_CLOSURE_MAX_STAGES 32
52 #endif
53
54 #ifndef HB_MAX_SCRIPTS
55 #define HB_MAX_SCRIPTS 500
56 #endif
57
58 #ifndef HB_MAX_LANGSYS
59 #define HB_MAX_LANGSYS 2000
60 #endif
61
62
63 namespace OT {
64
65
66 #define NOT_COVERED ((unsigned int) -1)
67
68
69
70 /*
71 *
72 * OpenType Layout Common Table Formats
73 *
74 */
75
76
77 /*
78 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
79 */
80
81 struct Record_sanitize_closure_t {
82 hb_tag_t tag;
83 const void *list_base;
84 };
85
86 template <typename Type>
87 struct Record
88 {
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
92 {
93 TRACE_SANITIZE (this);
94 const Record_sanitize_closure_t closure = {tag, base};
95 return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
96 }
97
98 Tag tag; /* 4-byte Tag identifier */
99 OffsetTo<Type>
100 offset; /* Offset from beginning of object holding
101 * the Record */
102 public:
103 DEFINE_SIZE_STATIC (6);
104 };
105
106 template <typename Type>
107 struct RecordArrayOf : SortedArrayOf<Record<Type> >
108 {
  const OffsetTo<Type>& get_offset (unsigned int i) const
  { return (*this)[i].offset; }
  OffsetTo<Type>& get_offset (unsigned int i)
  { return (*this)[i].offset; }
  const Tag& get_tag (unsigned int i) const
  { return (*this)[i].tag; }
  unsigned int get_tags (unsigned int start_offset,
                         unsigned int *record_count /* IN/OUT */,
                         hb_tag_t *record_tags /* OUT */) const
118 {
119 if (record_count) {
120 const Record<Type> *arr = this->sub_array (start_offset, record_count);
121 unsigned int count = *record_count;
122 for (unsigned int i = 0; i < count; i++)
123 record_tags[i] = arr[i].tag;
124 }
125 return this->len;
126 }
  bool find_index (hb_tag_t tag, unsigned int *index) const
128 {
129 return this->bfind (tag, index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
130 }
131 };
132
133 template <typename Type>
134 struct RecordListOf : RecordArrayOf<Type>
135 {
  const Type& operator [] (unsigned int i) const
  { return this+this->get_offset (i); }

  bool subset (hb_subset_context_t *c) const
140 {
141 TRACE_SUBSET (this);
142 struct RecordListOf<Type> *out = c->serializer->embed (*this);
143 if (unlikely (!out)) return_trace (false);
144 unsigned int count = this->len;
145 for (unsigned int i = 0; i < count; i++)
146 out->get_offset (i).serialize_subset (c, (*this)[i], out);
147 return_trace (true);
148 }
149
  bool sanitize (hb_sanitize_context_t *c) const
151 {
152 TRACE_SANITIZE (this);
153 return_trace (RecordArrayOf<Type>::sanitize (c, this));
154 }
155 };
156
157
158 struct RangeRecord
159 {
  int cmp (hb_codepoint_t g) const
  { return g < start ? -1 : g <= end ? 0 : +1; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  bool intersects (const hb_set_t *glyphs) const
  { return glyphs->intersects (start, end); }

  template <typename set_t>
  bool add_coverage (set_t *glyphs) const
  { return glyphs->add_range (start, end); }
175
176 GlyphID start; /* First GlyphID in the range */
177 GlyphID end; /* Last GlyphID in the range */
178 HBUINT16 value; /* Value */
179 public:
180 DEFINE_SIZE_STATIC (6);
181 };
182 DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);
183
184
185 struct IndexArray : ArrayOf<Index>
186 {
  unsigned int get_indexes (unsigned int start_offset,
                            unsigned int *_count /* IN/OUT */,
                            unsigned int *_indexes /* OUT */) const
190 {
191 if (_count) {
192 const HBUINT16 *arr = this->sub_array (start_offset, _count);
193 unsigned int count = *_count;
194 for (unsigned int i = 0; i < count; i++)
195 _indexes[i] = arr[i];
196 }
197 return this->len;
198 }
199
  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (arrayZ, len);
  }
204 };
205
206
207 struct Script;
208 struct LangSys;
209 struct Feature;
210
211
212 struct LangSys
213 {
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
                                    unsigned int *feature_count /* IN/OUT */,
                                    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
    return reqFeatureIndex;
  }
232
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace (c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c,
                 const Record_sanitize_closure_t * = nullptr) const
241 {
242 TRACE_SANITIZE (this);
243 return_trace (c->check_struct (this) && featureIndex.sanitize (c));
244 }
245
246 Offset16 lookupOrderZ; /* = Null (reserved for an offset to a
247 * reordering table) */
248 HBUINT16 reqFeatureIndex;/* Index of a feature required for this
249 * language system--if no required features
250 * = 0xFFFFu */
251 IndexArray featureIndex; /* Array of indices into the FeatureList */
252 public:
253 DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
254 };
255 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
256
257 struct Script
258 {
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
                                  unsigned int *lang_sys_count /* IN/OUT */,
                                  hb_tag_t *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  bool subset (hb_subset_context_t *c) const
279 {
280 TRACE_SUBSET (this);
281 struct Script *out = c->serializer->embed (*this);
282 if (unlikely (!out)) return_trace (false);
283 out->defaultLangSys.serialize_subset (c, this+defaultLangSys, out);
284 unsigned int count = langSys.len;
285 for (unsigned int i = 0; i < count; i++)
286 out->langSys.arrayZ[i].offset.serialize_subset (c, this+langSys[i].offset, out);
287 return_trace (true);
288 }
289
  bool sanitize (hb_sanitize_context_t *c,
                 const Record_sanitize_closure_t * = nullptr) const
292 {
293 TRACE_SANITIZE (this);
294 return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
295 }
296
297 protected:
298 OffsetTo<LangSys>
299 defaultLangSys; /* Offset to DefaultLangSys table--from
300 * beginning of Script table--may be Null */
301 RecordArrayOf<LangSys>
302 langSys; /* Array of LangSysRecords--listed
303 * alphabetically by LangSysTag */
304 public:
305 DEFINE_SIZE_ARRAY_SIZED (4, langSys);
306 };
307
308 typedef RecordListOf<Script> ScriptList;
309
310
311 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
312 struct FeatureParamsSize
313 {
  bool sanitize (hb_sanitize_context_t *c) const
315 {
316 TRACE_SANITIZE (this);
317 if (unlikely (!c->check_struct (this))) return_trace (false);
318
319 /* This subtable has some "history", if you will. Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
321 * beginning of the FeatureList table! Now, that is dealt with in the
322 * Feature implementation. But we still need to be able to tell junk from
323 * real data. Note: We don't check that the nameID actually exists.
324 *
325 * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
326 *
327 * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
328 * coming out soon, and that the makeotf program will build a font with a
329 * 'size' feature that is correct by the specification.
330 *
331 * The specification for this feature tag is in the "OpenType Layout Tag
332 * Registry". You can see a copy of this at:
333 * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
334 *
335 * Here is one set of rules to determine if the 'size' feature is built
336 * correctly, or as by the older versions of MakeOTF. You may be able to do
337 * better.
338 *
339 * Assume that the offset to the size feature is according to specification,
340 * and make the following value checks. If it fails, assume the size
341 * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
342 * If this fails, reject the 'size' feature. The older makeOTF's calculated the
343 * offset from the beginning of the FeatureList table, rather than from the
344 * beginning of the 'size' Feature table.
345 *
346 * If "design size" == 0:
347 * fails check
348 *
349 * Else if ("subfamily identifier" == 0 and
350 * "range start" == 0 and
351 * "range end" == 0 and
352 * "range start" == 0 and
353 * "menu name ID" == 0)
354 * passes check: this is the format used when there is a design size
355 * specified, but there is no recommended size range.
356 *
357 * Else if ("design size" < "range start" or
358 * "design size" > "range end" or
359 * "range end" <= "range start" or
360 * "menu name ID" < 256 or
361 * "menu name ID" > 32767 or
362 * menu name ID is not a name ID which is actually in the name table)
363 * fails test
364 * Else
365 * passes test.
366 */
367
368 if (!designSize)
369 return_trace (false);
370 else if (subfamilyID == 0 &&
371 subfamilyNameID == 0 &&
372 rangeStart == 0 &&
373 rangeEnd == 0)
374 return_trace (true);
375 else if (designSize < rangeStart ||
376 designSize > rangeEnd ||
377 subfamilyNameID < 256 ||
378 subfamilyNameID > 32767)
379 return_trace (false);
380 else
381 return_trace (true);
382 }
383
384 HBUINT16 designSize; /* Represents the design size in 720/inch
385 * units (decipoints). The design size entry
386 * must be non-zero. When there is a design
387 * size but no recommended size range, the
388 * rest of the array will consist of zeros. */
389 HBUINT16 subfamilyID; /* Has no independent meaning, but serves
390 * as an identifier that associates fonts
391 * in a subfamily. All fonts which share a
392 * Preferred or Font Family name and which
393 * differ only by size range shall have the
394 * same subfamily value, and no fonts which
395 * differ in weight or style shall have the
396 * same subfamily value. If this value is
397 * zero, the remaining fields in the array
398 * will be ignored. */
399 NameID subfamilyNameID;/* If the preceding value is non-zero, this
400 * value must be set in the range 256 - 32767
401 * (inclusive). It records the value of a
402 * field in the name table, which must
403 * contain English-language strings encoded
404 * in Windows Unicode and Macintosh Roman,
405 * and may contain additional strings
406 * localized to other scripts and languages.
407 * Each of these strings is the name an
408 * application should use, in combination
409 * with the family name, to represent the
410 * subfamily in a menu. Applications will
411 * choose the appropriate version based on
412 * their selection criteria. */
  HBUINT16      rangeStart;     /* Small end of the recommended usage range
                                 * (exclusive), stored in 720/inch units
                                 * (decipoints). */
  HBUINT16      rangeEnd;       /* Large end of the recommended usage range
                                 * (inclusive), stored in 720/inch units
                                 * (decipoints). */
419 public:
420 DEFINE_SIZE_STATIC (10);
421 };
422
423 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
424 struct FeatureParamsStylisticSet
425 {
  bool sanitize (hb_sanitize_context_t *c) const
427 {
428 TRACE_SANITIZE (this);
429 /* Right now minorVersion is at zero. Which means, any table supports
430 * the uiNameID field. */
431 return_trace (c->check_struct (this));
432 }
433
434 HBUINT16 version; /* (set to 0): This corresponds to a “minor”
435 * version number. Additional data may be
436 * added to the end of this Feature Parameters
437 * table in the future. */
438
439 NameID uiNameID; /* The 'name' table name ID that specifies a
440 * string (or strings, for multiple languages)
441 * for a user-interface label for this
442 * feature. The values of uiLabelNameId and
443 * sampleTextNameId are expected to be in the
444 * font-specific name ID range (256-32767),
445 * though that is not a requirement in this
446 * Feature Parameters specification. The
447 * user-interface label for the feature can
448 * be provided in multiple languages. An
449 * English string should be included as a
450 * fallback. The string should be kept to a
451 * minimal length to fit comfortably with
452 * different application interfaces. */
453 public:
454 DEFINE_SIZE_STATIC (4);
455 };
456
457 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
458 struct FeatureParamsCharacterVariants
459 {
  bool sanitize (hb_sanitize_context_t *c) const
461 {
462 TRACE_SANITIZE (this);
463 return_trace (c->check_struct (this) &&
464 characters.sanitize (c));
465 }
466
467 HBUINT16 format; /* Format number is set to 0. */
468 NameID featUILableNameID; /* The ‘name’ table name ID that
469 * specifies a string (or strings,
470 * for multiple languages) for a
471 * user-interface label for this
472 * feature. (May be NULL.) */
473 NameID featUITooltipTextNameID;/* The ‘name’ table name ID that
474 * specifies a string (or strings,
475 * for multiple languages) that an
476 * application can use for tooltip
477 * text for this feature. (May be
478 * nullptr.) */
479 NameID sampleTextNameID; /* The ‘name’ table name ID that
480 * specifies sample text that
481 * illustrates the effect of this
482 * feature. (May be NULL.) */
483 HBUINT16 numNamedParameters; /* Number of named parameters. (May
484 * be zero.) */
485 NameID firstParamUILabelNameID;/* The first ‘name’ table name ID
486 * used to specify strings for
487 * user-interface labels for the
488 * feature parameters. (Must be zero
489 * if numParameters is zero.) */
490 ArrayOf<HBUINT24>
491 characters; /* Array of the Unicode Scalar Value
492 * of the characters for which this
493 * feature provides glyph variants.
494 * (May be zero.) */
495 public:
496 DEFINE_SIZE_ARRAY (14, characters);
497 };
498
499 struct FeatureParams
500 {
  bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
502 {
503 TRACE_SANITIZE (this);
504 if (tag == HB_TAG ('s','i','z','e'))
505 return_trace (u.size.sanitize (c));
506 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
507 return_trace (u.stylisticSet.sanitize (c));
508 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
509 return_trace (u.characterVariants.sanitize (c));
510 return_trace (true);
511 }
512
  const FeatureParamsSize& get_size_params (hb_tag_t tag) const
514 {
515 if (tag == HB_TAG ('s','i','z','e'))
516 return u.size;
517 return Null (FeatureParamsSize);
518 }
519
  const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
521 {
522 if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
523 return u.stylisticSet;
524 return Null (FeatureParamsStylisticSet);
525 }
526
  const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
528 {
529 if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
530 return u.characterVariants;
531 return Null (FeatureParamsCharacterVariants);
532 }
533
534 private:
535 union {
536 FeatureParamsSize size;
537 FeatureParamsStylisticSet stylisticSet;
538 FeatureParamsCharacterVariants characterVariants;
539 } u;
540 public:
541 DEFINE_SIZE_STATIC (17);
542 };
543
544 struct Feature
545 {
  unsigned int get_lookup_count () const
  { return lookupIndex.len; }
  hb_tag_t get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  unsigned int get_lookup_indexes (unsigned int start_index,
                                   unsigned int *lookup_count /* IN/OUT */,
                                   unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
  { lookupIndex.add_indexes_to (lookup_indexes); }

  const FeatureParams &get_feature_params () const
558 { return this+featureParams; }
559
  bool subset (hb_subset_context_t *c) const
561 {
562 TRACE_SUBSET (this);
563 struct Feature *out = c->serializer->embed (*this);
564 if (unlikely (!out)) return_trace (false);
565 out->featureParams.set (0); /* TODO(subset) FeatureParams. */
566 return_trace (true);
567 }
568
  bool sanitize (hb_sanitize_context_t *c,
                 const Record_sanitize_closure_t *closure = nullptr) const
571 {
572 TRACE_SANITIZE (this);
573 if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
574 return_trace (false);
575
576 /* Some earlier versions of Adobe tools calculated the offset of the
577 * FeatureParams subtable from the beginning of the FeatureList table!
578 *
579 * If sanitizing "failed" for the FeatureParams subtable, try it with the
580 * alternative location. We would know sanitize "failed" if old value
581 * of the offset was non-zero, but it's zeroed now.
582 *
583 * Only do this for the 'size' feature, since at the time of the faulty
584 * Adobe tools, only the 'size' feature had FeatureParams defined.
585 */
586
587 OffsetTo<FeatureParams> orig_offset = featureParams;
588 if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
589 return_trace (false);
590
591 if (likely (orig_offset.is_null ()))
592 return_trace (true);
593
594 if (featureParams == 0 && closure &&
595 closure->tag == HB_TAG ('s','i','z','e') &&
596 closure->list_base && closure->list_base < this)
597 {
598 unsigned int new_offset_int = (unsigned int) orig_offset -
599 (((char *) this) - ((char *) closure->list_base));
600
601 OffsetTo<FeatureParams> new_offset;
602 /* Check that it did not overflow. */
603 new_offset.set (new_offset_int);
604 if (new_offset == new_offset_int &&
605 c->try_set (&featureParams, new_offset) &&
606 !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
607 return_trace (false);
608 }
609
610 return_trace (true);
611 }
612
613 OffsetTo<FeatureParams>
614 featureParams; /* Offset to Feature Parameters table (if one
615 * has been defined for the feature), relative
616 * to the beginning of the Feature Table; = Null
617 * if not required */
618 IndexArray lookupIndex; /* Array of LookupList indices */
619 public:
620 DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
621 };
622
623 typedef RecordListOf<Feature> FeatureList;
624
625
626 struct LookupFlag : HBUINT16
627 {
628 enum Flags {
629 RightToLeft = 0x0001u,
630 IgnoreBaseGlyphs = 0x0002u,
631 IgnoreLigatures = 0x0004u,
632 IgnoreMarks = 0x0008u,
633 IgnoreFlags = 0x000Eu,
634 UseMarkFilteringSet = 0x0010u,
635 Reserved = 0x00E0u,
636 MarkAttachmentType = 0xFF00u
637 };
638 public:
639 DEFINE_SIZE_STATIC (2);
640 };
641
642 } /* namespace OT */
643 /* This has to be outside the namespace. */
644 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
645 namespace OT {
646
647 struct Lookup
648 {
  unsigned int get_subtable_count () const { return subTable.len; }

  template <typename TSubTable>
  const TSubTable& get_subtable (unsigned int i) const
  { return this+CastR<OffsetArrayOf<TSubTable> > (subTable)[i]; }

  template <typename TSubTable>
  const OffsetArrayOf<TSubTable>& get_subtables () const
  { return CastR<OffsetArrayOf<TSubTable> > (subTable); }
  template <typename TSubTable>
  OffsetArrayOf<TSubTable>& get_subtables ()
  { return CastR<OffsetArrayOf<TSubTable> > (subTable); }

  unsigned int get_size () const
663 {
664 const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
665 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
666 return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
667 return (const char *) &markFilteringSet - (const char *) this;
668 }
669
  unsigned int get_type () const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
   * higher 16-bit is mark-filtering-set if the lookup uses one.
   * Not to be confused with glyph_props which is very similar. */
  uint32_t get_props () const
676 {
677 unsigned int flag = lookupFlag;
678 if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
679 {
680 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
681 flag += (markFilteringSet << 16);
682 }
683 return flag;
684 }
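
  /* Illustrative sketch (assuming a caller-provided `lookup`): splitting the
   * packed value returned by get_props () back into its two components.
   *
   *   uint32_t props = lookup.get_props ();
   *   unsigned int flags    = props & 0xFFFFu;  // LookupFlag bits
   *   unsigned int mark_set = props >> 16;      // GDEF mark-glyph-set index; only
   *                                             // meaningful when flags has
   *                                             // LookupFlag::UseMarkFilteringSet set
   */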
685
686 template <typename TSubTable, typename context_t>
  typename context_t::return_t dispatch (context_t *c) const
688 {
689 unsigned int lookup_type = get_type ();
690 TRACE_DISPATCH (this, lookup_type);
691 unsigned int count = get_subtable_count ();
692 for (unsigned int i = 0; i < count; i++) {
693 typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type);
694 if (c->stop_sublookup_iteration (r))
695 return_trace (r);
696 }
697 return_trace (c->default_return_value ());
698 }
699
  bool serialize (hb_serialize_context_t *c,
                  unsigned int lookup_type,
                  uint32_t lookup_props,
                  unsigned int num_subtables)
704 {
705 TRACE_SERIALIZE (this);
706 if (unlikely (!c->extend_min (*this))) return_trace (false);
707 lookupType.set (lookup_type);
708 lookupFlag.set (lookup_props & 0xFFFFu);
709 if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
710 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
711 {
712 if (unlikely (!c->extend (*this))) return_trace (false);
713 HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
714 markFilteringSet.set (lookup_props >> 16);
715 }
716 return_trace (true);
717 }
718
719 /* Older compilers need this to NOT be locally defined in a function. */
720 template <typename TSubTable>
721 struct SubTableSubsetWrapper
722 {
    SubTableSubsetWrapper (const TSubTable &subtable_,
                           unsigned int lookup_type_) :
                             subtable (subtable_),
                             lookup_type (lookup_type_) {}

    bool subset (hb_subset_context_t *c) const
    { return subtable.dispatch (c, lookup_type); }
730
731 private:
732 const TSubTable &subtable;
733 unsigned int lookup_type;
734 };
735
736 template <typename TSubTable>
  bool subset (hb_subset_context_t *c) const
738 {
739 TRACE_SUBSET (this);
740 struct Lookup *out = c->serializer->embed (*this);
741 if (unlikely (!out)) return_trace (false);
742
743 /* Subset the actual subtables. */
744 /* TODO Drop empty ones, either by calling intersects() beforehand,
745 * or just dropping null offsets after. */
746 const OffsetArrayOf<TSubTable>& subtables = get_subtables<TSubTable> ();
747 OffsetArrayOf<TSubTable>& out_subtables = out->get_subtables<TSubTable> ();
748 unsigned int count = subTable.len;
749 for (unsigned int i = 0; i < count; i++)
750 {
751 SubTableSubsetWrapper<TSubTable> wrapper (this+subtables[i], get_type ());
752
753 out_subtables[i].serialize_subset (c, wrapper, out);
754 }
755
756 return_trace (true);
757 }
758
759 /* Older compilers need this to NOT be locally defined in a function. */
760 template <typename TSubTable>
761 struct SubTableSanitizeWrapper : TSubTable
762 {
    bool sanitize (hb_sanitize_context_t *c, unsigned int lookup_type) const
    { return this->dispatch (c, lookup_type); }
765 };
766
767 template <typename TSubTable>
  bool sanitize (hb_sanitize_context_t *c) const
769 {
770 TRACE_SANITIZE (this);
771 if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
772 if (lookupFlag & LookupFlag::UseMarkFilteringSet)
773 {
774 const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
775 if (!markFilteringSet.sanitize (c)) return_trace (false);
776 }
777
778 if (unlikely (!CastR<OffsetArrayOf<SubTableSanitizeWrapper<TSubTable> > > (subTable)
779 .sanitize (c, this, get_type ())))
780 return_trace (false);
781
782 if (unlikely (get_type () == TSubTable::Extension))
783 {
784 /* The spec says all subtables of an Extension lookup should
785 * have the same type, which shall not be the Extension type
786 * itself (but we already checked for that).
787 * This is specially important if one has a reverse type! */
788 unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
789 unsigned int count = get_subtable_count ();
790 for (unsigned int i = 1; i < count; i++)
791 if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
792 return_trace (false);
793 }
    return_trace (true);
796 }
797
798 private:
799 HBUINT16 lookupType; /* Different enumerations for GSUB and GPOS */
800 HBUINT16 lookupFlag; /* Lookup qualifiers */
801 ArrayOf<Offset16>
802 subTable; /* Array of SubTables */
803 /*HBUINT16 markFilteringSetX[VAR];*//* Index (base 0) into GDEF mark glyph sets
804 * structure. This field is only present if bit
805 * UseMarkFilteringSet of lookup flags is set. */
806 public:
807 DEFINE_SIZE_ARRAY (6, subTable);
808 };
809
810 typedef OffsetListOf<Lookup> LookupList;
811
812
813 /*
814 * Coverage Table
815 */
816
817 struct CoverageFormat1
818 {
819 friend struct Coverage;
820
821 private:
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
823 {
824 unsigned int i;
825 glyphArray.bfind (glyph_id, &i, HB_BFIND_NOT_FOUND_STORE, NOT_COVERED);
826 return i;
827 }
828
  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const GlyphID> glyphs)
831 {
832 TRACE_SERIALIZE (this);
833 return_trace (glyphArray.serialize (c, glyphs));
834 }
835
  bool sanitize (hb_sanitize_context_t *c) const
837 {
838 TRACE_SANITIZE (this);
839 return_trace (glyphArray.sanitize (c));
840 }
841
  bool intersects (const hb_set_t *glyphs) const
843 {
844 /* TODO Speed up, using hb_set_next() and bsearch()? */
845 unsigned int count = glyphArray.len;
846 for (unsigned int i = 0; i < count; i++)
847 if (glyphs->has (glyphArray[i]))
848 return true;
849 return false;
850 }
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  { return glyphs->has (glyphArray[index]); }
853
854 template <typename set_t>
  bool add_coverage (set_t *glyphs) const
856 {
857 return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len);
858 }
859
860 public:
861 /* Older compilers need this to be public. */
  struct Iter {
    void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
    void fini () {}
    bool more () { return i < c->glyphArray.len; }
    void next () { i++; }
    hb_codepoint_t get_glyph () { return c->glyphArray[i]; }
    unsigned int get_coverage () { return i; }

  private:
    const struct CoverageFormat1 *c;
    unsigned int i;
  };
874 private:
875
876 protected:
877 HBUINT16 coverageFormat; /* Format identifier--format = 1 */
878 SortedArrayOf<GlyphID>
879 glyphArray; /* Array of GlyphIDs--in numerical order */
880 public:
881 DEFINE_SIZE_ARRAY (4, glyphArray);
882 };
883
884 struct CoverageFormat2
885 {
886 friend struct Coverage;
887
888 private:
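  /* In format 2 each RangeRecord stores the coverage index of its first glyph
   * in `value`, so a covered glyph maps to value + (glyph_id - start), as
   * computed below. */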
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
890 {
891 const RangeRecord &range = rangeRecord.bsearch (glyph_id);
892 return likely (range.start <= range.end) ?
893 (unsigned int) range.value + (glyph_id - range.start) :
894 NOT_COVERED;
895 }
896
  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const GlyphID> glyphs)
899 {
900 TRACE_SERIALIZE (this);
901 if (unlikely (!c->extend_min (*this))) return_trace (false);
902
903 if (unlikely (!glyphs.len))
904 {
905 rangeRecord.len.set (0);
906 return_trace (true);
907 }
908
909 unsigned int num_ranges = 1;
910 for (unsigned int i = 1; i < glyphs.len; i++)
911 if (glyphs[i - 1] + 1 != glyphs[i])
912 num_ranges++;
913 rangeRecord.len.set (num_ranges);
914 if (unlikely (!c->extend (rangeRecord))) return_trace (false);
915
916 unsigned int range = 0;
917 rangeRecord[range].start = glyphs[0];
918 rangeRecord[range].value.set (0);
919 for (unsigned int i = 1; i < glyphs.len; i++)
920 {
921 if (glyphs[i - 1] + 1 != glyphs[i])
922 {
923 range++;
924 rangeRecord[range].start = glyphs[i];
925 rangeRecord[range].value.set (i);
926 }
927 rangeRecord[range].end = glyphs[i];
928 }
929 return_trace (true);
930 }
931
  bool sanitize (hb_sanitize_context_t *c) const
933 {
934 TRACE_SANITIZE (this);
935 return_trace (rangeRecord.sanitize (c));
936 }
937
  bool intersects (const hb_set_t *glyphs) const
939 {
940 /* TODO Speed up, using hb_set_next() and bsearch()? */
941 unsigned int count = rangeRecord.len;
942 for (unsigned int i = 0; i < count; i++)
943 if (rangeRecord[i].intersects (glyphs))
944 return true;
945 return false;
946 }
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
948 {
949 unsigned int i;
950 unsigned int count = rangeRecord.len;
951 for (i = 0; i < count; i++) {
952 const RangeRecord &range = rangeRecord[i];
953 if (range.value <= index &&
954 index < (unsigned int) range.value + (range.end - range.start) &&
955 range.intersects (glyphs))
956 return true;
957 else if (index < range.value)
958 return false;
959 }
960 return false;
961 }
962
963 template <typename set_t>
  bool add_coverage (set_t *glyphs) const
965 {
966 unsigned int count = rangeRecord.len;
967 for (unsigned int i = 0; i < count; i++)
968 if (unlikely (!rangeRecord[i].add_coverage (glyphs)))
969 return false;
970 return true;
971 }
972
973 public:
974 /* Older compilers need this to be public. */
975 struct Iter
976 {
    void init (const CoverageFormat2 &c_)
978 {
979 c = &c_;
980 coverage = 0;
981 i = 0;
982 j = c->rangeRecord.len ? c->rangeRecord[0].start : 0;
983 if (unlikely (c->rangeRecord[0].start > c->rangeRecord[0].end))
984 {
985 /* Broken table. Skip. */
986 i = c->rangeRecord.len;
987 }
988 }
    void fini () {}
    bool more () { return i < c->rangeRecord.len; }
    void next ()
992 {
993 if (j >= c->rangeRecord[i].end)
994 {
995 i++;
996 if (more ())
997 {
998 hb_codepoint_t old = j;
999 j = c->rangeRecord[i].start;
1000 if (unlikely (j <= old))
1001 {
1002 /* Broken table. Skip. Important to avoid DoS. */
1003 i = c->rangeRecord.len;
1004 return;
1005 }
1006 coverage = c->rangeRecord[i].value;
1007 }
1008 return;
1009 }
1010 coverage++;
1011 j++;
1012 }
    hb_codepoint_t get_glyph () { return j; }
    unsigned int get_coverage () { return coverage; }
1015
1016 private:
1017 const struct CoverageFormat2 *c;
1018 unsigned int i, coverage;
1019 hb_codepoint_t j;
1020 };
1021 private:
1022
1023 protected:
1024 HBUINT16 coverageFormat; /* Format identifier--format = 2 */
1025 SortedArrayOf<RangeRecord>
1026 rangeRecord; /* Array of glyph ranges--ordered by
1027 * Start GlyphID. rangeCount entries
1028 * long */
1029 public:
1030 DEFINE_SIZE_ARRAY (4, rangeRecord);
1031 };
1032
1033 struct Coverage
1034 {
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
1036 {
1037 switch (u.format) {
1038 case 1: return u.format1.get_coverage (glyph_id);
1039 case 2: return u.format2.get_coverage (glyph_id);
1040 default:return NOT_COVERED;
1041 }
1042 }
1043
  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const GlyphID> glyphs)
1046 {
1047 TRACE_SERIALIZE (this);
1048 if (unlikely (!c->extend_min (*this))) return_trace (false);
1049
1050 unsigned int num_ranges = 1;
1051 for (unsigned int i = 1; i < glyphs.len; i++)
1052 if (glyphs[i - 1] + 1 != glyphs[i])
1053 num_ranges++;
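    /* Format 1 stores one GlyphID (2 bytes) per glyph; format 2 stores one
     * RangeRecord (6 bytes) per range. Prefer format 1 only while
     * 2 * glyph-count < 3 * range-count. */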
1054 u.format.set (glyphs.len * 2 < num_ranges * 3 ? 1 : 2);
1055
1056 switch (u.format)
1057 {
1058 case 1: return_trace (u.format1.serialize (c, glyphs));
1059 case 2: return_trace (u.format2.serialize (c, glyphs));
1060 default:return_trace (false);
1061 }
1062 }
1063
  bool sanitize (hb_sanitize_context_t *c) const
1065 {
1066 TRACE_SANITIZE (this);
1067 if (!u.format.sanitize (c)) return_trace (false);
1068 switch (u.format)
1069 {
1070 case 1: return_trace (u.format1.sanitize (c));
1071 case 2: return_trace (u.format2.sanitize (c));
1072 default:return_trace (true);
1073 }
1074 }
1075
  bool intersects (const hb_set_t *glyphs) const
1077 {
1078 switch (u.format)
1079 {
1080 case 1: return u.format1.intersects (glyphs);
1081 case 2: return u.format2.intersects (glyphs);
1082 default:return false;
1083 }
1084 }
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1086 {
1087 switch (u.format)
1088 {
1089 case 1: return u.format1.intersects_coverage (glyphs, index);
1090 case 2: return u.format2.intersects_coverage (glyphs, index);
1091 default:return false;
1092 }
1093 }
1094
1095 /* Might return false if array looks unsorted.
1096 * Used for faster rejection of corrupt data. */
1097 template <typename set_t>
  bool add_coverage (set_t *glyphs) const
1099 {
1100 switch (u.format)
1101 {
1102 case 1: return u.format1.add_coverage (glyphs);
1103 case 2: return u.format2.add_coverage (glyphs);
1104 default:return false;
1105 }
1106 }
1107
1108 struct Iter
1109 {
    Iter (const Coverage &c_)
1111 {
1112 memset (this, 0, sizeof (*this));
1113 format = c_.u.format;
1114 switch (format)
1115 {
1116 case 1: u.format1.init (c_.u.format1); return;
1117 case 2: u.format2.init (c_.u.format2); return;
1118 default: return;
1119 }
1120 }
    bool more ()
1122 {
1123 switch (format)
1124 {
1125 case 1: return u.format1.more ();
1126 case 2: return u.format2.more ();
1127 default:return false;
1128 }
1129 }
    void next ()
1131 {
1132 switch (format)
1133 {
1134 case 1: u.format1.next (); break;
1135 case 2: u.format2.next (); break;
1136 default: break;
1137 }
1138 }
    hb_codepoint_t get_glyph ()
1140 {
1141 switch (format)
1142 {
1143 case 1: return u.format1.get_glyph ();
1144 case 2: return u.format2.get_glyph ();
1145 default:return 0;
1146 }
1147 }
    unsigned int get_coverage ()
1149 {
1150 switch (format)
1151 {
1152 case 1: return u.format1.get_coverage ();
1153 case 2: return u.format2.get_coverage ();
1154 default:return -1;
1155 }
1156 }
1157
1158 private:
1159 unsigned int format;
1160 union {
1161 CoverageFormat2::Iter format2; /* Put this one first since it's larger; helps shut up compiler. */
1162 CoverageFormat1::Iter format1;
1163 } u;
1164 };
1165
1166 protected:
1167 union {
1168 HBUINT16 format; /* Format identifier */
1169 CoverageFormat1 format1;
1170 CoverageFormat2 format2;
1171 } u;
1172 public:
1173 DEFINE_SIZE_UNION (2, format);
1174 };
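
/* Illustrative sketch (assuming a caller-provided `coverage` and `glyph`):
 * typical use of a Coverage table from a lookup subtable.
 *
 *   unsigned int index = coverage.get_coverage (glyph);
 *   if (index == NOT_COVERED)
 *     return;                // glyph not covered; nothing to apply
 *   // `index` selects the per-glyph data stored at the same position
 *   // in the subtable (e.g. a substitute glyph or a ValueRecord).
 *
 * Iterating all covered glyphs in coverage-index order:
 *
 *   for (Coverage::Iter it (coverage); it.more (); it.next ())
 *     process (it.get_glyph (), it.get_coverage ());   // `process` is hypothetical
 */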
1175
1176
1177 /*
1178 * Class Definition Table
1179 */
1180
1181 static inline void ClassDef_serialize (hb_serialize_context_t *c,
1182 hb_array_t<const GlyphID> glyphs,
1183 hb_array_t<const HBUINT16> klasses);
1184
1185 struct ClassDefFormat1
1186 {
1187 friend struct ClassDef;
1188
1189 private:
  unsigned int get_class (hb_codepoint_t glyph_id) const
1191 {
1192 return classValue[(unsigned int) (glyph_id - startGlyph)];
1193 }
1194
  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const HBUINT16> glyphs,
                  hb_array_t<const HBUINT16> klasses)
1198 {
1199 TRACE_SERIALIZE (this);
1200 if (unlikely (!c->extend_min (*this))) return_trace (false);
1201
1202 if (unlikely (!glyphs.len))
1203 {
1204 startGlyph.set (0);
1205 classValue.len.set (0);
1206 return_trace (true);
1207 }
1208
1209 hb_codepoint_t glyph_min = glyphs[0];
1210 hb_codepoint_t glyph_max = glyphs[glyphs.len - 1];
1211
1212 startGlyph.set (glyph_min);
1213 classValue.len.set (glyph_max - glyph_min + 1);
1214 if (unlikely (!c->extend (classValue))) return_trace (false);
1215
1216 for (unsigned int i = 0; i < glyphs.len; i++)
1217 classValue[glyphs[i] - glyph_min] = klasses[i];
1218
1219 return_trace (true);
1220 }
1221
  bool subset (hb_subset_context_t *c) const
1223 {
1224 TRACE_SUBSET (this);
1225 const hb_set_t &glyphset = *c->plan->glyphset;
1226 const hb_map_t &glyph_map = *c->plan->glyph_map;
1227 hb_vector_t<GlyphID> glyphs;
1228 hb_vector_t<HBUINT16> klasses;
1229
1230 hb_codepoint_t start = startGlyph;
1231 hb_codepoint_t end = start + classValue.len;
1232 for (hb_codepoint_t g = start; g < end; g++)
1233 {
1234 unsigned int value = classValue[g - start];
1235 if (!value) continue;
1236 if (!glyphset.has (g)) continue;
1237 glyphs.push()->set (glyph_map[g]);
1238 klasses.push()->set (value);
1239 }
1240 c->serializer->propagate_error (glyphs, klasses);
1241 ClassDef_serialize (c->serializer, glyphs, klasses);
1242 return_trace (glyphs.len);
1243 }
1244
  bool sanitize (hb_sanitize_context_t *c) const
1246 {
1247 TRACE_SANITIZE (this);
1248 return_trace (c->check_struct (this) && classValue.sanitize (c));
1249 }
1250
1251 template <typename set_t>
  bool add_coverage (set_t *glyphs) const
1253 {
1254 unsigned int start = 0;
1255 unsigned int count = classValue.len;
1256 for (unsigned int i = 0; i < count; i++)
1257 {
1258 if (classValue[i])
1259 continue;
1260
1261 if (start != i)
1262 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
1263 return false;
1264
1265 start = i + 1;
1266 }
1267 if (start != count)
1268 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
1269 return false;
1270
1271 return true;
1272 }
1273
1274 template <typename set_t>
  bool add_class (set_t *glyphs, unsigned int klass) const
1276 {
1277 unsigned int count = classValue.len;
1278 for (unsigned int i = 0; i < count; i++)
1279 if (classValue[i] == klass) glyphs->add (startGlyph + i);
1280 return true;
1281 }
1282
  bool intersects (const hb_set_t *glyphs) const
1284 {
1285 /* TODO Speed up, using hb_set_next()? */
1286 hb_codepoint_t start = startGlyph;
1287 hb_codepoint_t end = startGlyph + classValue.len;
1288 for (hb_codepoint_t iter = startGlyph - 1;
1289 hb_set_next (glyphs, &iter) && iter < end;)
1290 if (classValue[iter - start]) return true;
1291 return false;
1292 }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1294 {
1295 unsigned int count = classValue.len;
1296 if (klass == 0)
1297 {
1298 /* Match if there's any glyph that is not listed! */
1299 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1300 if (!hb_set_next (glyphs, &g)) return false;
1301 if (g < startGlyph) return true;
1302 g = startGlyph + count - 1;
1303 if (hb_set_next (glyphs, &g)) return true;
1304 /* Fall through. */
1305 }
1306 for (unsigned int i = 0; i < count; i++)
1307 if (classValue[i] == klass && glyphs->has (startGlyph + i))
1308 return true;
1309 return false;
1310 }
1311
1312 protected:
1313 HBUINT16 classFormat; /* Format identifier--format = 1 */
1314 GlyphID startGlyph; /* First GlyphID of the classValueArray */
1315 ArrayOf<HBUINT16>
1316 classValue; /* Array of Class Values--one per GlyphID */
1317 public:
1318 DEFINE_SIZE_ARRAY (6, classValue);
1319 };
1320
1321 struct ClassDefFormat2
1322 {
1323 friend struct ClassDef;
1324
1325 private:
  unsigned int get_class (hb_codepoint_t glyph_id) const
1327 {
1328 return rangeRecord.bsearch (glyph_id).value;
1329 }
1330
  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const HBUINT16> glyphs,
                  hb_array_t<const HBUINT16> klasses)
1334 {
1335 TRACE_SERIALIZE (this);
1336 if (unlikely (!c->extend_min (*this))) return_trace (false);
1337
1338 if (unlikely (!glyphs.len))
1339 {
1340 rangeRecord.len.set (0);
1341 return_trace (true);
1342 }
1343
1344 unsigned int num_ranges = 1;
1345 for (unsigned int i = 1; i < glyphs.len; i++)
1346 if (glyphs[i - 1] + 1 != glyphs[i] ||
1347 klasses[i - 1] != klasses[i])
1348 num_ranges++;
1349 rangeRecord.len.set (num_ranges);
1350 if (unlikely (!c->extend (rangeRecord))) return_trace (false);
1351
1352 unsigned int range = 0;
1353 rangeRecord[range].start = glyphs[0];
1354 rangeRecord[range].value.set (klasses[0]);
1355 for (unsigned int i = 1; i < glyphs.len; i++)
1356 {
1357 if (glyphs[i - 1] + 1 != glyphs[i] ||
1358 klasses[i - 1] != klasses[i])
1359 {
1360 range++;
1361 rangeRecord[range].start = glyphs[i];
1362 rangeRecord[range].value = klasses[i];
1363 }
1364 rangeRecord[range].end = glyphs[i];
1365 }
1366 return_trace (true);
1367 }
1368
  bool subset (hb_subset_context_t *c) const
1370 {
1371 TRACE_SUBSET (this);
1372 const hb_set_t &glyphset = *c->plan->glyphset;
1373 const hb_map_t &glyph_map = *c->plan->glyph_map;
1374 hb_vector_t<GlyphID> glyphs;
1375 hb_vector_t<HBUINT16> klasses;
1376
1377 unsigned int count = rangeRecord.len;
1378 for (unsigned int i = 0; i < count; i++)
1379 {
1380 unsigned int value = rangeRecord[i].value;
1381 if (!value) continue;
1382 hb_codepoint_t start = rangeRecord[i].start;
1383 hb_codepoint_t end = rangeRecord[i].end + 1;
1384 for (hb_codepoint_t g = start; g < end; g++)
1385 {
1386 if (!glyphset.has (g)) continue;
1387 glyphs.push ()->set (glyph_map[g]);
1388 klasses.push ()->set (value);
1389 }
1390 }
1391 c->serializer->propagate_error (glyphs, klasses);
1392 ClassDef_serialize (c->serializer, glyphs, klasses);
1393 return_trace (glyphs.len);
1394 }
1395
  bool sanitize (hb_sanitize_context_t *c) const
1397 {
1398 TRACE_SANITIZE (this);
1399 return_trace (rangeRecord.sanitize (c));
1400 }
1401
1402 template <typename set_t>
  bool add_coverage (set_t *glyphs) const
1404 {
1405 unsigned int count = rangeRecord.len;
1406 for (unsigned int i = 0; i < count; i++)
1407 if (rangeRecord[i].value)
1408 if (unlikely (!rangeRecord[i].add_coverage (glyphs)))
1409 return false;
1410 return true;
1411 }
1412
1413 template <typename set_t>
  bool add_class (set_t *glyphs, unsigned int klass) const
1415 {
1416 unsigned int count = rangeRecord.len;
1417 for (unsigned int i = 0; i < count; i++)
1418 {
1419 if (rangeRecord[i].value == klass)
1420 if (unlikely (!rangeRecord[i].add_coverage (glyphs)))
1421 return false;
1422 }
1423 return true;
1424 }
1425
  bool intersects (const hb_set_t *glyphs) const
1427 {
1428 /* TODO Speed up, using hb_set_next() and bsearch()? */
1429 unsigned int count = rangeRecord.len;
1430 for (unsigned int i = 0; i < count; i++)
1431 if (rangeRecord[i].intersects (glyphs))
1432 return true;
1433 return false;
1434 }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1436 {
1437 unsigned int count = rangeRecord.len;
1438 if (klass == 0)
1439 {
1440 /* Match if there's any glyph that is not listed! */
1441 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1442 for (unsigned int i = 0; i < count; i++)
1443 {
1444 if (!hb_set_next (glyphs, &g))
1445 break;
1446 if (g < rangeRecord[i].start)
1447 return true;
1448 g = rangeRecord[i].end;
1449 }
1450 if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
1451 return true;
1452 /* Fall through. */
1453 }
1454 for (unsigned int i = 0; i < count; i++)
1455 if (rangeRecord[i].value == klass && rangeRecord[i].intersects (glyphs))
1456 return true;
1457 return false;
1458 }
1459
1460 protected:
1461 HBUINT16 classFormat; /* Format identifier--format = 2 */
1462 SortedArrayOf<RangeRecord>
1463 rangeRecord; /* Array of glyph ranges--ordered by
1464 * Start GlyphID */
1465 public:
1466 DEFINE_SIZE_ARRAY (4, rangeRecord);
1467 };
1468
1469 struct ClassDef
1470 {
  unsigned int get_class (hb_codepoint_t glyph_id) const
1472 {
1473 switch (u.format) {
1474 case 1: return u.format1.get_class (glyph_id);
1475 case 2: return u.format2.get_class (glyph_id);
1476 default:return 0;
1477 }
1478 }
1479
  bool serialize (hb_serialize_context_t *c,
                  hb_array_t<const GlyphID> glyphs,
                  hb_array_t<const HBUINT16> klasses)
1483 {
1484 TRACE_SERIALIZE (this);
1485 if (unlikely (!c->extend_min (*this))) return_trace (false);
1486
1487 unsigned int format = 2;
1488 if (glyphs.len)
1489 {
1490 hb_codepoint_t glyph_min = glyphs[0];
1491 hb_codepoint_t glyph_max = glyphs[glyphs.len - 1];
1492
1493 unsigned int num_ranges = 1;
1494 for (unsigned int i = 1; i < glyphs.len; i++)
1495 if (glyphs[i - 1] + 1 != glyphs[i] ||
1496 klasses[i - 1] != klasses[i])
1497 num_ranges++;
1498
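      /* Format 1 needs one 16-bit class value per glyph in the span
       * [glyph_min, glyph_max] (plus startGlyph); format 2 needs three
       * 16-bit values per range. Pick whichever needs fewer. */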
1499 if (1 + (glyph_max - glyph_min + 1) < num_ranges * 3)
1500 format = 1;
1501 }
1502 u.format.set (format);
1503
1504 switch (u.format)
1505 {
1506 case 1: return_trace (u.format1.serialize (c, glyphs, klasses));
1507 case 2: return_trace (u.format2.serialize (c, glyphs, klasses));
1508 default:return_trace (false);
1509 }
1510 }
1511
  bool subset (hb_subset_context_t *c) const
1513 {
1514 TRACE_SUBSET (this);
1515 switch (u.format) {
1516 case 1: return_trace (u.format1.subset (c));
1517 case 2: return_trace (u.format2.subset (c));
1518 default:return_trace (false);
1519 }
1520 }
1521
  bool sanitize (hb_sanitize_context_t *c) const
1523 {
1524 TRACE_SANITIZE (this);
1525 if (!u.format.sanitize (c)) return_trace (false);
1526 switch (u.format) {
1527 case 1: return_trace (u.format1.sanitize (c));
1528 case 2: return_trace (u.format2.sanitize (c));
1529 default:return_trace (true);
1530 }
1531 }
1532
1533 /* Might return false if array looks unsorted.
1534 * Used for faster rejection of corrupt data. */
1535 template <typename set_t>
  bool add_coverage (set_t *glyphs) const
1537 {
1538 switch (u.format) {
1539 case 1: return u.format1.add_coverage (glyphs);
1540 case 2: return u.format2.add_coverage (glyphs);
1541 default:return false;
1542 }
1543 }
1544
1545 /* Might return false if array looks unsorted.
1546 * Used for faster rejection of corrupt data. */
1547 template <typename set_t>
  bool add_class (set_t *glyphs, unsigned int klass) const
1549 {
1550 switch (u.format) {
1551 case 1: return u.format1.add_class (glyphs, klass);
1552 case 2: return u.format2.add_class (glyphs, klass);
1553 default:return false;
1554 }
1555 }
1556
  bool intersects (const hb_set_t *glyphs) const
1558 {
1559 switch (u.format) {
1560 case 1: return u.format1.intersects (glyphs);
1561 case 2: return u.format2.intersects (glyphs);
1562 default:return false;
1563 }
1564 }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
1566 {
1567 switch (u.format) {
1568 case 1: return u.format1.intersects_class (glyphs, klass);
1569 case 2: return u.format2.intersects_class (glyphs, klass);
1570 default:return false;
1571 }
1572 }
1573
1574 protected:
1575 union {
1576 HBUINT16 format; /* Format identifier */
1577 ClassDefFormat1 format1;
1578 ClassDefFormat2 format2;
1579 } u;
1580 public:
1581 DEFINE_SIZE_UNION (2, format);
1582 };
1583
static inline void ClassDef_serialize (hb_serialize_context_t *c,
                                       hb_array_t<const GlyphID> glyphs,
                                       hb_array_t<const HBUINT16> klasses)
{ c->start_embed<ClassDef> ()->serialize (c, glyphs, klasses); }
1588
1589
1590 /*
1591 * Item Variation Store
1592 */
1593
1594 struct VarRegionAxis
1595 {
  float evaluate (int coord) const
1597 {
1598 int start = startCoord, peak = peakCoord, end = endCoord;
1599
1600 /* TODO Move these to sanitize(). */
1601 if (unlikely (start > peak || peak > end))
1602 return 1.;
1603 if (unlikely (start < 0 && end > 0 && peak != 0))
1604 return 1.;
1605
1606 if (peak == 0 || coord == peak)
1607 return 1.;
1608
1609 if (coord <= start || end <= coord)
1610 return 0.;
1611
1612 /* Interpolate */
1613 if (coord < peak)
1614 return float (coord - start) / (peak - start);
1615 else
1616 return float (end - coord) / (end - peak);
1617 }
1618
  bool sanitize (hb_sanitize_context_t *c) const
1620 {
1621 TRACE_SANITIZE (this);
1622 return_trace (c->check_struct (this));
1623 /* TODO Handle invalid start/peak/end configs, so we don't
1624 * have to do that at runtime. */
1625 }
1626
1627 public:
1628 F2DOT14 startCoord;
1629 F2DOT14 peakCoord;
1630 F2DOT14 endCoord;
1631 public:
1632 DEFINE_SIZE_STATIC (6);
1633 };
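
/* Worked example for evaluate () above (values shown as floats; the actual
 * coordinates are 2.14 fixed-point integers, so 1.0 is stored as 16384):
 * with startCoord = -1.0, peakCoord = -0.5, endCoord = 0.0 the axis scalar is
 *
 *   coord <= -1.0 or coord >= 0.0  ->  0.0   (outside the region)
 *   coord == -0.5                  ->  1.0   (at the peak)
 *   coord == -0.75                 ->  0.5   ((coord - start) / (peak - start))
 *   coord == -0.25                 ->  0.5   ((end - coord) / (end - peak))
 *
 * i.e. a triangular "tent" that is 1 at the peak and falls off linearly to 0
 * at the region edges. */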
1634
1635 struct VarRegionList
1636 {
  float evaluate (unsigned int region_index,
                  const int *coords, unsigned int coord_len) const
1639 {
1640 if (unlikely (region_index >= regionCount))
1641 return 0.;
1642
1643 const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
1644
1645 float v = 1.;
1646 unsigned int count = axisCount;
1647 for (unsigned int i = 0; i < count; i++)
1648 {
1649 int coord = i < coord_len ? coords[i] : 0;
1650 float factor = axes[i].evaluate (coord);
1651 if (factor == 0.f)
1652 return 0.;
1653 v *= factor;
1654 }
1655 return v;
1656 }
1657
  bool sanitize (hb_sanitize_context_t *c) const
1659 {
1660 TRACE_SANITIZE (this);
1661 return_trace (c->check_struct (this) &&
1662 axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount));
1663 }
1664
  unsigned int get_region_count () const { return regionCount; }
1666
1667 protected:
1668 HBUINT16 axisCount;
1669 HBUINT16 regionCount;
1670 UnsizedArrayOf<VarRegionAxis>
1671 axesZ;
1672 public:
1673 DEFINE_SIZE_ARRAY (4, axesZ);
1674 };
1675
1676 struct VarData
1677 {
  unsigned int get_region_index_count () const
  { return regionIndices.len; }

  unsigned int get_row_size () const
  { return shortCount + regionIndices.len; }

  unsigned int get_size () const
  { return itemCount * get_row_size (); }

  float get_delta (unsigned int inner,
                   const int *coords, unsigned int coord_count,
                   const VarRegionList &regions) const
1690 {
1691 if (unlikely (inner >= itemCount))
1692 return 0.;
1693
1694 unsigned int count = regionIndices.len;
1695 unsigned int scount = shortCount;
1696
1697 const HBUINT8 *bytes = &StructAfter<HBUINT8> (regionIndices);
1698 const HBUINT8 *row = bytes + inner * (scount + count);
1699
1700 float delta = 0.;
1701 unsigned int i = 0;
1702
1703 const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
1704 for (; i < scount; i++)
1705 {
1706 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
1707 delta += scalar * *scursor++;
1708 }
1709 const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
1710 for (; i < count; i++)
1711 {
1712 float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
1713 delta += scalar * *bcursor++;
1714 }
1715
1716 return delta;
1717 }
1718
  void get_scalars (int *coords, unsigned int coord_count,
                    const VarRegionList &regions,
                    float *scalars /* OUT */,
                    unsigned int num_scalars) const
1723 {
1724 assert (num_scalars == regionIndices.len);
1725 for (unsigned int i = 0; i < num_scalars; i++)
1726 {
1727 scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
1728 }
1729 }
1730
  bool sanitize (hb_sanitize_context_t *c) const
1732 {
1733 TRACE_SANITIZE (this);
1734 return_trace (c->check_struct (this) &&
1735 regionIndices.sanitize (c) &&
1736 shortCount <= regionIndices.len &&
1737 c->check_range (&StructAfter<HBUINT8> (regionIndices),
1738 itemCount,
1739 get_row_size ()));
1740 }
1741
1742 protected:
1743 HBUINT16 itemCount;
1744 HBUINT16 shortCount;
1745 ArrayOf<HBUINT16> regionIndices;
1746 /*UnsizedArrayOf<HBUINT8>bytesX;*/
1747 public:
1748 DEFINE_SIZE_ARRAY (6, regionIndices);
1749 };
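
/* Note on the layout used by get_delta () above: each VarData row stores one
 * delta per entry of regionIndices; the first shortCount deltas are signed
 * 16-bit words, the remaining ones signed bytes. The returned value is
 *
 *   delta = sum_i scalar (region[i], coords) * delta[i]
 */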
1750
1751 struct VariationStore
1752 {
  float get_delta (unsigned int outer, unsigned int inner,
                   const int *coords, unsigned int coord_count) const
1755 {
1756 if (unlikely (outer >= dataSets.len))
1757 return 0.;
1758
1759 return (this+dataSets[outer]).get_delta (inner,
1760 coords, coord_count,
1761 this+regions);
1762 }
1763
get_deltaOT::VariationStore1764 float get_delta (unsigned int index,
1765 const int *coords, unsigned int coord_count) const
1766 {
1767 unsigned int outer = index >> 16;
1768 unsigned int inner = index & 0xFFFF;
1769 return get_delta (outer, inner, coords, coord_count);
1770 }
1771
sanitizeOT::VariationStore1772 bool sanitize (hb_sanitize_context_t *c) const
1773 {
1774 TRACE_SANITIZE (this);
1775 return_trace (c->check_struct (this) &&
1776 format == 1 &&
1777 regions.sanitize (c, this) &&
1778 dataSets.sanitize (c, this));
1779 }
1780
get_region_index_countOT::VariationStore1781 unsigned int get_region_index_count (unsigned int ivs) const
1782 { return (this+dataSets[ivs]).get_region_index_count (); }
1783
get_scalarsOT::VariationStore1784 void get_scalars (unsigned int ivs,
1785 int *coords, unsigned int coord_count,
1786 float *scalars /*OUT*/,
1787 unsigned int num_scalars) const
1788 {
1789 (this+dataSets[ivs]).get_scalars (coords, coord_count, this+regions,
1790 &scalars[0], num_scalars);
1791 }
1792
1793 protected:
1794 HBUINT16 format;
1795 LOffsetTo<VarRegionList> regions;
1796 LOffsetArrayOf<VarData> dataSets;
1797 public:
1798 DEFINE_SIZE_ARRAY (8, dataSets);
1799 };
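
/* Usage sketch (hypothetical caller, not part of this header): a 32-bit
 * variation index packs the VarData index in the high 16 bits and the row
 * within it in the low 16 bits, so
 *
 *   float delta = store.get_delta ((outer << 16) | inner,
 *                                  font->coords, font->num_coords);
 *
 * is equivalent to store.get_delta (outer, inner, ...). */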

/*
 * Feature Variations
 */

struct ConditionFormat1
{
  friend struct Condition;

  private:
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
    return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 1 */
  HBUINT16      axisIndex;
  F2DOT14       filterRangeMinValue;
  F2DOT14       filterRangeMaxValue;
  public:
  DEFINE_SIZE_STATIC (8);
};
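
/* Illustrative note: coords are normalized axis positions in 2.14 fixed
 * point, as are the filter range values, so this is a simple inclusive range
 * test.  For example, assuming the axis range [0.5, 1.0] (0x2000..0x4000 in
 * 2.14), the condition holds only in the upper half of the axis; an axis
 * missing from coords is treated as 0, its default position. */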

struct Condition
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    default:return_trace (true);
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ConditionFormat1      format1;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};

struct ConditionSet
{
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
        return false;
    return true;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, this));
  }

  protected:
  LOffsetArrayOf<Condition> conditions;
  public:
  DEFINE_SIZE_ARRAY (2, conditions);
};
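
/* Illustrative note: a ConditionSet is a logical AND of its conditions; the
 * loop above bails out on the first condition that fails, and an empty set
 * evaluates to true. */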

struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16              featureIndex;
  LOffsetTo<Feature>    feature;
  public:
  DEFINE_SIZE_STATIC (6);
};

struct FeatureTableSubstitution
{
  const Feature *find_substitute (unsigned int feature_index) const
  {
    unsigned int count = substitutions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
      if (record.featureIndex == feature_index)
        return &(this+record.feature);
    }
    return nullptr;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  substitutions.sanitize (c, this));
  }

  protected:
  FixedVersion<>        version;        /* Version--0x00010000u */
  ArrayOf<FeatureTableSubstitutionRecord>
                        substitutions;
  public:
  DEFINE_SIZE_ARRAY (6, substitutions);
};
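
/* Illustrative note: find_substitute () maps an index into the FeatureList
 * to a replacement Feature table by linear search over the records; features
 * that are not substituted return nullptr, in which case callers keep using
 * the original Feature. */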

struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
                  substitutions.sanitize (c, base));
  }

  protected:
  LOffsetTo<ConditionSet>
                conditions;
  LOffsetTo<FeatureTableSubstitution>
                substitutions;
  public:
  DEFINE_SIZE_STATIC (8);
};

struct FeatureVariations
{
  enum { NOT_FOUND_INDEX = 0xFFFFFFFFu };

  bool find_index (const int *coords, unsigned int coord_len,
                   unsigned int *index) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len))
      {
        *index = i;
        return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }

  const Feature *find_substitute (unsigned int variations_index,
                                  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace (c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
                  likely (version.major == 1) &&
                  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>        version;        /* Version--0x00010000u */
  LArrayOf<FeatureVariationRecord>
                        varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};
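
/* Usage sketch (hypothetical caller): find_index () returns the first record
 * whose ConditionSet matches the current coordinates, so records are expected
 * to be ordered from most to least specific:
 *
 *   unsigned int vi;
 *   if (feature_vars.find_index (coords, num_coords, &vi))
 *   {
 *     const Feature *f = feature_vars.find_substitute (vi, feature_index);
 *     if (f) feature = f;   // use the substituted feature table
 *   }
 */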


/*
 * Device Tables
 */

struct HintingDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  private:

  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    return (int) (pixels * (int64_t) scale / ppem);
  }
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }

  protected:
  HBUINT16      startSize;      /* Smallest size to correct--in ppem */
  HBUINT16      endSize;        /* Largest size to correct--in ppem */
  HBUINT16      deltaFormat;    /* Format of DeltaValue array data: 1, 2, or 3
                                 * 1    Signed 2-bit value, 8 values per uint16
                                 * 2    Signed 4-bit value, 4 values per uint16
                                 * 3    Signed 8-bit value, 2 values per uint16
                                 */
  UnsizedArrayOf<HBUINT16>
                deltaValueZ;    /* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValueZ);
};
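
/* Worked example (illustrative values): with deltaFormat=2 (signed 4-bit
 * deltas, four per uint16), startSize=12 and deltaValueZ[0]=0x5F30, a lookup
 * at ppem 12 reads the top nibble 0x5 -> +5 pixels, while ppem 13 reads the
 * next nibble 0xF -> sign-extended to -1 pixel; get_delta () then converts
 * the pixel count to position units by multiplying by scale/ppem. */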

struct VariationDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_x (get_delta (font, store)); }

  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_y (get_delta (font, store)); }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font, const VariationStore &store) const
  {
    return store.get_delta (outerIndex, innerIndex, font->coords, font->num_coords);
  }

  protected:
  HBUINT16      outerIndex;
  HBUINT16      innerIndex;
  HBUINT16      deltaFormat;    /* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};
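
/* Illustrative note: outerIndex/innerIndex select a VarData table and a row
 * within it in the font's VariationStore; the resulting design-space delta
 * is scaled to position units with em_scalef_x/em_scalef_y above. */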

struct DeviceHeader
{
  protected:
  HBUINT16      reserved1;
  HBUINT16      reserved2;
  public:
  HBUINT16      format;         /* Format identifier */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct Device
{
  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
  {
    switch (u.b.format)
    {
    case 1: case 2: case 3:
      return u.hinting.get_x_delta (font);
    case 0x8000:
      return u.variation.get_x_delta (font, store);
    default:
      return 0;
    }
  }
  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
  {
    switch (u.b.format)
    {
    case 1: case 2: case 3:
      return u.hinting.get_y_delta (font);
    case 0x8000:
      return u.variation.get_y_delta (font, store);
    default:
      return 0;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.b.format.sanitize (c)) return_trace (false);
    switch (u.b.format) {
    case 1: case 2: case 3:
      return_trace (u.hinting.sanitize (c));
    case 0x8000:
      return_trace (u.variation.sanitize (c));
    default:
      return_trace (true);
    }
  }

  protected:
  union {
  DeviceHeader          b;
  HintingDevice         hinting;
  VariationDevice       variation;
  } u;
  public:
  DEFINE_SIZE_UNION (6, b);
};
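
/* Usage sketch (hypothetical caller): formats 1..3 dispatch to the ppem-based
 * HintingDevice, format 0x8000 to VariationDevice, and unknown formats yield
 * a zero delta, which keeps consumers tolerant of future formats.  A
 * positioning lookup would typically apply both axes:
 *
 *   pos.x_offset += device.get_x_delta (font, var_store);
 *   pos.y_offset += device.get_y_delta (font, var_store);
 */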


} /* namespace OT */


#endif /* HB_OT_LAYOUT_COMMON_HH */