1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
31
32 #include "hb.hh"
33 #include "hb-buffer.hh"
34 #include "hb-map.hh"
35 #include "hb-set.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
39
40
41 namespace OT {
42
43
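/* Dispatch context that asks whether any subtable of a lookup intersects
 * (i.e. can match at least one glyph of) a given glyph set. */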
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool>
46 {
47 template <typename T>
  return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
51
52 const hb_set_t *glyphs;
53
  hb_intersects_context_t (const hb_set_t *glyphs_) :
                             glyphs (glyphs_) {}
56 };
57
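/* Dispatch context that asks whether any subtable may map one glyph to a
 * different number of glyphs (e.g. Multiple or Ligature substitutions),
 * i.e. whether the lookup is not strictly one-to-one. */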
58 struct hb_have_non_1to1_context_t :
59 hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
60 {
61 template <typename T>
  return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
65 };
66
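/* Dispatch context used to compute the glyph closure: the set of glyphs
 * reachable from `glyphs` through substitution lookups.  The done_lookups_*
 * maps memoize which lookups have already been visited with which glyph
 * sets, and active_glyphs_stack tracks the glyphs currently being matched
 * while recursing through contextual lookups. */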
67 struct hb_closure_context_t :
68 hb_dispatch_context_t<hb_closure_context_t>
69 {
70 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
71 template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
75 {
76 if (unlikely (nesting_level_left == 0 || !recurse_func))
77 return;
78
79 nesting_level_left--;
80 recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
81 nesting_level_left++;
82 }
83
  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_INDICES; }
86
  bool should_visit_lookup (unsigned int lookup_index)
88 {
89 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
90 return false;
91
92 if (is_lookup_done (lookup_index))
93 return false;
94
95 return true;
96 }
97
  bool is_lookup_done (unsigned int lookup_index)
99 {
100 if (done_lookups_glyph_count->in_error () ||
101 done_lookups_glyph_set->in_error ())
102 return true;
103
104 /* Have we visited this lookup with the current set of glyphs? */
105 if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
106 {
107 done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());
108
109 if (!done_lookups_glyph_set->get (lookup_index))
110 {
111 hb_set_t* empty_set = hb_set_create ();
112 if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
113 {
114 hb_set_destroy (empty_set);
115 return true;
116 }
117 }
118
119 hb_set_clear (done_lookups_glyph_set->get (lookup_index));
120 }
121
122 hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
123 if (unlikely (covered_glyph_set->in_error ()))
124 return true;
125 if (parent_active_glyphs ()->is_subset (*covered_glyph_set))
126 return true;
127
128 hb_set_union (covered_glyph_set, parent_active_glyphs ());
129 return false;
130 }
131
  hb_set_t* parent_active_glyphs ()
133 {
134 if (active_glyphs_stack.length < 1)
135 return glyphs;
136
137 return active_glyphs_stack.tail ();
138 }
139
  void push_cur_active_glyphs (hb_set_t* cur_active_glyph_set)
141 {
142 active_glyphs_stack.push (cur_active_glyph_set);
143 }
144
  bool pop_cur_done_glyphs ()
146 {
147 if (active_glyphs_stack.length < 1)
148 return false;
149
150 active_glyphs_stack.pop ();
151 return true;
152 }
153
154 hb_face_t *face;
155 hb_set_t *glyphs;
156 hb_set_t *cur_intersected_glyphs;
157 hb_set_t output[1];
158 hb_vector_t<hb_set_t *> active_glyphs_stack;
159 recurse_func_t recurse_func;
160 unsigned int nesting_level_left;
161
  hb_closure_context_t (hb_face_t *face_,
                        hb_set_t *glyphs_,
                        hb_set_t *cur_intersected_glyphs_,
                        hb_map_t *done_lookups_glyph_count_,
                        hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set_,
                        unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                          face (face_),
                          glyphs (glyphs_),
                          cur_intersected_glyphs (cur_intersected_glyphs_),
                          recurse_func (nullptr),
                          nesting_level_left (nesting_level_left_),
                          done_lookups_glyph_count (done_lookups_glyph_count_),
                          done_lookups_glyph_set (done_lookups_glyph_set_),
                          lookup_count (0)
  {
    push_cur_active_glyphs (glyphs_);
  }
179
  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  void flush ()
185 {
186 hb_set_del_range (output, face->get_num_glyphs (), HB_SET_VALUE_INVALID); /* Remove invalid glyphs. */
187 hb_set_union (glyphs, output);
188 hb_set_clear (output);
189 active_glyphs_stack.pop ();
190 active_glyphs_stack.fini ();
191 }
192
193 private:
194 hb_map_t *done_lookups_glyph_count;
195 hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set;
196 unsigned int lookup_count;
197 };
198
199
200
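/* Dispatch context that computes the closure over lookups themselves:
 * visited_lookups collects every lookup reachable through recursion, and
 * inactive_lookups collects lookups that can never apply for the given
 * glyph set. */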
201 struct hb_closure_lookups_context_t :
202 hb_dispatch_context_t<hb_closure_lookups_context_t>
203 {
204 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
205 template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index)
209 {
210 if (unlikely (nesting_level_left == 0 || !recurse_func))
211 return;
212
213 /* Return if new lookup was recursed to before. */
214 if (is_lookup_visited (lookup_index))
215 return;
216
217 nesting_level_left--;
218 recurse_func (this, lookup_index);
219 nesting_level_left++;
220 }
221
  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_INDICES; }
230
  bool is_lookup_visited (unsigned lookup_index)
232 {
233 if (unlikely (lookup_count++ > HB_MAX_LOOKUP_INDICES))
234 return true;
235
236 if (unlikely (visited_lookups->in_error ()))
237 return true;
238
239 return visited_lookups->has (lookup_index);
240 }
241
242 hb_face_t *face;
243 const hb_set_t *glyphs;
244 recurse_func_t recurse_func;
245 unsigned int nesting_level_left;
246
  hb_closure_lookups_context_t (hb_face_t *face_,
                                const hb_set_t *glyphs_,
                                hb_set_t *visited_lookups_,
                                hb_set_t *inactive_lookups_,
                                unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                                face (face_),
                                glyphs (glyphs_),
                                recurse_func (nullptr),
                                nesting_level_left (nesting_level_left_),
                                visited_lookups (visited_lookups_),
                                inactive_lookups (inactive_lookups_),
                                lookup_count (0) {}
259
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
261
262 private:
263 hb_set_t *visited_lookups;
264 hb_set_t *inactive_lookups;
265 unsigned int lookup_count;
266 };
267
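/* Dispatch context that asks whether a lookup would apply to a fixed,
 * caller-supplied glyph sequence, without touching a buffer; this backs the
 * would_substitute-style queries. */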
268 struct hb_would_apply_context_t :
269 hb_dispatch_context_t<hb_would_apply_context_t, bool>
270 {
271 template <typename T>
  return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
275
276 hb_face_t *face;
277 const hb_codepoint_t *glyphs;
278 unsigned int len;
279 bool zero_context;
280
  hb_would_apply_context_t (hb_face_t *face_,
                            const hb_codepoint_t *glyphs_,
                            unsigned int len_,
                            bool zero_context_) :
                              face (face_),
                              glyphs (glyphs_),
                              len (len_),
                              zero_context (zero_context_) {}
289 };
290
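/* Dispatch context that accumulates the glyphs a lookup can match
 * (before/input/after) and the glyphs it can produce (output).  When
 * recursing into nested lookups, only the output set is collected; see the
 * comment in recurse() below. */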
291 struct hb_collect_glyphs_context_t :
292 hb_dispatch_context_t<hb_collect_glyphs_context_t>
293 {
294 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
295 template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned int lookup_index)
299 {
300 if (unlikely (nesting_level_left == 0 || !recurse_func))
301 return;
302
303 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
304 * past the previous check. For GSUB, we only want to collect the output
305 * glyphs in the recursion. If output is not requested, we can go home now.
306 *
307 * Note further, that the above is not exactly correct. A recursed lookup
308 * is allowed to match input that is not matched in the context, but that's
309 * not how most fonts are built. It's possible to relax that and recurse
310 * with all sets here if it proves to be an issue.
311 */
312
313 if (output == hb_set_get_empty ())
314 return;
315
316 /* Return if new lookup was recursed to before. */
317 if (recursed_lookups->has (lookup_index))
318 return;
319
320 hb_set_t *old_before = before;
321 hb_set_t *old_input = input;
322 hb_set_t *old_after = after;
323 before = input = after = hb_set_get_empty ();
324
325 nesting_level_left--;
326 recurse_func (this, lookup_index);
327 nesting_level_left++;
328
329 before = old_before;
330 input = old_input;
331 after = old_after;
332
333 recursed_lookups->add (lookup_index);
334 }
335
336 hb_face_t *face;
337 hb_set_t *before;
338 hb_set_t *input;
339 hb_set_t *after;
340 hb_set_t *output;
341 recurse_func_t recurse_func;
342 hb_set_t *recursed_lookups;
343 unsigned int nesting_level_left;
344
  hb_collect_glyphs_context_t (hb_face_t *face_,
                               hb_set_t *glyphs_before, /* OUT.  May be NULL */
                               hb_set_t *glyphs_input,  /* OUT.  May be NULL */
                               hb_set_t *glyphs_after,  /* OUT.  May be NULL */
                               hb_set_t *glyphs_output, /* OUT.  May be NULL */
                               unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                              face (face_),
                              before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
                              input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
                              after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
                              output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
                              recurse_func (nullptr),
                              recursed_lookups (hb_set_create ()),
                              nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
362 };
363
364
365
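/* Dispatch context that unions every subtable's Coverage table into `set`.
 * A rough usage sketch (hypothetical snippet, assuming a Lookup object and
 * an hb_set_t, as done elsewhere in hb-ot-layout):
 *
 *   hb_set_t glyphs;
 *   hb_collect_coverage_context_t<hb_set_t> c (&glyphs);
 *   lookup.dispatch (&c);  // each subtable's get_coverage() is collected
 */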
366 template <typename set_t>
367 struct hb_collect_coverage_context_t :
368 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
369 {
370 typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
371 template <typename T>
  return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value () { return Null (Coverage); }
  bool stop_sublookup_iteration (return_t r) const
375 {
376 r.collect_coverage (set);
377 return false;
378 }
379
  hb_collect_coverage_context_t (set_t *set_) :
                                   set (set_) {}
382
383 set_t *set;
384 };
385
386
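/* The main context passed around while applying (substituting or
 * positioning) a single lookup to a buffer.  It wraps the font, buffer and
 * GDEF table, and carries the active lookup's mask, props and flags while
 * the subtables' apply() methods run. */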
387 struct hb_ot_apply_context_t :
388 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
389 {
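  /* Decides, per glyph, whether it may match the current pattern element
   * (mask, syllable, match_func) and whether it may be skipped
   * (lookup_props, ZWJ/ZWNJ handling). */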
390 struct matcher_t
391 {
    matcher_t () :
             lookup_props (0),
             ignore_zwnj (false),
             ignore_zwj (false),
             mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
             syllable arg1(0),
#undef arg1
             match_func (nullptr),
             match_data (nullptr) {}
402
403 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
404
    void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    void set_mask (hb_mask_t mask_) { mask = mask_; }
    void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    void set_match_func (match_func_t match_func_,
                         const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }
413
414 enum may_match_t {
415 MATCH_NO,
416 MATCH_YES,
417 MATCH_MAYBE
418 };
419
    may_match_t may_match (const hb_glyph_info_t &info,
                           const HBUINT16        *glyph_data) const
422 {
423 if (!(info.mask & mask) ||
424 (syllable && syllable != info.syllable ()))
425 return MATCH_NO;
426
427 if (match_func)
428 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
429
430 return MATCH_MAYBE;
431 }
432
433 enum may_skip_t {
434 SKIP_NO,
435 SKIP_YES,
436 SKIP_MAYBE
437 };
438
    may_skip_t may_skip (const hb_ot_apply_context_t *c,
                         const hb_glyph_info_t       &info) const
441 {
442 if (!c->check_glyph_property (&info, lookup_props))
443 return SKIP_YES;
444
445 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
446 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
447 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
448 return SKIP_MAYBE;
449
450 return SKIP_NO;
451 }
452
453 protected:
454 unsigned int lookup_props;
455 bool ignore_zwnj;
456 bool ignore_zwj;
457 hb_mask_t mask;
458 uint8_t syllable;
459 match_func_t match_func;
460 const void *match_data;
461 };
462
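  /* Iterator that walks the buffer forwards (next) or backwards (prev) from
   * a given position, transparently skipping glyphs the lookup ignores and
   * matching the remaining ones against an optional glyph_data array. */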
463 struct skipping_iterator_t
464 {
    void init (hb_ot_apply_context_t *c_, bool context_match = false)
466 {
467 c = c_;
468 match_glyph_data = nullptr;
469 matcher.set_match_func (nullptr, nullptr);
470 matcher.set_lookup_props (c->lookup_props);
471 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
472 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
473 /* Ignore ZWJ if we are matching context, or asked to. */
474 matcher.set_ignore_zwj (context_match || c->auto_zwj);
475 matcher.set_mask (context_match ? -1 : c->lookup_mask);
476 }
    void set_lookup_props (unsigned int lookup_props)
478 {
479 matcher.set_lookup_props (lookup_props);
480 }
    void set_match_func (matcher_t::match_func_t match_func_,
                         const void *match_data_,
                         const HBUINT16 glyph_data[])
484 {
485 matcher.set_match_func (match_func_, match_data_);
486 match_glyph_data = glyph_data;
487 }
488
    void reset (unsigned int start_index_,
                unsigned int num_items_)
491 {
492 idx = start_index_;
493 num_items = num_items_;
494 end = c->buffer->len;
495 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
496 }
497
    void reject ()
499 {
500 num_items++;
501 if (match_glyph_data) match_glyph_data--;
502 }
503
    matcher_t::may_skip_t
    may_skip (const hb_glyph_info_t &info) const
    { return matcher.may_skip (c, info); }
507
    bool next ()
509 {
510 assert (num_items > 0);
511 while (idx + num_items < end)
512 {
513 idx++;
514 const hb_glyph_info_t &info = c->buffer->info[idx];
515
516 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
517 if (unlikely (skip == matcher_t::SKIP_YES))
518 continue;
519
520 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
521 if (match == matcher_t::MATCH_YES ||
522 (match == matcher_t::MATCH_MAYBE &&
523 skip == matcher_t::SKIP_NO))
524 {
525 num_items--;
526 if (match_glyph_data) match_glyph_data++;
527 return true;
528 }
529
530 if (skip == matcher_t::SKIP_NO)
531 return false;
532 }
533 return false;
534 }
    bool prev ()
536 {
537 assert (num_items > 0);
538 /* The alternate condition below is faster at string boundaries,
539 * but produces subpar "unsafe-to-concat" values. */
540 unsigned stop = num_items - 1;
541 if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
542 stop = 1 - 1;
543 /*When looking back, limit how far we search; this function is mostly
544 * used for looking back for base glyphs when attaching marks. If we
545 * don't limit, we can get O(n^2) behavior where n is the number of
546 * consecutive marks. */
547 stop = (unsigned) hb_max ((int) stop, (int) idx - HB_MAX_CONTEXT_LENGTH);
548 while (idx > stop)
549 {
550 idx--;
551 const hb_glyph_info_t &info = c->buffer->out_info[idx];
552
553 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
554 if (unlikely (skip == matcher_t::SKIP_YES))
555 continue;
556
557 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
558 if (match == matcher_t::MATCH_YES ||
559 (match == matcher_t::MATCH_MAYBE &&
560 skip == matcher_t::SKIP_NO))
561 {
562 num_items--;
563 if (match_glyph_data) match_glyph_data++;
564 return true;
565 }
566
567 if (skip == matcher_t::SKIP_NO)
568 return false;
569 }
570 return false;
571 }
572
573 unsigned int idx;
574 protected:
575 hb_ot_apply_context_t *c;
576 matcher_t matcher;
577 const HBUINT16 *match_glyph_data;
578
579 unsigned int num_items;
580 unsigned int end;
581 };
582
583
  const char *get_name () { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  return_t recurse (unsigned int sub_lookup_index)
591 {
592 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
593 return default_return_value ();
594
595 nesting_level_left--;
596 bool ret = recurse_func (this, sub_lookup_index);
597 nesting_level_left++;
598 return ret;
599 }
600
601 skipping_iterator_t iter_input, iter_context;
602
603 hb_font_t *font;
604 hb_face_t *face;
605 hb_buffer_t *buffer;
606 recurse_func_t recurse_func;
607 const GDEF &gdef;
608 const VariationStore &var_store;
609
610 hb_direction_t direction;
611 hb_mask_t lookup_mask;
612 unsigned int table_index; /* GSUB/GPOS */
613 unsigned int lookup_index;
614 unsigned int lookup_props;
615 unsigned int nesting_level_left;
616
617 bool has_glyph_classes;
618 bool auto_zwnj;
619 bool auto_zwj;
620 bool random;
621
622 uint32_t random_state;
623
624
  hb_ot_apply_context_t (unsigned int table_index_,
                         hb_font_t *font_,
                         hb_buffer_t *buffer_) :
                        iter_input (), iter_context (),
                        font (font_), face (font->face), buffer (buffer_),
                        recurse_func (nullptr),
                        gdef (
#ifndef HB_NO_OT_LAYOUT
                              *face->table.GDEF->table
#else
                              Null (GDEF)
#endif
                             ),
                        var_store (gdef.get_var_store ()),
                        direction (buffer_->props.direction),
                        lookup_mask (1),
                        table_index (table_index_),
                        lookup_index ((unsigned int) -1),
                        lookup_props (0),
                        nesting_level_left (HB_MAX_NESTING_LEVEL),
                        has_glyph_classes (gdef.has_glyph_classes ()),
                        auto_zwnj (true),
                        auto_zwj (true),
                        random (false),
                        random_state (1) { init_iters (); }
650
  void init_iters ()
652 {
653 iter_input.init (this, false);
654 iter_context.init (this, true);
655 }
656
  void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
  void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
  void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
  void set_random (bool random_) { random = random_; }
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
664
  uint32_t random_number ()
666 {
667 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
668 random_state = random_state * 48271 % 2147483647;
669 return random_state;
670 }
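  /* Example: with the initial random_state of 1, successive random_number()
   * calls return 48271, 182605794, 1291394886, ... (the minstd_rand sequence). */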
671
  bool match_properties_mark (hb_codepoint_t glyph,
                              unsigned int glyph_props,
                              unsigned int match_props) const
675 {
676 /* If using mark filtering sets, the high short of
677 * match_props has the set index.
678 */
679 if (match_props & LookupFlag::UseMarkFilteringSet)
680 return gdef.mark_set_covers (match_props >> 16, glyph);
681
682 /* The second byte of match_props has the meaning
683 * "ignore marks of attachment type different than
684 * the attachment type specified."
685 */
686 if (match_props & LookupFlag::MarkAttachmentType)
687 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
688
689 return true;
690 }
691
  bool check_glyph_property (const hb_glyph_info_t *info,
                             unsigned int match_props) const
694 {
695 hb_codepoint_t glyph = info->codepoint;
696 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
697
698 /* Not covered, if, for example, glyph class is ligature and
699 * match_props includes LookupFlags::IgnoreLigatures
700 */
701 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
702 return false;
703
704 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
705 return match_properties_mark (glyph, glyph_props, match_props);
706
707 return true;
708 }
709
  void _set_glyph_props (hb_codepoint_t glyph_index,
                         unsigned int class_guess = 0,
                         bool ligature = false,
                         bool component = false) const
714 {
715 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
716 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
717 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
718 if (ligature)
719 {
720 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
721 /* In the only place that the MULTIPLIED bit is used, Uniscribe
722 * seems to only care about the "last" transformation between
723 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
724 * and ligate again, it forgives the multiplication and acts as
725 * if only ligation happened. As such, clear MULTIPLIED bit.
726 */
727 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
728 }
729 if (component)
730 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
731 if (likely (has_glyph_classes))
732 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
733 else if (class_guess)
734 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
735 }
736
  void replace_glyph (hb_codepoint_t glyph_index) const
738 {
739 _set_glyph_props (glyph_index);
740 (void) buffer->replace_glyph (glyph_index);
741 }
  void replace_glyph_inplace (hb_codepoint_t glyph_index) const
743 {
744 _set_glyph_props (glyph_index);
745 buffer->cur().codepoint = glyph_index;
746 }
  void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
                                    unsigned int class_guess) const
749 {
750 _set_glyph_props (glyph_index, class_guess, true);
751 (void) buffer->replace_glyph (glyph_index);
752 }
  void output_glyph_for_component (hb_codepoint_t glyph_index,
                                   unsigned int class_guess) const
755 {
756 _set_glyph_props (glyph_index, class_guess, false, true);
757 (void) buffer->output_glyph (glyph_index);
758 }
759 };
760
761
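/* Dispatch context that caches a lookup's subtables as hb_applicable_t
 * entries; each entry pairs the subtable's apply() with a set digest of its
 * coverage so glyphs that cannot possibly match are rejected cheaply. */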
762 struct hb_get_subtables_context_t :
763 hb_dispatch_context_t<hb_get_subtables_context_t>
764 {
765 template <typename Type>
  static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
767 {
768 const Type *typed_obj = (const Type *) obj;
769 return typed_obj->apply (c);
770 }
771
772 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
773
774 struct hb_applicable_t
775 {
776 template <typename T>
    void init (const T &obj_, hb_apply_func_t apply_func_)
778 {
779 obj = &obj_;
780 apply_func = apply_func_;
781 digest.init ();
782 obj_.get_coverage ().collect_coverage (&digest);
783 }
784
    bool apply (OT::hb_ot_apply_context_t *c) const
786 {
787 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
788 }
789
790 private:
791 const void *obj;
792 hb_apply_func_t apply_func;
793 hb_set_digest_t digest;
794 };
795
796 typedef hb_vector_t<hb_applicable_t> array_t;
797
798 /* Dispatch interface. */
799 template <typename T>
  return_t dispatch (const T &obj)
801 {
802 hb_applicable_t *entry = array.push();
803 entry->init (obj, apply_to<T>);
804 return hb_empty_t ();
805 }
  static return_t default_return_value () { return hb_empty_t (); }
807
  hb_get_subtables_context_t (array_t &array_) :
                              array (array_) {}
810
811 array_t &array;
812 };
813
814
815
816
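/* Per-format helper function types: the glyph/class/coverage variants
 * defined below correspond to context formats 1 (glyphs), 2 (ClassDef) and
 * 3 (Coverage) respectively. */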
817 typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
818 typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
819 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
820 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
821
822 struct ContextClosureFuncs
823 {
824 intersects_func_t intersects;
825 intersected_glyphs_func_t intersected_glyphs;
826 };
827 struct ContextCollectGlyphsFuncs
828 {
829 collect_glyphs_func_t collect;
830 };
831 struct ContextApplyFuncs
832 {
833 match_func_t match;
834 };
835
836
static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
838 {
839 return glyphs->has (value);
840 }
static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
842 {
843 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
844 return class_def.intersects_class (glyphs, value);
845 }
static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
847 {
848 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
849 return (data+coverage).intersects (glyphs);
850 }
851
852
static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
854 {
855 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
856 intersected_glyphs->add (g);
857 }
static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
859 {
860 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
861 class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
862 }
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
864 {
865 Offset16To<Coverage> coverage;
866 coverage = value;
867 (data+coverage).intersected_coverage_glyphs (glyphs, intersected_glyphs);
868 }
869
870
static inline bool array_is_subset_of (const hb_set_t *glyphs,
                                       unsigned int count,
                                       const HBUINT16 values[],
                                       intersects_func_t intersects_func,
                                       const void *intersects_data)
876 {
877 for (const HBUINT16 &_ : + hb_iter (values, count))
878 if (!intersects_func (glyphs, _, intersects_data)) return false;
879 return true;
880 }
881
882
static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
884 {
885 glyphs->add (value);
886 }
static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
888 {
889 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
890 class_def.collect_class (glyphs, value);
891 }
static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
893 {
894 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
895 (data+coverage).collect_coverage (glyphs);
896 }
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
                                  hb_set_t *glyphs,
                                  unsigned int count,
                                  const HBUINT16 values[],
                                  collect_glyphs_func_t collect_func,
                                  const void *collect_data)
903 {
904 return
905 + hb_iter (values, count)
906 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
907 ;
908 }
909
910
static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
912 {
913 return glyph_id == value;
914 }
static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
916 {
917 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
918 return class_def.get_class (glyph_id) == value;
919 }
static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
921 {
922 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
923 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
924 }
925
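/* would_match_input() checks a fixed glyph array with no buffer and no
 * skipping; match_input() walks the buffer with the skipping iterator,
 * recording the matched positions and enforcing the ligature-component
 * constraints described in the comment inside it. */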
static inline bool would_match_input (hb_would_apply_context_t *c,
                                      unsigned int count, /* Including the first glyph (not matched) */
                                      const HBUINT16 input[], /* Array of input values--start with second glyph */
                                      match_func_t match_func,
                                      const void *match_data)
931 {
932 if (count != c->len)
933 return false;
934
935 for (unsigned int i = 1; i < count; i++)
936 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
937 return false;
938
939 return true;
940 }
static inline bool match_input (hb_ot_apply_context_t *c,
                                unsigned int count, /* Including the first glyph (not matched) */
                                const HBUINT16 input[], /* Array of input values--start with second glyph */
                                match_func_t match_func,
                                const void *match_data,
                                unsigned int *end_offset,
                                unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
                                unsigned int *p_total_component_count = nullptr)
949 {
950 TRACE_APPLY (nullptr);
951
952 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
953
954 hb_buffer_t *buffer = c->buffer;
955
956 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
957 skippy_iter.reset (buffer->idx, count - 1);
958 skippy_iter.set_match_func (match_func, match_data, input);
959
960 /*
961 * This is perhaps the trickiest part of OpenType... Remarks:
962 *
963 * - If all components of the ligature were marks, we call this a mark ligature.
964 *
965 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
966 * it as a ligature glyph.
967 *
968 * - Ligatures cannot be formed across glyphs attached to different components
969 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
971 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
972 * There are a couple of exceptions to this:
973 *
974 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
975 * assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct),
977 *
978 * o If two marks want to ligate and they belong to different components of the
979 * same ligature glyph, and said ligature glyph is to be ignored according to
980 * mark-filtering rules, then allow.
981 * https://github.com/harfbuzz/harfbuzz/issues/545
982 */
983
984 unsigned int total_component_count = 0;
985 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
986
987 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
988 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
989
990 enum {
991 LIGBASE_NOT_CHECKED,
992 LIGBASE_MAY_NOT_SKIP,
993 LIGBASE_MAY_SKIP
994 } ligbase = LIGBASE_NOT_CHECKED;
995
996 match_positions[0] = buffer->idx;
997 for (unsigned int i = 1; i < count; i++)
998 {
999 if (!skippy_iter.next ()) return_trace (false);
1000
1001 match_positions[i] = skippy_iter.idx;
1002
1003 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
1004 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
1005
1006 if (first_lig_id && first_lig_comp)
1007 {
1008 /* If first component was attached to a previous ligature component,
1009 * all subsequent components should be attached to the same ligature
1010 * component, otherwise we shouldn't ligate them... */
1011 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
1012 {
1013 /* ...unless, we are attached to a base ligature and that base
1014 * ligature is ignorable. */
1015 if (ligbase == LIGBASE_NOT_CHECKED)
1016 {
1017 bool found = false;
1018 const auto *out = buffer->out_info;
1019 unsigned int j = buffer->out_len;
1020 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
1021 {
1022 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
1023 {
1024 j--;
1025 found = true;
1026 break;
1027 }
1028 j--;
1029 }
1030
1031 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
1032 ligbase = LIGBASE_MAY_SKIP;
1033 else
1034 ligbase = LIGBASE_MAY_NOT_SKIP;
1035 }
1036
1037 if (ligbase == LIGBASE_MAY_NOT_SKIP)
1038 return_trace (false);
1039 }
1040 }
1041 else
1042 {
1043 /* If first component was NOT attached to a previous ligature component,
1044 * all subsequent components should also NOT be attached to any ligature
1045 * component, unless they are attached to the first component itself! */
1046 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
1047 return_trace (false);
1048 }
1049
1050 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
1051 }
1052
1053 *end_offset = skippy_iter.idx - buffer->idx + 1;
1054
1055 if (p_total_component_count)
1056 *p_total_component_count = total_component_count;
1057
1058 return_trace (true);
1059 }
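/* ligate_input() replaces the glyphs matched by match_input() with
 * lig_glyph, allocating a new ligature id when appropriate and renumbering
 * the ligature components of any trailing marks so that GPOS mark
 * attachment keeps working; see the comment inside for the gory details. */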
static inline bool ligate_input (hb_ot_apply_context_t *c,
                                 unsigned int count, /* Including the first glyph */
                                 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
                                 unsigned int match_length,
                                 hb_codepoint_t lig_glyph,
                                 unsigned int total_component_count)
1066 {
1067 TRACE_APPLY (nullptr);
1068
1069 hb_buffer_t *buffer = c->buffer;
1070
1071 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
1072
1073 /* - If a base and one or more marks ligate, consider that as a base, NOT
1074 * ligature, such that all following marks can still attach to it.
1075 * https://github.com/harfbuzz/harfbuzz/issues/1109
1076 *
1077 * - If all components of the ligature were marks, we call this a mark ligature.
1078 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1079 * the ligature to keep its old ligature id. This will allow it to attach to
1080 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1081 * and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
1082 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
1083 * later, we don't want them to lose their ligature id/component, otherwise
1084 * GPOS will fail to correctly position the mark ligature on top of the
1085 * LAM,LAM,HEH ligature. See:
1086 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
1087 *
   * - If a ligature is formed of components, some of which are also ligatures
1089 * themselves, and those ligature components had marks attached to *their*
1090 * components, we have to attach the marks to the new ligature component
1091 * positions! Now *that*'s tricky! And these marks may be following the
1092 * last component of the whole sequence, so we should loop forward looking
1093 * for them and update them.
1094 *
1095 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1096 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1097 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
1098 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
1099 * the new ligature with a component value of 2.
1100 *
1101 * This in fact happened to a font... See:
1102 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
1103 */
1104
1105 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1106 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1107 for (unsigned int i = 1; i < count; i++)
1108 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1109 {
1110 is_base_ligature = false;
1111 is_mark_ligature = false;
1112 break;
1113 }
1114 bool is_ligature = !is_base_ligature && !is_mark_ligature;
1115
1116 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1117 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1118 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1119 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1120 unsigned int components_so_far = last_num_components;
1121
1122 if (is_ligature)
1123 {
1124 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
1125 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1126 {
1127 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1128 }
1129 }
1130 c->replace_glyph_with_ligature (lig_glyph, klass);
1131
1132 for (unsigned int i = 1; i < count; i++)
1133 {
1134 while (buffer->idx < match_positions[i] && buffer->successful)
1135 {
1136 if (is_ligature)
1137 {
1138 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1139 if (this_comp == 0)
1140 this_comp = last_num_components;
1141 unsigned int new_lig_comp = components_so_far - last_num_components +
1142 hb_min (this_comp, last_num_components);
1143 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1144 }
1145 (void) buffer->next_glyph ();
1146 }
1147
1148 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1149 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1150 components_so_far += last_num_components;
1151
1152 /* Skip the base glyph */
1153 buffer->idx++;
1154 }
1155
1156 if (!is_mark_ligature && last_lig_id)
1157 {
1158 /* Re-adjust components for any marks following. */
1159 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1160 {
1161 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1162
1163 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1164 if (!this_comp) break;
1165
1166 unsigned new_lig_comp = components_so_far - last_num_components +
1167 hb_min (this_comp, last_num_components);
1168 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1169 }
1170 }
1171 return_trace (true);
1172 }
1173
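/* match_backtrack() and match_lookahead() verify the context glyphs before
 * and after the matched input sequence for (chain) contextual lookups,
 * using iter_context so that context matching has its own skipping rules. */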
static inline bool match_backtrack (hb_ot_apply_context_t *c,
                                    unsigned int count,
                                    const HBUINT16 backtrack[],
                                    match_func_t match_func,
                                    const void *match_data,
                                    unsigned int *match_start)
1180 {
1181 TRACE_APPLY (nullptr);
1182
1183 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1184 skippy_iter.reset (c->buffer->backtrack_len (), count);
1185 skippy_iter.set_match_func (match_func, match_data, backtrack);
1186
1187 for (unsigned int i = 0; i < count; i++)
1188 if (!skippy_iter.prev ())
1189 return_trace (false);
1190
1191 *match_start = skippy_iter.idx;
1192
1193 return_trace (true);
1194 }
1195
static inline bool match_lookahead (hb_ot_apply_context_t *c,
                                    unsigned int count,
                                    const HBUINT16 lookahead[],
                                    match_func_t match_func,
                                    const void *match_data,
                                    unsigned int offset,
                                    unsigned int *end_index)
1203 {
1204 TRACE_APPLY (nullptr);
1205
1206 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1207 skippy_iter.reset (c->buffer->idx + offset - 1, count);
1208 skippy_iter.set_match_func (match_func, match_data, lookahead);
1209
1210 for (unsigned int i = 0; i < count; i++)
1211 if (!skippy_iter.next ())
1212 return_trace (false);
1213
1214 *end_index = skippy_iter.idx + 1;
1215
1216 return_trace (true);
1217 }
1218
1219
1220
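/* A LookupRecord names which nested lookup (lookupListIndex) to apply at
 * which position (sequenceIndex) of the matched input sequence. */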
1221 struct LookupRecord
1222 {
  bool serialize (hb_serialize_context_t *c,
                  const hb_map_t *lookup_map) const
1225 {
1226 TRACE_SERIALIZE (this);
1227 auto *out = c->embed (*this);
1228 if (unlikely (!out)) return_trace (false);
1229
1230 return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
1231 }
1232
  bool sanitize (hb_sanitize_context_t *c) const
1234 {
1235 TRACE_SANITIZE (this);
1236 return_trace (c->check_struct (this));
1237 }
1238
1239 HBUINT16 sequenceIndex; /* Index into current glyph
1240 * sequence--first glyph = 0 */
1241 HBUINT16 lookupListIndex; /* Lookup to apply to that
1242 * position--zero--based */
1243 public:
1244 DEFINE_SIZE_STATIC (4);
1245 };
1246
static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
                                              const hb_array_t<const LookupRecord> lookupRecords,
                                              const hb_map_t *lookup_map)
1250 {
1251 unsigned count = 0;
1252 for (const LookupRecord& r : lookupRecords)
1253 {
1254 if (!lookup_map->has (r.lookupListIndex))
1255 continue;
1256
1257 if (!r.serialize (c, lookup_map))
1258 return 0;
1259
1260 count++;
1261 }
1262 return count;
1263 }
1264
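/* The three (Chain)Context subtable formats: rules keyed by glyph ids,
 * by glyph classes, or by coverage tables. */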
1265 enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
1266
static void context_closure_recurse_lookups (hb_closure_context_t *c,
                                             unsigned inputCount, const HBUINT16 input[],
                                             unsigned lookupCount,
                                             const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
                                             unsigned value,
                                             ContextFormat context_format,
                                             const void *data,
                                             intersected_glyphs_func_t intersected_glyphs_func)
1275 {
1276 hb_set_t *covered_seq_indicies = hb_set_create ();
1277 for (unsigned int i = 0; i < lookupCount; i++)
1278 {
1279 unsigned seqIndex = lookupRecord[i].sequenceIndex;
1280 if (seqIndex >= inputCount) continue;
1281
1282 hb_set_t *pos_glyphs = nullptr;
1283
1284 if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
1285 {
1286 pos_glyphs = hb_set_create ();
1287 if (seqIndex == 0)
1288 {
1289 switch (context_format) {
1290 case ContextFormat::SimpleContext:
1291 pos_glyphs->add (value);
1292 break;
1293 case ContextFormat::ClassBasedContext:
1294 intersected_glyphs_func (c->cur_intersected_glyphs, data, value, pos_glyphs);
1295 break;
1296 case ContextFormat::CoverageBasedContext:
1297 hb_set_set (pos_glyphs, c->cur_intersected_glyphs);
1298 break;
1299 }
1300 }
1301 else
1302 {
1303 const void *input_data = input;
1304 unsigned input_value = seqIndex - 1;
1305 if (context_format != ContextFormat::SimpleContext)
1306 {
1307 input_data = data;
1308 input_value = input[seqIndex - 1];
1309 }
1310
1311 intersected_glyphs_func (c->glyphs, input_data, input_value, pos_glyphs);
1312 }
1313 }
1314
1315 hb_set_add (covered_seq_indicies, seqIndex);
1316 if (pos_glyphs)
1317 c->push_cur_active_glyphs (pos_glyphs);
1318
1319 unsigned endIndex = inputCount;
1320 if (context_format == ContextFormat::CoverageBasedContext)
1321 endIndex += 1;
1322
1323 c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);
1324
1325 if (pos_glyphs) {
1326 c->pop_cur_done_glyphs ();
1327 hb_set_destroy (pos_glyphs);
1328 }
1329 }
1330
1331 hb_set_destroy (covered_seq_indicies);
1332 }
1333
1334 template <typename context_t>
static inline void recurse_lookups (context_t *c,
                                    unsigned int lookupCount,
                                    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1338 {
1339 for (unsigned int i = 0; i < lookupCount; i++)
1340 c->recurse (lookupRecord[i].lookupListIndex);
1341 }
1342
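/* apply_lookup() replays the LookupRecords over the matched positions,
 * moving the buffer to each recorded position and fixing up
 * match_positions when a recursed lookup inserts or removes glyphs. */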
static inline bool apply_lookup (hb_ot_apply_context_t *c,
                                 unsigned int count, /* Including the first glyph */
                                 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
                                 unsigned int lookupCount,
                                 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
                                 unsigned int match_length)
1349 {
1350 TRACE_APPLY (nullptr);
1351
1352 hb_buffer_t *buffer = c->buffer;
1353 int end;
1354
1355 /* All positions are distance from beginning of *output* buffer.
1356 * Adjust. */
1357 {
1358 unsigned int bl = buffer->backtrack_len ();
1359 end = bl + match_length;
1360
1361 int delta = bl - buffer->idx;
1362 /* Convert positions to new indexing. */
1363 for (unsigned int j = 0; j < count; j++)
1364 match_positions[j] += delta;
1365 }
1366
1367 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1368 {
1369 unsigned int idx = lookupRecord[i].sequenceIndex;
1370 if (idx >= count)
1371 continue;
1372
1373 /* Don't recurse to ourself at same position.
1374 * Note that this test is too naive, it doesn't catch longer loops. */
1375 if (unlikely (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index))
1376 continue;
1377
1378 if (unlikely (!buffer->move_to (match_positions[idx])))
1379 break;
1380
1381 if (unlikely (buffer->max_ops <= 0))
1382 break;
1383
1384 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1385 if (!c->recurse (lookupRecord[i].lookupListIndex))
1386 continue;
1387
1388 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1389 int delta = new_len - orig_len;
1390
1391 if (!delta)
1392 continue;
1393
1394 /* Recursed lookup changed buffer len. Adjust.
1395 *
1396 * TODO:
1397 *
1398 * Right now, if buffer length increased by n, we assume n new glyphs
1399 * were added right after the current position, and if buffer length
1400 * was decreased by n, we assume n match positions after the current
     * one were removed.  The former (buffer length increased) case is
1402 * fine, but the decrease case can be improved in at least two ways,
1403 * both of which are significant:
1404 *
1405 * - If recursed-to lookup is MultipleSubst and buffer length
1406 * decreased, then it's current match position that was deleted,
1407 * NOT the one after it.
1408 *
1409 * - If buffer length was decreased by n, it does not necessarily
     * mean that n match positions were removed, as there might
1411 * have been marks and default-ignorables in the sequence. We
1412 * should instead drop match positions between current-position
1413 * and current-position + n instead. Though, am not sure which
1414 * one is better. Both cases have valid uses. Sigh.
1415 *
1416 * It should be possible to construct tests for both of these cases.
1417 */
1418
1419 end += delta;
1420 if (end <= int (match_positions[idx]))
1421 {
1422 /* End might end up being smaller than match_positions[idx] if the recursed
1423 * lookup ended up removing many items, more than we have had matched.
1424 * Just never rewind end back and get out of here.
1425 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1426 end = match_positions[idx];
1427 /* There can't be any further changes. */
1428 break;
1429 }
1430
1431 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1432
1433 if (delta > 0)
1434 {
1435 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1436 break;
1437 }
1438 else
1439 {
1440 /* NOTE: delta is negative. */
1441 delta = hb_max (delta, (int) next - (int) count);
1442 next -= delta;
1443 }
1444
1445 /* Shift! */
1446 memmove (match_positions + next + delta, match_positions + next,
1447 (count - next) * sizeof (match_positions[0]));
1448 next += delta;
1449 count += delta;
1450
1451 /* Fill in new entries. */
1452 for (unsigned int j = idx + 1; j < next; j++)
1453 match_positions[j] = match_positions[j - 1] + 1;
1454
1455 /* And fixup the rest. */
1456 for (; next < count; next++)
1457 match_positions[next] += delta;
1458 }
1459
1460 (void) buffer->move_to (end);
1461
1462 return_trace (true);
1463 }
1464
1465
1466
1467 /* Contextual lookups */
1468
1469 struct ContextClosureLookupContext
1470 {
1471 ContextClosureFuncs funcs;
1472 ContextFormat context_format;
1473 const void *intersects_data;
1474 };
1475
1476 struct ContextCollectGlyphsLookupContext
1477 {
1478 ContextCollectGlyphsFuncs funcs;
1479 const void *collect_data;
1480 };
1481
1482 struct ContextApplyLookupContext
1483 {
1484 ContextApplyFuncs funcs;
1485 const void *match_data;
1486 };
1487
static inline bool context_intersects (const hb_set_t *glyphs,
                                       unsigned int inputCount, /* Including the first glyph (not matched) */
                                       const HBUINT16 input[], /* Array of input values--start with second glyph */
                                       ContextClosureLookupContext &lookup_context)
1492 {
1493 return array_is_subset_of (glyphs,
1494 inputCount ? inputCount - 1 : 0, input,
1495 lookup_context.funcs.intersects, lookup_context.intersects_data);
1496 }
1497
context_closure_lookup(hb_closure_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ContextClosureLookupContext & lookup_context)1498 static inline void context_closure_lookup (hb_closure_context_t *c,
1499 unsigned int inputCount, /* Including the first glyph (not matched) */
1500 const HBUINT16 input[], /* Array of input values--start with second glyph */
1501 unsigned int lookupCount,
1502 const LookupRecord lookupRecord[],
1503 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1504 ContextClosureLookupContext &lookup_context)
1505 {
1506 if (context_intersects (c->glyphs,
1507 inputCount, input,
1508 lookup_context))
1509 context_closure_recurse_lookups (c,
1510 inputCount, input,
1511 lookupCount, lookupRecord,
1512 value,
1513 lookup_context.context_format,
1514 lookup_context.intersects_data,
1515 lookup_context.funcs.intersected_glyphs);
1516 }
1517
context_collect_glyphs_lookup(hb_collect_glyphs_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextCollectGlyphsLookupContext & lookup_context)1518 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1519 unsigned int inputCount, /* Including the first glyph (not matched) */
1520 const HBUINT16 input[], /* Array of input values--start with second glyph */
1521 unsigned int lookupCount,
1522 const LookupRecord lookupRecord[],
1523 ContextCollectGlyphsLookupContext &lookup_context)
1524 {
1525 collect_array (c, c->input,
1526 inputCount ? inputCount - 1 : 0, input,
1527 lookup_context.funcs.collect, lookup_context.collect_data);
1528 recurse_lookups (c,
1529 lookupCount, lookupRecord);
1530 }
1531
context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,ContextApplyLookupContext & lookup_context)1532 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1533 unsigned int inputCount, /* Including the first glyph (not matched) */
1534 const HBUINT16 input[], /* Array of input values--start with second glyph */
1535 unsigned int lookupCount HB_UNUSED,
1536 const LookupRecord lookupRecord[] HB_UNUSED,
1537 ContextApplyLookupContext &lookup_context)
1538 {
1539 return would_match_input (c,
1540 inputCount, input,
1541 lookup_context.funcs.match, lookup_context.match_data);
1542 }
context_apply_lookup(hb_ot_apply_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextApplyLookupContext & lookup_context)1543 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1544 unsigned int inputCount, /* Including the first glyph (not matched) */
1545 const HBUINT16 input[], /* Array of input values--start with second glyph */
1546 unsigned int lookupCount,
1547 const LookupRecord lookupRecord[],
1548 ContextApplyLookupContext &lookup_context)
1549 {
1550 unsigned int match_length = 0;
1551 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1552 return match_input (c,
1553 inputCount, input,
1554 lookup_context.funcs.match, lookup_context.match_data,
1555 &match_length, match_positions)
1556 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
1557 apply_lookup (c,
1558 inputCount, match_positions,
1559 lookupCount, lookupRecord,
1560 match_length));
1561 }
1562
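/* A single context rule: an input sequence stored without its first element
 * (that one is implied by the Coverage index or class of the enclosing
 * RuleSet), followed by the LookupRecords to apply at the matched positions. */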
1563 struct Rule
1564 {
intersectsOT::Rule1565 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1566 {
1567 return context_intersects (glyphs,
1568 inputCount, inputZ.arrayZ,
1569 lookup_context);
1570 }
1571
closureOT::Rule1572 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
1573 {
1574 if (unlikely (c->lookup_limit_exceeded ())) return;
1575
1576 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1577 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1578 context_closure_lookup (c,
1579 inputCount, inputZ.arrayZ,
1580 lookupCount, lookupRecord.arrayZ,
1581 value, lookup_context);
1582 }
1583
closure_lookupsOT::Rule1584 void closure_lookups (hb_closure_lookups_context_t *c,
1585 ContextClosureLookupContext &lookup_context) const
1586 {
1587 if (unlikely (c->lookup_limit_exceeded ())) return;
1588 if (!intersects (c->glyphs, lookup_context)) return;
1589
1590 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1591 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1592 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1593 }
1594
collect_glyphsOT::Rule1595 void collect_glyphs (hb_collect_glyphs_context_t *c,
1596 ContextCollectGlyphsLookupContext &lookup_context) const
1597 {
1598 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1599 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1600 context_collect_glyphs_lookup (c,
1601 inputCount, inputZ.arrayZ,
1602 lookupCount, lookupRecord.arrayZ,
1603 lookup_context);
1604 }
1605
would_applyOT::Rule1606 bool would_apply (hb_would_apply_context_t *c,
1607 ContextApplyLookupContext &lookup_context) const
1608 {
1609 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1610 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1611 return context_would_apply_lookup (c,
1612 inputCount, inputZ.arrayZ,
1613 lookupCount, lookupRecord.arrayZ,
1614 lookup_context);
1615 }
1616
applyOT::Rule1617 bool apply (hb_ot_apply_context_t *c,
1618 ContextApplyLookupContext &lookup_context) const
1619 {
1620 TRACE_APPLY (this);
1621 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1622 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1623 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
1624 }
1625
serializeOT::Rule1626 bool serialize (hb_serialize_context_t *c,
1627 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1628 const hb_map_t *lookup_map) const
1629 {
1630 TRACE_SERIALIZE (this);
1631 auto *out = c->start_embed (this);
1632 if (unlikely (!c->extend_min (out))) return_trace (false);
1633
1634 out->inputCount = inputCount;
1635 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1636 for (const auto org : input)
1637 {
1638 HBUINT16 d;
1639 d = input_mapping->get (org);
1640 c->copy (d);
1641 }
1642
1643 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1644 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1645
1646 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
1647 return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
1648 }
1649
subsetOT::Rule1650 bool subset (hb_subset_context_t *c,
1651 const hb_map_t *lookup_map,
1652 const hb_map_t *klass_map = nullptr) const
1653 {
1654 TRACE_SUBSET (this);
1655 if (unlikely (!inputCount)) return_trace (false);
1656 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1657
1658 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1659 if (!hb_all (input, mapping)) return_trace (false);
1660 return_trace (serialize (c->serializer, mapping, lookup_map));
1661 }
1662
1663 public:
sanitizeOT::Rule1664 bool sanitize (hb_sanitize_context_t *c) const
1665 {
1666 TRACE_SANITIZE (this);
1667 return_trace (inputCount.sanitize (c) &&
1668 lookupCount.sanitize (c) &&
1669 c->check_range (inputZ.arrayZ,
1670 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1671 LookupRecord::static_size * lookupCount));
1672 }
1673
1674 protected:
1675 HBUINT16 inputCount; /* Total number of glyphs in input
1676 * glyph sequence--includes the first
1677 * glyph */
1678 HBUINT16 lookupCount; /* Number of LookupRecords */
1679 UnsizedArrayOf<HBUINT16>
1680 inputZ; /* Array of match inputs--start with
1681 * second glyph */
1682 /*UnsizedArrayOf<LookupRecord>
1683 lookupRecordX;*/ /* Array of LookupRecords--in
1684 * design order */
1685 public:
1686 DEFINE_SIZE_ARRAY (4, inputZ);
1687 };
1688
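/* Set of Rules sharing the same first glyph (Format 1) or class (Format 2).
 * In apply(), rules are tried in order and the first one that matches wins. */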
1689 struct RuleSet
1690 {
intersectsOT::RuleSet1691 bool intersects (const hb_set_t *glyphs,
1692 ContextClosureLookupContext &lookup_context) const
1693 {
1694 return
1695 + hb_iter (rule)
1696 | hb_map (hb_add (this))
1697 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
1698 | hb_any
1699 ;
1700 }
1701
closureOT::RuleSet1702 void closure (hb_closure_context_t *c, unsigned value,
1703 ContextClosureLookupContext &lookup_context) const
1704 {
1705 if (unlikely (c->lookup_limit_exceeded ())) return;
1706
1707 return
1708 + hb_iter (rule)
1709 | hb_map (hb_add (this))
1710 | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
1711 ;
1712 }
1713
closure_lookupsOT::RuleSet1714 void closure_lookups (hb_closure_lookups_context_t *c,
1715 ContextClosureLookupContext &lookup_context) const
1716 {
1717 if (unlikely (c->lookup_limit_exceeded ())) return;
1718 + hb_iter (rule)
1719 | hb_map (hb_add (this))
1720 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
1721 ;
1722 }
1723
collect_glyphsOT::RuleSet1724 void collect_glyphs (hb_collect_glyphs_context_t *c,
1725 ContextCollectGlyphsLookupContext &lookup_context) const
1726 {
1727 return
1728 + hb_iter (rule)
1729 | hb_map (hb_add (this))
1730 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
1731 ;
1732 }
1733
would_applyOT::RuleSet1734 bool would_apply (hb_would_apply_context_t *c,
1735 ContextApplyLookupContext &lookup_context) const
1736 {
1737 return
1738 + hb_iter (rule)
1739 | hb_map (hb_add (this))
1740 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
1741 | hb_any
1742 ;
1743 }
1744
applyOT::RuleSet1745 bool apply (hb_ot_apply_context_t *c,
1746 ContextApplyLookupContext &lookup_context) const
1747 {
1748 TRACE_APPLY (this);
1749 return_trace (
1750 + hb_iter (rule)
1751 | hb_map (hb_add (this))
1752 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
1753 | hb_any
1754 )
1755 ;
1756 }
1757
subsetOT::RuleSet1758 bool subset (hb_subset_context_t *c,
1759 const hb_map_t *lookup_map,
1760 const hb_map_t *klass_map = nullptr) const
1761 {
1762 TRACE_SUBSET (this);
1763
1764 auto snap = c->serializer->snapshot ();
1765 auto *out = c->serializer->start_embed (*this);
1766 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1767
1768 for (const Offset16To<Rule>& _ : rule)
1769 {
1770 if (!_) continue;
1771 auto o_snap = c->serializer->snapshot ();
1772 auto *o = out->rule.serialize_append (c->serializer);
1773 if (unlikely (!o)) continue;
1774
1775 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1776 {
1777 out->rule.pop ();
1778 c->serializer->revert (o_snap);
1779 }
1780 }
1781
1782 bool ret = bool (out->rule);
1783 if (!ret) c->serializer->revert (snap);
1784
1785 return_trace (ret);
1786 }
1787
sanitizeOT::RuleSet1788 bool sanitize (hb_sanitize_context_t *c) const
1789 {
1790 TRACE_SANITIZE (this);
1791 return_trace (rule.sanitize (c, this));
1792 }
1793
1794 protected:
1795 Array16OfOffset16To<Rule>
1796 rule; /* Array of Rule tables
1797 * ordered by preference */
1798 public:
1799 DEFINE_SIZE_ARRAY (2, rule);
1800 };
1801
1802
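/* Contextual lookup, Format 1: simple glyph contexts.  The Coverage table
 * selects a RuleSet for the first glyph; each Rule then matches literal
 * glyph IDs. */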
1803 struct ContextFormat1
1804 {
intersectsOT::ContextFormat11805 bool intersects (const hb_set_t *glyphs) const
1806 {
1807 struct ContextClosureLookupContext lookup_context = {
1808 {intersects_glyph, intersected_glyph},
1809 ContextFormat::SimpleContext,
1810 nullptr
1811 };
1812
1813 return
1814 + hb_zip (this+coverage, ruleSet)
1815 | hb_filter (*glyphs, hb_first)
1816 | hb_map (hb_second)
1817 | hb_map (hb_add (this))
1818 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
1819 | hb_any
1820 ;
1821 }
1822
may_have_non_1to1OT::ContextFormat11823 bool may_have_non_1to1 () const
1824 { return true; }
1825
closureOT::ContextFormat11826 void closure (hb_closure_context_t *c) const
1827 {
1828 c->cur_intersected_glyphs->clear ();
1829 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
1830
1831 struct ContextClosureLookupContext lookup_context = {
1832 {intersects_glyph, intersected_glyph},
1833 ContextFormat::SimpleContext,
1834 nullptr
1835 };
1836
1837 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
1838 | hb_filter (c->parent_active_glyphs (), hb_first)
1839 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
1840 | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
1841 ;
1842 }
1843
closure_lookupsOT::ContextFormat11844 void closure_lookups (hb_closure_lookups_context_t *c) const
1845 {
1846 struct ContextClosureLookupContext lookup_context = {
1847 {intersects_glyph, intersected_glyph},
1848 ContextFormat::SimpleContext,
1849 nullptr
1850 };
1851
1852 + hb_zip (this+coverage, ruleSet)
1853 | hb_filter (*c->glyphs, hb_first)
1854 | hb_map (hb_second)
1855 | hb_map (hb_add (this))
1856 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
1857 ;
1858 }
1859
collect_variation_indicesOT::ContextFormat11860 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1861
collect_glyphsOT::ContextFormat11862 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1863 {
1864 (this+coverage).collect_coverage (c->input);
1865
1866 struct ContextCollectGlyphsLookupContext lookup_context = {
1867 {collect_glyph},
1868 nullptr
1869 };
1870
1871 + hb_iter (ruleSet)
1872 | hb_map (hb_add (this))
1873 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1874 ;
1875 }
1876
would_applyOT::ContextFormat11877 bool would_apply (hb_would_apply_context_t *c) const
1878 {
1879 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1880 struct ContextApplyLookupContext lookup_context = {
1881 {match_glyph},
1882 nullptr
1883 };
1884 return rule_set.would_apply (c, lookup_context);
1885 }
1886
get_coverageOT::ContextFormat11887 const Coverage &get_coverage () const { return this+coverage; }
1888
applyOT::ContextFormat11889 bool apply (hb_ot_apply_context_t *c) const
1890 {
1891 TRACE_APPLY (this);
1892 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1893 if (likely (index == NOT_COVERED))
1894 return_trace (false);
1895
1896 const RuleSet &rule_set = this+ruleSet[index];
1897 struct ContextApplyLookupContext lookup_context = {
1898 {match_glyph},
1899 nullptr
1900 };
1901 return_trace (rule_set.apply (c, lookup_context));
1902 }
1903
subsetOT::ContextFormat11904 bool subset (hb_subset_context_t *c) const
1905 {
1906 TRACE_SUBSET (this);
1907 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1908 const hb_map_t &glyph_map = *c->plan->glyph_map;
1909
1910 auto *out = c->serializer->start_embed (*this);
1911 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1912 out->format = format;
1913
1914 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1915 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1916 + hb_zip (this+coverage, ruleSet)
1917 | hb_filter (glyphset, hb_first)
1918 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1919 | hb_map (hb_first)
1920 | hb_map (glyph_map)
1921 | hb_sink (new_coverage)
1922 ;
1923
1924 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
1925 return_trace (bool (new_coverage));
1926 }
1927
sanitizeOT::ContextFormat11928 bool sanitize (hb_sanitize_context_t *c) const
1929 {
1930 TRACE_SANITIZE (this);
1931 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1932 }
1933
1934 protected:
1935 HBUINT16 format; /* Format identifier--format = 1 */
1936 Offset16To<Coverage>
1937 coverage; /* Offset to Coverage table--from
1938 * beginning of table */
1939 Array16OfOffset16To<RuleSet>
1940 ruleSet; /* Array of RuleSet tables
1941 * ordered by Coverage Index */
1942 public:
1943 DEFINE_SIZE_ARRAY (6, ruleSet);
1944 };
1945
1946
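/* Contextual lookup, Format 2: class-based contexts.  Glyphs are mapped
 * through a ClassDef and rules match class values instead of glyph IDs;
 * ruleSet is indexed by the class of the first glyph. */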
1947 struct ContextFormat2
1948 {
intersectsOT::ContextFormat21949 bool intersects (const hb_set_t *glyphs) const
1950 {
1951 if (!(this+coverage).intersects (glyphs))
1952 return false;
1953
1954 const ClassDef &class_def = this+classDef;
1955
1956 struct ContextClosureLookupContext lookup_context = {
1957 {intersects_class, intersected_class_glyphs},
1958 ContextFormat::ClassBasedContext,
1959 &class_def
1960 };
1961
1962 hb_set_t retained_coverage_glyphs;
1963 (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
1964
1965 hb_set_t coverage_glyph_classes;
1966 class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
1967
1968
1969 return
1970 + hb_iter (ruleSet)
1971 | hb_map (hb_add (this))
1972 | hb_enumerate
1973 | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
1974 { return class_def.intersects_class (glyphs, p.first) &&
1975 coverage_glyph_classes.has (p.first) &&
1976 p.second.intersects (glyphs, lookup_context); })
1977 | hb_any
1978 ;
1979 }
1980
may_have_non_1to1OT::ContextFormat21981 bool may_have_non_1to1 () const
1982 { return true; }
1983
closureOT::ContextFormat21984 void closure (hb_closure_context_t *c) const
1985 {
1986 if (!(this+coverage).intersects (c->glyphs))
1987 return;
1988
1989 c->cur_intersected_glyphs->clear ();
1990 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
1991
1992 const ClassDef &class_def = this+classDef;
1993
1994 struct ContextClosureLookupContext lookup_context = {
1995 {intersects_class, intersected_class_glyphs},
1996 ContextFormat::ClassBasedContext,
1997 &class_def
1998 };
1999
2000 return
2001 + hb_enumerate (ruleSet)
2002 | hb_filter ([&] (unsigned _)
2003 { return class_def.intersects_class (c->cur_intersected_glyphs, _); },
2004 hb_first)
2005 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<RuleSet>&> _)
2006 {
2007 const RuleSet& rule_set = this+_.second;
2008 rule_set.closure (c, _.first, lookup_context);
2009 })
2010 ;
2011 }
2012
closure_lookupsOT::ContextFormat22013 void closure_lookups (hb_closure_lookups_context_t *c) const
2014 {
2015 if (!(this+coverage).intersects (c->glyphs))
2016 return;
2017
2018 const ClassDef &class_def = this+classDef;
2019
2020 struct ContextClosureLookupContext lookup_context = {
2021 {intersects_class, intersected_class_glyphs},
2022 ContextFormat::ClassBasedContext,
2023 &class_def
2024 };
2025
2026 + hb_iter (ruleSet)
2027 | hb_map (hb_add (this))
2028 | hb_enumerate
2029 | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2030 { return class_def.intersects_class (c->glyphs, p.first); })
2031 | hb_map (hb_second)
2032 | hb_apply ([&] (const RuleSet & _)
2033 { _.closure_lookups (c, lookup_context); });
2034 }
2035
collect_variation_indicesOT::ContextFormat22036 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2037
collect_glyphsOT::ContextFormat22038 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2039 {
2040 (this+coverage).collect_coverage (c->input);
2041
2042 const ClassDef &class_def = this+classDef;
2043 struct ContextCollectGlyphsLookupContext lookup_context = {
2044 {collect_class},
2045 &class_def
2046 };
2047
2048 + hb_iter (ruleSet)
2049 | hb_map (hb_add (this))
2050 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2051 ;
2052 }
2053
would_applyOT::ContextFormat22054 bool would_apply (hb_would_apply_context_t *c) const
2055 {
2056 const ClassDef &class_def = this+classDef;
2057 unsigned int index = class_def.get_class (c->glyphs[0]);
2058 const RuleSet &rule_set = this+ruleSet[index];
2059 struct ContextApplyLookupContext lookup_context = {
2060 {match_class},
2061 &class_def
2062 };
2063 return rule_set.would_apply (c, lookup_context);
2064 }
2065
get_coverageOT::ContextFormat22066 const Coverage &get_coverage () const { return this+coverage; }
2067
applyOT::ContextFormat22068 bool apply (hb_ot_apply_context_t *c) const
2069 {
2070 TRACE_APPLY (this);
2071 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2072 if (likely (index == NOT_COVERED)) return_trace (false);
2073
2074 const ClassDef &class_def = this+classDef;
2075 index = class_def.get_class (c->buffer->cur().codepoint);
2076 const RuleSet &rule_set = this+ruleSet[index];
2077 struct ContextApplyLookupContext lookup_context = {
2078 {match_class},
2079 &class_def
2080 };
2081 return_trace (rule_set.apply (c, lookup_context));
2082 }
2083
subsetOT::ContextFormat22084 bool subset (hb_subset_context_t *c) const
2085 {
2086 TRACE_SUBSET (this);
2087 auto *out = c->serializer->start_embed (*this);
2088 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2089 out->format = format;
2090 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
2091 return_trace (false);
2092
2093 hb_map_t klass_map;
2094 out->classDef.serialize_subset (c, classDef, this, &klass_map);
2095
2096 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
2097 hb_set_t retained_coverage_glyphs;
2098 (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
2099
2100 hb_set_t coverage_glyph_classes;
2101 (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2102
2103 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2104 bool ret = true;
2105 int non_zero_index = -1, index = 0;
2106 for (const auto& _ : + hb_enumerate (ruleSet)
2107 | hb_filter (klass_map, hb_first))
2108 {
2109 auto *o = out->ruleSet.serialize_append (c->serializer);
2110 if (unlikely (!o))
2111 {
2112 ret = false;
2113 break;
2114 }
2115
2116 if (coverage_glyph_classes.has (_.first) &&
2117 o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
2118 non_zero_index = index;
2119
2120 index++;
2121 }
2122
2123 if (!ret || non_zero_index == -1) return_trace (false);
2124
2125 /* Prune empty trailing ruleSets. */
2126 --index;
2127 while (index > non_zero_index)
2128 {
2129 out->ruleSet.pop ();
2130 index--;
2131 }
2132
2133 return_trace (bool (out->ruleSet));
2134 }
2135
sanitizeOT::ContextFormat22136 bool sanitize (hb_sanitize_context_t *c) const
2137 {
2138 TRACE_SANITIZE (this);
2139 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
2140 }
2141
2142 protected:
2143 HBUINT16 format; /* Format identifier--format = 2 */
2144 Offset16To<Coverage>
2145 coverage; /* Offset to Coverage table--from
2146 * beginning of table */
2147 Offset16To<ClassDef>
2148 classDef; /* Offset to glyph ClassDef table--from
2149 * beginning of table */
2150 Array16OfOffset16To<RuleSet>
2151 ruleSet; /* Array of RuleSet tables
2152 * ordered by class */
2153 public:
2154 DEFINE_SIZE_ARRAY (8, ruleSet);
2155 };
2156
2157
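/* Contextual lookup, Format 3: coverage-based contexts.  A single rule in
 * which every input position has its own Coverage table; coverageZ[0] also
 * serves as the subtable's primary coverage. */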
2158 struct ContextFormat3
2159 {
intersectsOT::ContextFormat32160 bool intersects (const hb_set_t *glyphs) const
2161 {
2162 if (!(this+coverageZ[0]).intersects (glyphs))
2163 return false;
2164
2165 struct ContextClosureLookupContext lookup_context = {
2166 {intersects_coverage, intersected_coverage_glyphs},
2167 ContextFormat::CoverageBasedContext,
2168 this
2169 };
2170 return context_intersects (glyphs,
2171 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2172 lookup_context);
2173 }
2174
may_have_non_1to1OT::ContextFormat32175 bool may_have_non_1to1 () const
2176 { return true; }
2177
closureOT::ContextFormat32178 void closure (hb_closure_context_t *c) const
2179 {
2180 if (!(this+coverageZ[0]).intersects (c->glyphs))
2181 return;
2182
2183 c->cur_intersected_glyphs->clear ();
2184 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
2185
2186 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2187 struct ContextClosureLookupContext lookup_context = {
2188 {intersects_coverage, intersected_coverage_glyphs},
2189 ContextFormat::CoverageBasedContext,
2190 this
2191 };
2192 context_closure_lookup (c,
2193 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2194 lookupCount, lookupRecord,
2195 0, lookup_context);
2196 }
2197
closure_lookupsOT::ContextFormat32198 void closure_lookups (hb_closure_lookups_context_t *c) const
2199 {
2200 if (!intersects (c->glyphs))
2201 return;
2202 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2203 recurse_lookups (c, lookupCount, lookupRecord);
2204 }
2205
collect_variation_indicesOT::ContextFormat32206 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2207
collect_glyphsOT::ContextFormat32208 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2209 {
2210 (this+coverageZ[0]).collect_coverage (c->input);
2211
2212 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2213 struct ContextCollectGlyphsLookupContext lookup_context = {
2214 {collect_coverage},
2215 this
2216 };
2217
2218 context_collect_glyphs_lookup (c,
2219 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2220 lookupCount, lookupRecord,
2221 lookup_context);
2222 }
2223
would_applyOT::ContextFormat32224 bool would_apply (hb_would_apply_context_t *c) const
2225 {
2226 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2227 struct ContextApplyLookupContext lookup_context = {
2228 {match_coverage},
2229 this
2230 };
2231 return context_would_apply_lookup (c,
2232 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2233 lookupCount, lookupRecord,
2234 lookup_context);
2235 }
2236
get_coverageOT::ContextFormat32237 const Coverage &get_coverage () const { return this+coverageZ[0]; }
2238
applyOT::ContextFormat32239 bool apply (hb_ot_apply_context_t *c) const
2240 {
2241 TRACE_APPLY (this);
2242 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2243 if (likely (index == NOT_COVERED)) return_trace (false);
2244
2245 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2246 struct ContextApplyLookupContext lookup_context = {
2247 {match_coverage},
2248 this
2249 };
2250 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
2251 }
2252
subsetOT::ContextFormat32253 bool subset (hb_subset_context_t *c) const
2254 {
2255 TRACE_SUBSET (this);
2256 auto *out = c->serializer->start_embed (this);
2257 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2258
2259 out->format = format;
2260 out->glyphCount = glyphCount;
2261
2262 auto coverages = coverageZ.as_array (glyphCount);
2263
2264 for (const Offset16To<Coverage>& offset : coverages)
2265 {
2266 /* TODO(subset) This looks like it should not be necessary to write this way. */
2267 auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
2268 if (unlikely (!o)) return_trace (false);
2269 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2270 }
2271
2272 const UnsizedArrayOf<LookupRecord>& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
2273 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2274
2275
2276 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
2277 return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2278 }
2279
sanitizeOT::ContextFormat32280 bool sanitize (hb_sanitize_context_t *c) const
2281 {
2282 TRACE_SANITIZE (this);
2283 if (!c->check_struct (this)) return_trace (false);
2284 unsigned int count = glyphCount;
2285 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2286 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2287 for (unsigned int i = 0; i < count; i++)
2288 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2289 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2290 return_trace (c->check_array (lookupRecord, lookupCount));
2291 }
2292
2293 protected:
2294 HBUINT16 format; /* Format identifier--format = 3 */
2295 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2296 * sequence */
2297 HBUINT16 lookupCount; /* Number of LookupRecords */
2298 UnsizedArrayOf<Offset16To<Coverage>>
2299 coverageZ; /* Array of offsets to Coverage
2300 * table in glyph sequence order */
2301 /*UnsizedArrayOf<LookupRecord>
2302 lookupRecordX;*/ /* Array of LookupRecords--in
2303 * design order */
2304 public:
2305 DEFINE_SIZE_ARRAY (6, coverageZ);
2306 };
2307
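/* Top-level Context subtable: dispatches on the leading format field to one
 * of the three formats above. */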
2308 struct Context
2309 {
2310 template <typename context_t, typename ...Ts>
dispatchOT::Context2311 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2312 {
2313 TRACE_DISPATCH (this, u.format);
2314 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2315 switch (u.format) {
2316 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2317 case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
2318 case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
2319 default:return_trace (c->default_return_value ());
2320 }
2321 }
2322
2323 protected:
2324 union {
2325 HBUINT16 format; /* Format identifier */
2326 ContextFormat1 format1;
2327 ContextFormat2 format2;
2328 ContextFormat3 format3;
2329 } u;
2330 };
2331
2332
2333 /* Chaining Contextual lookups */
2334
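/* Same idea as the Context lookup contexts above, but with three data slots:
 * one each for the backtrack, input, and lookahead sequences. */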
2335 struct ChainContextClosureLookupContext
2336 {
2337 ContextClosureFuncs funcs;
2338 ContextFormat context_format;
2339 const void *intersects_data[3];
2340 };
2341
2342 struct ChainContextCollectGlyphsLookupContext
2343 {
2344 ContextCollectGlyphsFuncs funcs;
2345 const void *collect_data[3];
2346 };
2347
2348 struct ChainContextApplyLookupContext
2349 {
2350 ContextApplyFuncs funcs;
2351 const void *match_data[3];
2352 };
2353
chain_context_intersects(const hb_set_t * glyphs,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],ChainContextClosureLookupContext & lookup_context)2354 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2355 unsigned int backtrackCount,
2356 const HBUINT16 backtrack[],
2357 unsigned int inputCount, /* Including the first glyph (not matched) */
2358 const HBUINT16 input[], /* Array of input values--start with second glyph */
2359 unsigned int lookaheadCount,
2360 const HBUINT16 lookahead[],
2361 ChainContextClosureLookupContext &lookup_context)
2362 {
2363 return array_is_subset_of (glyphs,
2364 backtrackCount, backtrack,
2365 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2366 && array_is_subset_of (glyphs,
2367 inputCount ? inputCount - 1 : 0, input,
2368 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2369 && array_is_subset_of (glyphs,
2370 lookaheadCount, lookahead,
2371 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
2372 }
2373
chain_context_closure_lookup(hb_closure_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ChainContextClosureLookupContext & lookup_context)2374 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2375 unsigned int backtrackCount,
2376 const HBUINT16 backtrack[],
2377 unsigned int inputCount, /* Including the first glyph (not matched) */
2378 const HBUINT16 input[], /* Array of input values--start with second glyph */
2379 unsigned int lookaheadCount,
2380 const HBUINT16 lookahead[],
2381 unsigned int lookupCount,
2382 const LookupRecord lookupRecord[],
2383 unsigned value,
2384 ChainContextClosureLookupContext &lookup_context)
2385 {
2386 if (chain_context_intersects (c->glyphs,
2387 backtrackCount, backtrack,
2388 inputCount, input,
2389 lookaheadCount, lookahead,
2390 lookup_context))
2391 context_closure_recurse_lookups (c,
2392 inputCount, input,
2393 lookupCount, lookupRecord,
2394 value,
2395 lookup_context.context_format,
2396 lookup_context.intersects_data[1],
2397 lookup_context.funcs.intersected_glyphs);
2398 }
2399
chain_context_collect_glyphs_lookup(hb_collect_glyphs_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],unsigned int lookupCount,const LookupRecord lookupRecord[],ChainContextCollectGlyphsLookupContext & lookup_context)2400 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2401 unsigned int backtrackCount,
2402 const HBUINT16 backtrack[],
2403 unsigned int inputCount, /* Including the first glyph (not matched) */
2404 const HBUINT16 input[], /* Array of input values--start with second glyph */
2405 unsigned int lookaheadCount,
2406 const HBUINT16 lookahead[],
2407 unsigned int lookupCount,
2408 const LookupRecord lookupRecord[],
2409 ChainContextCollectGlyphsLookupContext &lookup_context)
2410 {
2411 collect_array (c, c->before,
2412 backtrackCount, backtrack,
2413 lookup_context.funcs.collect, lookup_context.collect_data[0]);
2414 collect_array (c, c->input,
2415 inputCount ? inputCount - 1 : 0, input,
2416 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2417 collect_array (c, c->after,
2418 lookaheadCount, lookahead,
2419 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2420 recurse_lookups (c,
2421 lookupCount, lookupRecord);
2422 }
2423
chain_context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[]HB_UNUSED,unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[]HB_UNUSED,unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,ChainContextApplyLookupContext & lookup_context)2424 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2425 unsigned int backtrackCount,
2426 const HBUINT16 backtrack[] HB_UNUSED,
2427 unsigned int inputCount, /* Including the first glyph (not matched) */
2428 const HBUINT16 input[], /* Array of input values--start with second glyph */
2429 unsigned int lookaheadCount,
2430 const HBUINT16 lookahead[] HB_UNUSED,
2431 unsigned int lookupCount HB_UNUSED,
2432 const LookupRecord lookupRecord[] HB_UNUSED,
2433 ChainContextApplyLookupContext &lookup_context)
2434 {
2435 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2436 && would_match_input (c,
2437 inputCount, input,
2438 lookup_context.funcs.match, lookup_context.match_data[1]);
2439 }
2440
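/* Note on match order below: the input sequence is matched first (from the
 * current buffer position), then the backtrack sequence (scanning backwards
 * from the match start), then the lookahead (past the matched input). */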
chain_context_apply_lookup(hb_ot_apply_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],unsigned int lookupCount,const LookupRecord lookupRecord[],ChainContextApplyLookupContext & lookup_context)2441 static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2442 unsigned int backtrackCount,
2443 const HBUINT16 backtrack[],
2444 unsigned int inputCount, /* Including the first glyph (not matched) */
2445 const HBUINT16 input[], /* Array of input values--start with second glyph */
2446 unsigned int lookaheadCount,
2447 const HBUINT16 lookahead[],
2448 unsigned int lookupCount,
2449 const LookupRecord lookupRecord[],
2450 ChainContextApplyLookupContext &lookup_context)
2451 {
2452 unsigned int start_index = 0, match_length = 0, end_index = 0;
2453 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
2454 return match_input (c,
2455 inputCount, input,
2456 lookup_context.funcs.match, lookup_context.match_data[1],
2457 &match_length, match_positions)
2458 && match_backtrack (c,
2459 backtrackCount, backtrack,
2460 lookup_context.funcs.match, lookup_context.match_data[0],
2461 &start_index)
2462 && match_lookahead (c,
2463 lookaheadCount, lookahead,
2464 lookup_context.funcs.match, lookup_context.match_data[2],
2465 match_length, &end_index)
2466 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
2467 apply_lookup (c,
2468 inputCount, match_positions,
2469 lookupCount, lookupRecord,
2470 match_length));
2471 }
2472
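/* A single chaining context rule: backtrack, input (without its first
 * element), and lookahead sequences, followed by the LookupRecords to apply.
 * The four variable-length arrays are laid out back to back, hence the
 * StructAfter<> chasing in the methods below. */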
2473 struct ChainRule
2474 {
intersectsOT::ChainRule2475 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2476 {
2477 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2478 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2479 return chain_context_intersects (glyphs,
2480 backtrack.len, backtrack.arrayZ,
2481 input.lenP1, input.arrayZ,
2482 lookahead.len, lookahead.arrayZ,
2483 lookup_context);
2484 }
2485
closureOT::ChainRule2486 void closure (hb_closure_context_t *c, unsigned value,
2487 ChainContextClosureLookupContext &lookup_context) const
2488 {
2489 if (unlikely (c->lookup_limit_exceeded ())) return;
2490
2491 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2492 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2493 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2494 chain_context_closure_lookup (c,
2495 backtrack.len, backtrack.arrayZ,
2496 input.lenP1, input.arrayZ,
2497 lookahead.len, lookahead.arrayZ,
2498 lookup.len, lookup.arrayZ,
2499 value,
2500 lookup_context);
2501 }
2502
closure_lookupsOT::ChainRule2503 void closure_lookups (hb_closure_lookups_context_t *c,
2504 ChainContextClosureLookupContext &lookup_context) const
2505 {
2506 if (unlikely (c->lookup_limit_exceeded ())) return;
2507 if (!intersects (c->glyphs, lookup_context)) return;
2508
2509 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2510 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2511 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2512 recurse_lookups (c, lookup.len, lookup.arrayZ);
2513 }
2514
collect_glyphsOT::ChainRule2515 void collect_glyphs (hb_collect_glyphs_context_t *c,
2516 ChainContextCollectGlyphsLookupContext &lookup_context) const
2517 {
2518 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2519 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2520 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2521 chain_context_collect_glyphs_lookup (c,
2522 backtrack.len, backtrack.arrayZ,
2523 input.lenP1, input.arrayZ,
2524 lookahead.len, lookahead.arrayZ,
2525 lookup.len, lookup.arrayZ,
2526 lookup_context);
2527 }
2528
would_applyOT::ChainRule2529 bool would_apply (hb_would_apply_context_t *c,
2530 ChainContextApplyLookupContext &lookup_context) const
2531 {
2532 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2533 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2534 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2535 return chain_context_would_apply_lookup (c,
2536 backtrack.len, backtrack.arrayZ,
2537 input.lenP1, input.arrayZ,
2538 lookahead.len, lookahead.arrayZ, lookup.len,
2539 lookup.arrayZ, lookup_context);
2540 }
2541
applyOT::ChainRule2542 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2543 {
2544 TRACE_APPLY (this);
2545 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2546 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2547 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2548 return_trace (chain_context_apply_lookup (c,
2549 backtrack.len, backtrack.arrayZ,
2550 input.lenP1, input.arrayZ,
2551 lookahead.len, lookahead.arrayZ, lookup.len,
2552 lookup.arrayZ, lookup_context));
2553 }
2554
2555 template<typename Iterator,
2556 hb_requires (hb_is_iterator (Iterator))>
serialize_arrayOT::ChainRule2557 void serialize_array (hb_serialize_context_t *c,
2558 HBUINT16 len,
2559 Iterator it) const
2560 {
2561 c->copy (len);
2562 for (const auto g : it)
2563 c->copy ((HBUINT16) g);
2564 }
2565
serializeOT::ChainRule2566 bool serialize (hb_serialize_context_t *c,
2567 const hb_map_t *lookup_map,
2568 const hb_map_t *backtrack_map,
2569 const hb_map_t *input_map = nullptr,
2570 const hb_map_t *lookahead_map = nullptr) const
2571 {
2572 TRACE_SERIALIZE (this);
2573 auto *out = c->start_embed (this);
2574 if (unlikely (!out)) return_trace (false);
2575
2576 const hb_map_t *mapping = backtrack_map;
2577 serialize_array (c, backtrack.len, + backtrack.iter ()
2578 | hb_map (mapping));
2579
2580 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2581 if (input_map) mapping = input_map;
2582 serialize_array (c, input.lenP1, + input.iter ()
2583 | hb_map (mapping));
2584
2585 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2586 if (lookahead_map) mapping = lookahead_map;
2587 serialize_array (c, lookahead.len, + lookahead.iter ()
2588 | hb_map (mapping));
2589
2590 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
2591
2592 HBUINT16* lookupCount = c->embed (&(lookupRecord.len));
2593 if (!lookupCount) return_trace (false);
2594
2595 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (), lookup_map);
2596 return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2597 }
2598
subsetOT::ChainRule2599 bool subset (hb_subset_context_t *c,
2600 const hb_map_t *lookup_map,
2601 const hb_map_t *backtrack_map = nullptr,
2602 const hb_map_t *input_map = nullptr,
2603 const hb_map_t *lookahead_map = nullptr) const
2604 {
2605 TRACE_SUBSET (this);
2606
2607 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2608 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2609
2610 if (!backtrack_map)
2611 {
2612 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2613 if (!hb_all (backtrack, glyphset) ||
2614 !hb_all (input, glyphset) ||
2615 !hb_all (lookahead, glyphset))
2616 return_trace (false);
2617
2618 serialize (c->serializer, lookup_map, c->plan->glyph_map);
2619 }
2620 else
2621 {
2622 if (!hb_all (backtrack, backtrack_map) ||
2623 !hb_all (input, input_map) ||
2624 !hb_all (lookahead, lookahead_map))
2625 return_trace (false);
2626
2627 serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2628 }
2629
2630 return_trace (true);
2631 }
2632
sanitizeOT::ChainRule2633 bool sanitize (hb_sanitize_context_t *c) const
2634 {
2635 TRACE_SANITIZE (this);
2636 if (!backtrack.sanitize (c)) return_trace (false);
2637 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2638 if (!input.sanitize (c)) return_trace (false);
2639 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2640 if (!lookahead.sanitize (c)) return_trace (false);
2641 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2642 return_trace (lookup.sanitize (c));
2643 }
2644
2645 protected:
2646 Array16Of<HBUINT16>
2647 backtrack; /* Array of backtracking values
2648 * (to be matched before the input
2649 * sequence) */
2650 HeadlessArrayOf<HBUINT16>
2651 inputX; /* Array of input values (start with
2652 * second glyph) */
2653 Array16Of<HBUINT16>
2654 lookaheadX; /* Array of lookahead values (to be
2655 * matched after the input sequence) */
2656 Array16Of<LookupRecord>
2657 lookupX; /* Array of LookupRecords--in
2658 * design order */
2659 public:
2660 DEFINE_SIZE_MIN (8);
2661 };
2662
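/* Set of ChainRules keyed off the same first glyph or class; as with
 * RuleSet, the first rule that applies wins. */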
2663 struct ChainRuleSet
2664 {
intersectsOT::ChainRuleSet2665 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2666 {
2667 return
2668 + hb_iter (rule)
2669 | hb_map (hb_add (this))
2670 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
2671 | hb_any
2672 ;
2673 }
closureOT::ChainRuleSet2674 void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
2675 {
2676 if (unlikely (c->lookup_limit_exceeded ())) return;
2677
2678 return
2679 + hb_iter (rule)
2680 | hb_map (hb_add (this))
2681 | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
2682 ;
2683 }
2684
closure_lookupsOT::ChainRuleSet2685 void closure_lookups (hb_closure_lookups_context_t *c,
2686 ChainContextClosureLookupContext &lookup_context) const
2687 {
2688 if (unlikely (c->lookup_limit_exceeded ())) return;
2689
2690 + hb_iter (rule)
2691 | hb_map (hb_add (this))
2692 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
2693 ;
2694 }
2695
collect_glyphsOT::ChainRuleSet2696 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2697 {
2698 return
2699 + hb_iter (rule)
2700 | hb_map (hb_add (this))
2701 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
2702 ;
2703 }
2704
would_applyOT::ChainRuleSet2705 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2706 {
2707 return
2708 + hb_iter (rule)
2709 | hb_map (hb_add (this))
2710 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
2711 | hb_any
2712 ;
2713 }
2714
applyOT::ChainRuleSet2715 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2716 {
2717 TRACE_APPLY (this);
2718 return_trace (
2719 + hb_iter (rule)
2720 | hb_map (hb_add (this))
2721 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
2722 | hb_any
2723 )
2724 ;
2725 }
2726
subsetOT::ChainRuleSet2727 bool subset (hb_subset_context_t *c,
2728 const hb_map_t *lookup_map,
2729 const hb_map_t *backtrack_klass_map = nullptr,
2730 const hb_map_t *input_klass_map = nullptr,
2731 const hb_map_t *lookahead_klass_map = nullptr) const
2732 {
2733 TRACE_SUBSET (this);
2734
2735 auto snap = c->serializer->snapshot ();
2736 auto *out = c->serializer->start_embed (*this);
2737 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2738
2739 for (const Offset16To<ChainRule>& _ : rule)
2740 {
2741 if (!_) continue;
2742 auto o_snap = c->serializer->snapshot ();
2743 auto *o = out->rule.serialize_append (c->serializer);
2744 if (unlikely (!o)) continue;
2745
2746 if (!o->serialize_subset (c, _, this,
2747 lookup_map,
2748 backtrack_klass_map,
2749 input_klass_map,
2750 lookahead_klass_map))
2751 {
2752 out->rule.pop ();
2753 c->serializer->revert (o_snap);
2754 }
2755 }
2756
2757 bool ret = bool (out->rule);
2758 if (!ret) c->serializer->revert (snap);
2759
2760 return_trace (ret);
2761 }
2762
sanitizeOT::ChainRuleSet2763 bool sanitize (hb_sanitize_context_t *c) const
2764 {
2765 TRACE_SANITIZE (this);
2766 return_trace (rule.sanitize (c, this));
2767 }
2768
2769 protected:
2770 Array16OfOffset16To<ChainRule>
2771 rule; /* Array of ChainRule tables
2772 * ordered by preference */
2773 public:
2774 DEFINE_SIZE_ARRAY (2, rule);
2775 };
2776
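/* Chaining contextual lookup, Format 1: simple glyph contexts, one
 * ChainRuleSet per glyph in the Coverage table. */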
2777 struct ChainContextFormat1
2778 {
intersectsOT::ChainContextFormat12779 bool intersects (const hb_set_t *glyphs) const
2780 {
2781 struct ChainContextClosureLookupContext lookup_context = {
2782 {intersects_glyph, intersected_glyph},
2783 ContextFormat::SimpleContext,
2784 {nullptr, nullptr, nullptr}
2785 };
2786
2787 return
2788 + hb_zip (this+coverage, ruleSet)
2789 | hb_filter (*glyphs, hb_first)
2790 | hb_map (hb_second)
2791 | hb_map (hb_add (this))
2792 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2793 | hb_any
2794 ;
2795 }
2796
may_have_non_1to1OT::ChainContextFormat12797 bool may_have_non_1to1 () const
2798 { return true; }
2799
closureOT::ChainContextFormat12800 void closure (hb_closure_context_t *c) const
2801 {
2802 c->cur_intersected_glyphs->clear ();
2803 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
2804
2805 struct ChainContextClosureLookupContext lookup_context = {
2806 {intersects_glyph, intersected_glyph},
2807 ContextFormat::SimpleContext,
2808 {nullptr, nullptr, nullptr}
2809 };
2810
2811 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2812 | hb_filter (c->parent_active_glyphs (), hb_first)
2813 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
2814 | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2815 ;
2816 }
2817
closure_lookupsOT::ChainContextFormat12818 void closure_lookups (hb_closure_lookups_context_t *c) const
2819 {
2820 struct ChainContextClosureLookupContext lookup_context = {
2821 {intersects_glyph, intersected_glyph},
2822 ContextFormat::SimpleContext,
2823 {nullptr, nullptr, nullptr}
2824 };
2825
2826 + hb_zip (this+coverage, ruleSet)
2827 | hb_filter (*c->glyphs, hb_first)
2828 | hb_map (hb_second)
2829 | hb_map (hb_add (this))
2830 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
2831 ;
2832 }
2833
collect_variation_indicesOT::ChainContextFormat12834 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2835
collect_glyphsOT::ChainContextFormat12836 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2837 {
2838 (this+coverage).collect_coverage (c->input);
2839
2840 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2841 {collect_glyph},
2842 {nullptr, nullptr, nullptr}
2843 };
2844
2845 + hb_iter (ruleSet)
2846 | hb_map (hb_add (this))
2847 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2848 ;
2849 }
2850
would_applyOT::ChainContextFormat12851 bool would_apply (hb_would_apply_context_t *c) const
2852 {
2853 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2854 struct ChainContextApplyLookupContext lookup_context = {
2855 {match_glyph},
2856 {nullptr, nullptr, nullptr}
2857 };
2858 return rule_set.would_apply (c, lookup_context);
2859 }
2860
get_coverageOT::ChainContextFormat12861 const Coverage &get_coverage () const { return this+coverage; }
2862
applyOT::ChainContextFormat12863 bool apply (hb_ot_apply_context_t *c) const
2864 {
2865 TRACE_APPLY (this);
2866 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2867 if (likely (index == NOT_COVERED)) return_trace (false);
2868
2869 const ChainRuleSet &rule_set = this+ruleSet[index];
2870 struct ChainContextApplyLookupContext lookup_context = {
2871 {match_glyph},
2872 {nullptr, nullptr, nullptr}
2873 };
2874 return_trace (rule_set.apply (c, lookup_context));
2875 }
2876
subsetOT::ChainContextFormat12877 bool subset (hb_subset_context_t *c) const
2878 {
2879 TRACE_SUBSET (this);
2880 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2881 const hb_map_t &glyph_map = *c->plan->glyph_map;
2882
2883 auto *out = c->serializer->start_embed (*this);
2884 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2885 out->format = format;
2886
2887 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2888 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2889 + hb_zip (this+coverage, ruleSet)
2890 | hb_filter (glyphset, hb_first)
2891 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2892 | hb_map (hb_first)
2893 | hb_map (glyph_map)
2894 | hb_sink (new_coverage)
2895 ;
2896
2897 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
2898 return_trace (bool (new_coverage));
2899 }
2900
sanitizeOT::ChainContextFormat12901 bool sanitize (hb_sanitize_context_t *c) const
2902 {
2903 TRACE_SANITIZE (this);
2904 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2905 }
2906
2907 protected:
2908 HBUINT16 format; /* Format identifier--format = 1 */
2909 Offset16To<Coverage>
2910 coverage; /* Offset to Coverage table--from
2911 * beginning of table */
2912 Array16OfOffset16To<ChainRuleSet>
2913 ruleSet; /* Array of ChainRuleSet tables
2914 * ordered by Coverage Index */
2915 public:
2916 DEFINE_SIZE_ARRAY (6, ruleSet);
2917 };
2918
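/* Chaining contextual lookup, Format 2: class-based contexts, with separate
 * ClassDefs for the backtrack, input, and lookahead sequences. */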
2919 struct ChainContextFormat2
2920 {
intersectsOT::ChainContextFormat22921 bool intersects (const hb_set_t *glyphs) const
2922 {
2923 if (!(this+coverage).intersects (glyphs))
2924 return false;
2925
2926 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2927 const ClassDef &input_class_def = this+inputClassDef;
2928 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2929
2930 struct ChainContextClosureLookupContext lookup_context = {
2931 {intersects_class, intersected_class_glyphs},
2932 ContextFormat::ClassBasedContext,
2933 {&backtrack_class_def,
2934 &input_class_def,
2935 &lookahead_class_def}
2936 };
2937
2938 hb_set_t retained_coverage_glyphs;
2939 (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
2940
2941 hb_set_t coverage_glyph_classes;
2942 input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2943
2944 return
2945 + hb_iter (ruleSet)
2946 | hb_map (hb_add (this))
2947 | hb_enumerate
2948 | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
2949 { return input_class_def.intersects_class (glyphs, p.first) &&
2950 coverage_glyph_classes.has (p.first) &&
2951 p.second.intersects (glyphs, lookup_context); })
2952 | hb_any
2953 ;
2954 }
2955
may_have_non_1to1OT::ChainContextFormat22956 bool may_have_non_1to1 () const
2957 { return true; }
2958
closureOT::ChainContextFormat22959 void closure (hb_closure_context_t *c) const
2960 {
2961 if (!(this+coverage).intersects (c->glyphs))
2962 return;
2963
2964 c->cur_intersected_glyphs->clear ();
2965 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
2966
2967 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2968 const ClassDef &input_class_def = this+inputClassDef;
2969 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2970
2971 struct ChainContextClosureLookupContext lookup_context = {
2972 {intersects_class, intersected_class_glyphs},
2973 ContextFormat::ClassBasedContext,
2974 {&backtrack_class_def,
2975 &input_class_def,
2976 &lookahead_class_def}
2977 };
2978
2979 return
2980 + hb_enumerate (ruleSet)
2981 | hb_filter ([&] (unsigned _)
2982 { return input_class_def.intersects_class (c->cur_intersected_glyphs, _); },
2983 hb_first)
2984 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<ChainRuleSet>&> _)
2985 {
2986 const ChainRuleSet& chainrule_set = this+_.second;
2987 chainrule_set.closure (c, _.first, lookup_context);
2988 })
2989 ;
2990 }
2991
closure_lookupsOT::ChainContextFormat22992 void closure_lookups (hb_closure_lookups_context_t *c) const
2993 {
2994 if (!(this+coverage).intersects (c->glyphs))
2995 return;
2996
2997 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2998 const ClassDef &input_class_def = this+inputClassDef;
2999 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3000
3001 struct ChainContextClosureLookupContext lookup_context = {
3002 {intersects_class, intersected_class_glyphs},
3003 ContextFormat::ClassBasedContext,
3004 {&backtrack_class_def,
3005 &input_class_def,
3006 &lookahead_class_def}
3007 };
3008
3009 + hb_iter (ruleSet)
3010 | hb_map (hb_add (this))
3011 | hb_enumerate
3012 | hb_filter([&] (unsigned klass)
3013 { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
3014 | hb_map (hb_second)
3015 | hb_apply ([&] (const ChainRuleSet &_)
3016 { _.closure_lookups (c, lookup_context); })
3017 ;
3018 }
3019
3020   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3021
3022   void collect_glyphs (hb_collect_glyphs_context_t *c) const
3023 {
3024 (this+coverage).collect_coverage (c->input);
3025
3026 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3027 const ClassDef &input_class_def = this+inputClassDef;
3028 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3029
3030 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3031 {collect_class},
3032 {&backtrack_class_def,
3033 &input_class_def,
3034 &lookahead_class_def}
3035 };
3036
3037 + hb_iter (ruleSet)
3038 | hb_map (hb_add (this))
3039 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3040 ;
3041 }
3042
3043   bool would_apply (hb_would_apply_context_t *c) const
3044 {
3045 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3046 const ClassDef &input_class_def = this+inputClassDef;
3047 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3048
3049 unsigned int index = input_class_def.get_class (c->glyphs[0]);
3050 const ChainRuleSet &rule_set = this+ruleSet[index];
3051 struct ChainContextApplyLookupContext lookup_context = {
3052 {match_class},
3053 {&backtrack_class_def,
3054 &input_class_def,
3055 &lookahead_class_def}
3056 };
3057 return rule_set.would_apply (c, lookup_context);
3058 }
3059
3060   const Coverage &get_coverage () const { return this+coverage; }
3061
3062   bool apply (hb_ot_apply_context_t *c) const
3063 {
3064 TRACE_APPLY (this);
3065 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3066 if (likely (index == NOT_COVERED)) return_trace (false);
3067
3068 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3069 const ClassDef &input_class_def = this+inputClassDef;
3070 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3071
3072 index = input_class_def.get_class (c->buffer->cur().codepoint);
3073 const ChainRuleSet &rule_set = this+ruleSet[index];
3074 struct ChainContextApplyLookupContext lookup_context = {
3075 {match_class},
3076 {&backtrack_class_def,
3077 &input_class_def,
3078 &lookahead_class_def}
3079 };
3080 return_trace (rule_set.apply (c, lookup_context));
3081 }
3082
3083   bool subset (hb_subset_context_t *c) const
3084 {
3085 TRACE_SUBSET (this);
3086 auto *out = c->serializer->start_embed (*this);
3087 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3088 out->format = format;
3089 out->coverage.serialize_subset (c, coverage, this);
3090
3091 hb_map_t backtrack_klass_map;
3092 hb_map_t input_klass_map;
3093 hb_map_t lookahead_klass_map;
3094
3095 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
3096     // TODO: subset inputClassDef based on glyphs that survived Coverage subsetting.
3097 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
3098 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
3099
3100 if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
3101 input_klass_map,
3102 lookahead_klass_map)))
3103 return_trace (false);
3104
3105 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
3106 hb_set_t retained_coverage_glyphs;
3107 (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
3108
3109 hb_set_t coverage_glyph_classes;
3110 (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3111
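    // Keep only rule sets whose input class survives subsetting and still
    // contains at least one glyph retained in the Coverage table.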
3112 int non_zero_index = -1, index = 0;
3113 bool ret = true;
3114 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3115 auto last_non_zero = c->serializer->snapshot ();
3116 for (const auto& _ : + hb_enumerate (ruleSet)
3117 | hb_filter (input_klass_map, hb_first))
3118 {
3119 auto *o = out->ruleSet.serialize_append (c->serializer);
3120 if (unlikely (!o))
3121 {
3122 ret = false;
3123 break;
3124 }
3125 if (coverage_glyph_classes.has (_.first) &&
3126 o->serialize_subset (c, _.second, this,
3127 lookup_map,
3128 &backtrack_klass_map,
3129 &input_klass_map,
3130 &lookahead_klass_map))
3131 {
3132 last_non_zero = c->serializer->snapshot ();
3133 non_zero_index = index;
3134 }
3135
3136 index++;
3137 }
3138
3139 if (!ret || non_zero_index == -1) return_trace (false);
3140
3141     // Prune empty trailing ruleSets: revert to the last non-empty entry and truncate the array.
3142 if (index > non_zero_index) {
3143 c->serializer->revert (last_non_zero);
3144 out->ruleSet.len = non_zero_index + 1;
3145 }
3146
3147 return_trace (bool (out->ruleSet));
3148 }
3149
3150   bool sanitize (hb_sanitize_context_t *c) const
3151 {
3152 TRACE_SANITIZE (this);
3153 return_trace (coverage.sanitize (c, this) &&
3154 backtrackClassDef.sanitize (c, this) &&
3155 inputClassDef.sanitize (c, this) &&
3156 lookaheadClassDef.sanitize (c, this) &&
3157 ruleSet.sanitize (c, this));
3158 }
3159
3160 protected:
3161 HBUINT16 format; /* Format identifier--format = 2 */
3162 Offset16To<Coverage>
3163 coverage; /* Offset to Coverage table--from
3164 * beginning of table */
3165 Offset16To<ClassDef>
3166 backtrackClassDef; /* Offset to glyph ClassDef table
3167 * containing backtrack sequence
3168 * data--from beginning of table */
3169 Offset16To<ClassDef>
3170 inputClassDef; /* Offset to glyph ClassDef
3171 * table containing input sequence
3172 * data--from beginning of table */
3173 Offset16To<ClassDef>
3174 lookaheadClassDef; /* Offset to glyph ClassDef table
3175 * containing lookahead sequence
3176 * data--from beginning of table */
3177 Array16OfOffset16To<ChainRuleSet>
3178 ruleSet; /* Array of ChainRuleSet tables
3179 * ordered by class */
3180 public:
3181 DEFINE_SIZE_ARRAY (12, ruleSet);
3182 };
3183
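/*
 * ChainContextFormat3: coverage-based chained context.  The backtrack, input
 * and lookahead sequences are arrays of Coverage offsets; the first input
 * Coverage doubles as the subtable's coverage (see get_coverage below).
 */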
3184 struct ChainContextFormat3
3185 {
3186   bool intersects (const hb_set_t *glyphs) const
3187 {
3188 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3189
3190 if (!(this+input[0]).intersects (glyphs))
3191 return false;
3192
3193 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3194 struct ChainContextClosureLookupContext lookup_context = {
3195 {intersects_coverage, intersected_coverage_glyphs},
3196 ContextFormat::CoverageBasedContext,
3197 {this, this, this}
3198 };
3199 return chain_context_intersects (glyphs,
3200 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3201 input.len, (const HBUINT16 *) input.arrayZ + 1,
3202 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3203 lookup_context);
3204 }
3205
3206   bool may_have_non_1to1 () const
3207 { return true; }
3208
3209   void closure (hb_closure_context_t *c) const
3210 {
3211 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3212
3213 if (!(this+input[0]).intersects (c->glyphs))
3214 return;
3215
3216 c->cur_intersected_glyphs->clear ();
3217 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
3218
3219 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3220 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3221 struct ChainContextClosureLookupContext lookup_context = {
3222 {intersects_coverage, intersected_coverage_glyphs},
3223 ContextFormat::CoverageBasedContext,
3224 {this, this, this}
3225 };
3226 chain_context_closure_lookup (c,
3227 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3228 input.len, (const HBUINT16 *) input.arrayZ + 1,
3229 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3230 lookup.len, lookup.arrayZ,
3231 0, lookup_context);
3232 }
3233
3234   void closure_lookups (hb_closure_lookups_context_t *c) const
3235 {
3236 if (!intersects (c->glyphs))
3237 return;
3238
3239 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3240 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3241 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3242 recurse_lookups (c, lookup.len, lookup.arrayZ);
3243 }
3244
3245   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3246
3247   void collect_glyphs (hb_collect_glyphs_context_t *c) const
3248 {
3249 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3250
3251 (this+input[0]).collect_coverage (c->input);
3252
3253 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3254 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3255 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3256 {collect_coverage},
3257 {this, this, this}
3258 };
3259 chain_context_collect_glyphs_lookup (c,
3260 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3261 input.len, (const HBUINT16 *) input.arrayZ + 1,
3262 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3263 lookup.len, lookup.arrayZ,
3264 lookup_context);
3265 }
3266
3267   bool would_apply (hb_would_apply_context_t *c) const
3268 {
3269 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3270 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3271 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3272 struct ChainContextApplyLookupContext lookup_context = {
3273 {match_coverage},
3274 {this, this, this}
3275 };
3276 return chain_context_would_apply_lookup (c,
3277 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3278 input.len, (const HBUINT16 *) input.arrayZ + 1,
3279 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3280 lookup.len, lookup.arrayZ, lookup_context);
3281 }
3282
3283   const Coverage &get_coverage () const
3284 {
3285 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3286 return this+input[0];
3287 }
3288
3289   bool apply (hb_ot_apply_context_t *c) const
3290 {
3291 TRACE_APPLY (this);
3292 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3293
3294 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
3295 if (likely (index == NOT_COVERED)) return_trace (false);
3296
3297 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3298 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3299 struct ChainContextApplyLookupContext lookup_context = {
3300 {match_coverage},
3301 {this, this, this}
3302 };
3303 return_trace (chain_context_apply_lookup (c,
3304 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3305 input.len, (const HBUINT16 *) input.arrayZ + 1,
3306 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3307 lookup.len, lookup.arrayZ, lookup_context));
3308 }
3309
3310 template<typename Iterator,
3311 hb_requires (hb_is_iterator (Iterator))>
3312   bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
3313 {
3314 TRACE_SERIALIZE (this);
3315 auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
3316
3317 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
3318 return_trace (false);
3319
3320 for (auto& offset : it) {
3321 auto *o = out->serialize_append (c->serializer);
3322 if (unlikely (!o) || !o->serialize_subset (c, offset, base))
3323 return_trace (false);
3324 }
3325
3326 return_trace (true);
3327 }
3328
3329   bool subset (hb_subset_context_t *c) const
3330 {
3331 TRACE_SUBSET (this);
3332
3333 auto *out = c->serializer->start_embed (this);
3334 if (unlikely (!out)) return_trace (false);
3335 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
3336
3337 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
3338 return_trace (false);
3339
3340 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3341 if (!serialize_coverage_offsets (c, input.iter (), this))
3342 return_trace (false);
3343
3344 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3345 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
3346 return_trace (false);
3347
3348 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
3349 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3350
3351 HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookupRecord.len);
3352 if (!lookupCount) return_trace (false);
3353
3354 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (), lookup_map);
3355 return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
3356 }
3357
3358   bool sanitize (hb_sanitize_context_t *c) const
3359 {
3360 TRACE_SANITIZE (this);
3361 if (!backtrack.sanitize (c, this)) return_trace (false);
3362 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3363 if (!input.sanitize (c, this)) return_trace (false);
3364 if (!input.len) return_trace (false); /* To be consistent with Context. */
3365 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3366 if (!lookahead.sanitize (c, this)) return_trace (false);
3367 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3368 return_trace (lookup.sanitize (c));
3369 }
3370
3371 protected:
3372 HBUINT16 format; /* Format identifier--format = 3 */
3373 Array16OfOffset16To<Coverage>
3374 backtrack; /* Array of coverage tables
3375 * in backtracking sequence, in glyph
3376 * sequence order */
3377 Array16OfOffset16To<Coverage>
3378   inputX;  /* Array of coverage
3379 * tables in input sequence, in glyph
3380 * sequence order */
3381 Array16OfOffset16To<Coverage>
3382 lookaheadX; /* Array of coverage tables
3383 * in lookahead sequence, in glyph
3384 * sequence order */
3385 Array16Of<LookupRecord>
3386 lookupX; /* Array of LookupRecords--in
3387  * design order */
3388 public:
3389 DEFINE_SIZE_MIN (10);
3390 };
3391
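/*
 * ChainContext: chained contextual lookup subtable, dispatching on format
 * (1: glyph-based rules, 2: class-based, 3: coverage-based).
 */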
3392 struct ChainContext
3393 {
3394 template <typename context_t, typename ...Ts>
3395   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3396 {
3397 TRACE_DISPATCH (this, u.format);
3398 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3399 switch (u.format) {
3400 case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
3401 case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
3402 case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
3403 default:return_trace (c->default_return_value ());
3404 }
3405 }
3406
3407 protected:
3408 union {
3409 HBUINT16 format; /* Format identifier */
3410 ChainContextFormat1 format1;
3411 ChainContextFormat2 format2;
3412 ChainContextFormat3 format3;
3413 } u;
3414 };
3415
3416
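/*
 * ExtensionFormat1: wraps a single subtable of another lookup type behind a
 * 32-bit offset, so that subtables too far away for the usual 16-bit offsets
 * can still be reached.
 */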
3417 template <typename T>
3418 struct ExtensionFormat1
3419 {
3420   unsigned int get_type () const { return extensionLookupType; }
3421
3422 template <typename X>
3423   const X& get_subtable () const
3424 { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }
3425
3426 template <typename context_t, typename ...Ts>
3427   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3428 {
3429 TRACE_DISPATCH (this, format);
3430 if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
3431 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
3432 }
3433
3434   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
3435 { dispatch (c); }
3436
3437 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
3438   bool sanitize (hb_sanitize_context_t *c) const
3439 {
3440 TRACE_SANITIZE (this);
3441 return_trace (c->check_struct (this) &&
3442 extensionLookupType != T::SubTable::Extension);
3443 }
3444
3445   bool subset (hb_subset_context_t *c) const
3446 {
3447 TRACE_SUBSET (this);
3448
3449 auto *out = c->serializer->start_embed (this);
3450 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3451
3452 out->format = format;
3453 out->extensionLookupType = extensionLookupType;
3454
3455 const auto& src_offset =
3456 reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
3457 auto& dest_offset =
3458 reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);
3459
3460 return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
3461 }
3462
3463 protected:
3464 HBUINT16 format; /* Format identifier. Set to 1. */
3465 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
3466 * by ExtensionOffset (i.e. the
3467 * extension subtable). */
3468 Offset32 extensionOffset; /* Offset to the extension subtable,
3469  * of lookup type extensionLookupType. */
3470 public:
3471 DEFINE_SIZE_STATIC (8);
3472 };
3473
3474 template <typename T>
3475 struct Extension
3476 {
3477   unsigned int get_type () const
3478 {
3479 switch (u.format) {
3480 case 1: return u.format1.get_type ();
3481 default:return 0;
3482 }
3483 }
3484 template <typename X>
3485   const X& get_subtable () const
3486 {
3487 switch (u.format) {
3488 case 1: return u.format1.template get_subtable<typename T::SubTable> ();
3489 default:return Null (typename T::SubTable);
3490 }
3491 }
3492
3493   // Specialization of dispatch () for subsetting. dispatch () normally just
3494   // dispatches to the subtable this Extension points to, but for subsetting
3495   // we need to run subset () on this wrapper subtable too.
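  // Without this overload, the generic dispatch () below would forward
  // straight to the wrapped subtable, and the Extension header itself
  // (format, extensionLookupType, extensionOffset) would never be copied
  // into the subset output.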
3496 template <typename ...Ts>
3497   typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
3498 {
3499 switch (u.format) {
3500 case 1: return u.format1.subset (c);
3501 default: return c->default_return_value ();
3502 }
3503 }
3504
3505 template <typename context_t, typename ...Ts>
3506   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
3507 {
3508 TRACE_DISPATCH (this, u.format);
3509 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
3510 switch (u.format) {
3511 case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
3512 default:return_trace (c->default_return_value ());
3513 }
3514 }
3515
3516 protected:
3517 union {
3518 HBUINT16 format; /* Format identifier */
3519 ExtensionFormat1<T> format1;
3520 } u;
3521 };
3522
3523
3524 /*
3525 * GSUB/GPOS Common
3526 */
3527
3528 struct hb_ot_layout_lookup_accelerator_t
3529 {
3530 template <typename TLookup>
3531   void init (const TLookup &lookup)
3532 {
3533 digest.init ();
3534 lookup.collect_coverage (&digest);
3535
3536 subtables.init ();
3537 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3538 lookup.dispatch (&c_get_subtables);
3539 }
3540   void fini () { subtables.fini (); }
3541
3542   bool may_have (hb_codepoint_t g) const
3543 { return digest.may_have (g); }
3544
3545   bool apply (hb_ot_apply_context_t *c) const
3546 {
3547 for (unsigned int i = 0; i < subtables.length; i++)
3548 if (subtables[i].apply (c))
3549 return true;
3550 return false;
3551 }
3552
3553 private:
3554 hb_set_digest_t digest;
3555 hb_get_subtables_context_t::array_t subtables;
3556 };
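/* A minimal usage sketch (hypothetical caller; `lookup`, `gid` and `c` are
 * assumptions for illustration, not part of this header):
 *
 *   hb_ot_layout_lookup_accelerator_t accel;
 *   accel.init (lookup);          // build coverage digest + subtable array
 *   if (accel.may_have (gid))     // cheap digest pre-filter
 *     accel.apply (c);            // try each subtable until one applies
 *   accel.fini ();
 */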
3557
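/*
 * GSUBGPOS: common header shared by the GSUB and GPOS tables -- script,
 * feature and lookup lists, plus the optional FeatureVariations table
 * available from version 1.1 onwards.
 */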
3558 struct GSUBGPOS
3559 {
3560   bool has_data () const { return version.to_int (); }
3561   unsigned int get_script_count () const
3562 { return (this+scriptList).len; }
3563   const Tag& get_script_tag (unsigned int i) const
3564 { return (this+scriptList).get_tag (i); }
3565   unsigned int get_script_tags (unsigned int start_offset,
3566 unsigned int *script_count /* IN/OUT */,
3567 hb_tag_t *script_tags /* OUT */) const
3568 { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
3569   const Script& get_script (unsigned int i) const
3570 { return (this+scriptList)[i]; }
3571   bool find_script_index (hb_tag_t tag, unsigned int *index) const
3572 { return (this+scriptList).find_index (tag, index); }
3573
3574   unsigned int get_feature_count () const
3575 { return (this+featureList).len; }
3576   hb_tag_t get_feature_tag (unsigned int i) const
3577 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
3578   unsigned int get_feature_tags (unsigned int start_offset,
3579 unsigned int *feature_count /* IN/OUT */,
3580 hb_tag_t *feature_tags /* OUT */) const
3581 { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
3582   const Feature& get_feature (unsigned int i) const
3583 { return (this+featureList)[i]; }
3584   bool find_feature_index (hb_tag_t tag, unsigned int *index) const
3585 { return (this+featureList).find_index (tag, index); }
3586
3587   unsigned int get_lookup_count () const
3588 { return (this+lookupList).len; }
3589   const Lookup& get_lookup (unsigned int i) const
3590 { return (this+lookupList)[i]; }
3591
3592   bool find_variations_index (const int *coords, unsigned int num_coords,
3593 unsigned int *index) const
3594 {
3595 #ifdef HB_NO_VAR
3596 *index = FeatureVariations::NOT_FOUND_INDEX;
3597 return false;
3598 #endif
3599 return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
3600 .find_index (coords, num_coords, index);
3601 }
3602   const Feature& get_feature_variation (unsigned int feature_index,
3603 unsigned int variations_index) const
3604 {
3605 #ifndef HB_NO_VAR
3606 if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
3607 version.to_int () >= 0x00010001u)
3608 {
3609 const Feature *feature = (this+featureVars).find_substitute (variations_index,
3610 feature_index);
3611 if (feature)
3612 return *feature;
3613 }
3614 #endif
3615 return get_feature (feature_index);
3616 }
3617
3618   void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
3619 hb_set_t *lookup_indexes /* OUT */) const
3620 {
3621 #ifndef HB_NO_VAR
3622 if (version.to_int () >= 0x00010001u)
3623 (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
3624 #endif
3625 }
3626
3627 template <typename TLookup>
3628   void closure_lookups (hb_face_t *face,
3629 const hb_set_t *glyphs,
3630 hb_set_t *lookup_indexes /* IN/OUT */) const
3631 {
3632 hb_set_t visited_lookups, inactive_lookups;
3633 OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
3634
3635 for (unsigned lookup_index : + hb_iter (lookup_indexes))
3636 reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
3637
3638 hb_set_union (lookup_indexes, &visited_lookups);
3639 hb_set_subtract (lookup_indexes, &inactive_lookups);
3640 }
3641
3642   void prune_langsys (const hb_map_t *duplicate_feature_map,
3643 hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map,
3644 hb_set_t *new_feature_indexes /* OUT */) const
3645 {
3646 hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
3647
3648 unsigned count = get_script_count ();
3649 for (unsigned script_index = 0; script_index < count; script_index++)
3650 {
3651 const Script& s = get_script (script_index);
3652 s.prune_langsys (&c, script_index);
3653 }
3654 }
3655
3656 template <typename TLookup>
3657   bool subset (hb_subset_layout_context_t *c) const
3658 {
3659 TRACE_SUBSET (this);
3660 auto *out = c->subset_context->serializer->embed (*this);
3661 if (unlikely (!out)) return_trace (false);
3662
3663 typedef LookupOffsetList<TLookup> TLookupList;
3664 reinterpret_cast<Offset16To<TLookupList> &> (out->lookupList)
3665 .serialize_subset (c->subset_context,
3666 reinterpret_cast<const Offset16To<TLookupList> &> (lookupList),
3667 this,
3668 c);
3669
3670 reinterpret_cast<Offset16To<RecordListOfFeature> &> (out->featureList)
3671 .serialize_subset (c->subset_context,
3672 reinterpret_cast<const Offset16To<RecordListOfFeature> &> (featureList),
3673 this,
3674 c);
3675
3676 out->scriptList.serialize_subset (c->subset_context,
3677 scriptList,
3678 this,
3679 c);
3680
3681 #ifndef HB_NO_VAR
3682 if (version.to_int () >= 0x00010001u)
3683 {
3684 bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
3685 if (!ret)
3686 {
3687 out->version.major = 1;
3688 out->version.minor = 0;
3689 }
3690 }
3691 #endif
3692
3693 return_trace (true);
3694 }
3695
3696   void find_duplicate_features (const hb_map_t *lookup_indices,
3697 const hb_set_t *feature_indices,
3698 hb_map_t *duplicate_feature_map /* OUT */) const
3699 {
3700 if (feature_indices->is_empty ()) return;
3701 hb_hashmap_t<hb_tag_t, hb_set_t *> unique_features;
3702     // Find duplicate features after subsetting.
3703 for (unsigned i : feature_indices->iter ())
3704 {
3705 hb_tag_t t = get_feature_tag (i);
3706 if (t == unique_features.INVALID_KEY) continue;
3707 if (!unique_features.has (t))
3708 {
3709 hb_set_t* indices = hb_set_create ();
3710 if (unlikely (indices == hb_set_get_empty () ||
3711 !unique_features.set (t, indices)))
3712 {
3713 hb_set_destroy (indices);
3714 for (auto _ : unique_features.iter ())
3715 hb_set_destroy (_.second);
3716 return;
3717 }
3718 if (unique_features.get (t))
3719 unique_features.get (t)->add (i);
3720 duplicate_feature_map->set (i, i);
3721 continue;
3722 }
3723
3724 bool found = false;
3725
3726 hb_set_t* same_tag_features = unique_features.get (t);
3727 for (unsigned other_f_index : same_tag_features->iter ())
3728 {
3729 const Feature& f = get_feature (i);
3730 const Feature& other_f = get_feature (other_f_index);
3731
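      // Two features with the same tag are duplicates only if their lookup
      // index lists, restricted to the lookups retained in the subset, match
      // element for element.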
3732 auto f_iter =
3733 + hb_iter (f.lookupIndex)
3734 | hb_filter (lookup_indices)
3735 ;
3736
3737 auto other_f_iter =
3738 + hb_iter (other_f.lookupIndex)
3739 | hb_filter (lookup_indices)
3740 ;
3741
3742 bool is_equal = true;
3743 for (; f_iter && other_f_iter; f_iter++, other_f_iter++)
3744 {
3745 unsigned a = *f_iter;
3746 unsigned b = *other_f_iter;
3747 if (a != b) { is_equal = false; break; }
3748 }
3749
3750 if (is_equal == false || f_iter || other_f_iter) continue;
3751
3752 found = true;
3753 duplicate_feature_map->set (i, other_f_index);
3754 break;
3755 }
3756
3757 if (found == false)
3758 {
3759 same_tag_features->add (i);
3760 duplicate_feature_map->set (i, i);
3761 }
3762 }
3763
3764 for (auto _ : unique_features.iter ())
3765 hb_set_destroy (_.second);
3766 }
3767
3768   void prune_features (const hb_map_t *lookup_indices, /* IN */
3769 hb_set_t *feature_indices /* IN/OUT */) const
3770 {
3771 #ifndef HB_NO_VAR
3772     // This is the set of feature indices which have alternate versions defined
3773     // in the FeatureVariations table, where the alternate version(s) intersect
3774     // the set of lookup indices.
3775 hb_set_t alternate_feature_indices;
3776 if (version.to_int () >= 0x00010001u)
3777 (this+featureVars).closure_features (lookup_indices, &alternate_feature_indices);
3778 if (unlikely (alternate_feature_indices.in_error()))
3779 {
3780 feature_indices->err ();
3781 return;
3782 }
3783 #endif
3784
3785 for (unsigned i : feature_indices->iter())
3786 {
3787 const Feature& f = get_feature (i);
3788 hb_tag_t tag = get_feature_tag (i);
3789 if (tag == HB_TAG ('p', 'r', 'e', 'f'))
3790 // Note: Never ever drop feature 'pref', even if it's empty.
3791       // HarfBuzz chooses the shaper for Khmer based on the presence of this
3792 // feature. See thread at:
3793 // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
3794 continue;
3795
3796
3797 if (!f.featureParams.is_null () &&
3798 tag == HB_TAG ('s', 'i', 'z', 'e'))
3799 continue;
3800
3801 if (!f.intersects_lookup_indexes (lookup_indices)
3802 #ifndef HB_NO_VAR
3803 && !alternate_feature_indices.has (i)
3804 #endif
3805 )
3806 feature_indices->del (i);
3807 }
3808 }
3809
3810   unsigned int get_size () const
3811 {
3812 return min_size +
3813 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
3814 }
3815
3816 template <typename TLookup>
3817   bool sanitize (hb_sanitize_context_t *c) const
3818 {
3819 TRACE_SANITIZE (this);
3820 typedef List16OfOffset16To<TLookup> TLookupList;
3821 if (unlikely (!(version.sanitize (c) &&
3822 likely (version.major == 1) &&
3823 scriptList.sanitize (c, this) &&
3824 featureList.sanitize (c, this) &&
3825 reinterpret_cast<const Offset16To<TLookupList> &> (lookupList).sanitize (c, this))))
3826 return_trace (false);
3827
3828 #ifndef HB_NO_VAR
3829 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
3830 return_trace (false);
3831 #endif
3832
3833 return_trace (true);
3834 }
3835
3836 template <typename T>
3837 struct accelerator_t
3838 {
3839     void init (hb_face_t *face)
3840 {
3841 this->table = hb_sanitize_context_t ().reference_table<T> (face);
3842 if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
3843 {
3844 hb_blob_destroy (this->table.get_blob ());
3845 this->table = hb_blob_get_empty ();
3846 }
3847
3848 this->lookup_count = table->get_lookup_count ();
3849
3850 this->accels = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
3851 if (unlikely (!this->accels))
3852 {
3853 this->lookup_count = 0;
3854 this->table.destroy ();
3855 this->table = hb_blob_get_empty ();
3856 }
3857
3858 for (unsigned int i = 0; i < this->lookup_count; i++)
3859 this->accels[i].init (table->get_lookup (i));
3860 }
3861
3862     void fini ()
3863 {
3864 for (unsigned int i = 0; i < this->lookup_count; i++)
3865 this->accels[i].fini ();
3866 hb_free (this->accels);
3867 this->table.destroy ();
3868 }
3869
3870 hb_blob_ptr_t<T> table;
3871 unsigned int lookup_count;
3872 hb_ot_layout_lookup_accelerator_t *accels;
3873 };
3874
3875 protected:
3876   FixedVersion<> version; /* Version of the GSUB/GPOS table--initially set
3877 * to 0x00010000u */
3878 Offset16To<ScriptList>
3879 scriptList; /* ScriptList table */
3880 Offset16To<FeatureList>
3881 featureList; /* FeatureList table */
3882 Offset16To<LookupList>
3883 lookupList; /* LookupList table */
3884 Offset32To<FeatureVariations>
3885 featureVars; /* Offset to Feature Variations
3886  * table--from beginning of table
3887 * (may be NULL). Introduced
3888 * in version 0x00010001. */
3889 public:
3890 DEFINE_SIZE_MIN (10);
3891 };
3892
3893
3894 } /* namespace OT */
3895
3896
3897 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */
3898