1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
31
32 #include "hb.hh"
33 #include "hb-buffer.hh"
34 #include "hb-map.hh"
35 #include "hb-set.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
39
40
41 namespace OT {
42
43
/* Dispatch context answering: does any subtable of a lookup intersect
 * the given glyph set?  Iteration stops as soon as one subtable does. */
struct hb_intersects_context_t :
       hb_dispatch_context_t<hb_intersects_context_t, bool>
{
  template <typename T>
  /* Forward to the subtable's own intersects() against our glyph set. */
  return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
  static return_t default_return_value () { return false; }
  /* A true result short-circuits the remaining subtables. */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  const hb_set_t *glyphs; /* Glyph set to test against; not owned. */

  hb_intersects_context_t (const hb_set_t *glyphs_) :
			     glyphs (glyphs_) {}
};
57
/* Dispatch context answering: may any subtable of a lookup perform a
 * substitution that is not one-to-one (ligature, multiple, etc.)? */
struct hb_have_non_1to1_context_t :
       hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
  static return_t default_return_value () { return false; }
  /* First positive answer ends the scan. */
  bool stop_sublookup_iteration (return_t r) const { return r; }
};
66
/* Dispatch context for glyph closure: starting from a glyph set, apply
 * lookups repeatedly to discover every glyph substitution can produce.
 * New glyphs accumulate in `output` and are merged into `glyphs` by
 * flush() (called from the destructor). */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a nested lookup, bounded by the nesting budget. */
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
    nesting_level_left++;
  }

  /* True once more lookups have been visited than the sanity limit. */
  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_INDICES; }

  /* Bump the visit counter and decide whether this lookup still needs
   * processing for the current glyph population. */
  bool should_visit_lookup (unsigned int lookup_index)
  {
    if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
      return false;

    if (is_lookup_done (lookup_index))
      return false;

    return true;
  }

  /* Returns true if this lookup has already been closed over the current
   * glyph population and active-glyph set.  On any allocation/bookkeeping
   * failure we conservatively report "done" so processing stops. */
  bool is_lookup_done (unsigned int lookup_index)
  {
    if (done_lookups_glyph_count->in_error () ||
        done_lookups_glyph_set->in_error ())
      return true;

    /* Have we visited this lookup with the current set of glyphs? */
    if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
    {
      done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());

      /* Lazily create the per-lookup covered-glyph set; destroy it again
       * if it cannot be stored in the map. */
      if (!done_lookups_glyph_set->get (lookup_index))
      {
	hb_set_t* empty_set = hb_set_create ();
	if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
	{
	  hb_set_destroy (empty_set);
	  return true;
	}
      }

      /* Glyph population changed: restart coverage tracking for this lookup. */
      hb_set_clear (done_lookups_glyph_set->get (lookup_index));
    }

    hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
    if (unlikely (covered_glyph_set->in_error ()))
      return true;
    if (parent_active_glyphs ()->is_subset (covered_glyph_set))
      return true;

    /* Record the glyphs we are about to process for this lookup. */
    hb_set_union (covered_glyph_set, parent_active_glyphs ());
    return false;
  }

  /* Active glyph set of the innermost enclosing context; falls back to
   * the full glyph set at the outermost level. */
  hb_set_t* parent_active_glyphs ()
  {
    if (active_glyphs_stack.length < 1)
      return glyphs;

    return active_glyphs_stack.tail ();
  }

  /* Enter a nested context; the set is borrowed, not owned. */
  void push_cur_active_glyphs (hb_set_t* cur_active_glyph_set)
  {
    active_glyphs_stack.push (cur_active_glyph_set);
  }

  /* Leave a nested context; returns false if the stack is already empty. */
  bool pop_cur_done_glyphs ()
  {
    if (active_glyphs_stack.length < 1)
      return false;

    active_glyphs_stack.pop ();
    return true;
  }

  hb_face_t *face;
  hb_set_t *glyphs;                  /* In/out: closure accumulates here. */
  hb_set_t *cur_intersected_glyphs;  /* Scratch set for context matching. */
  hb_set_t output[1];                /* Newly produced glyphs; merged in flush(). */
  hb_vector_t<hb_set_t *> active_glyphs_stack; /* Borrowed pointers; see flush(). */
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			hb_set_t *cur_intersected_glyphs_,
			hb_map_t *done_lookups_glyph_count_,
			hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *done_lookups_glyph_set_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  cur_intersected_glyphs (cur_intersected_glyphs_),
			  recurse_func (nullptr),
			  nesting_level_left (nesting_level_left_),
			  done_lookups_glyph_count (done_lookups_glyph_count_),
			  done_lookups_glyph_set (done_lookups_glyph_set_),
			  lookup_count (0)
  {
    /* The outermost active-glyph set is the input set itself. */
    push_cur_active_glyphs (glyphs_);
  }

  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  /* Merge `output` into `glyphs` (dropping glyph ids past the face's
   * glyph count) and tear down the active-glyphs stack. */
  void flush ()
  {
    hb_set_del_range (output, face->get_num_glyphs (), hb_set_get_max (output)); /* Remove invalid glyphs. */
    hb_set_union (glyphs, output);
    hb_set_clear (output);
    active_glyphs_stack.pop ();
    active_glyphs_stack.fini ();
  }

  private:
  hb_map_t *done_lookups_glyph_count; /* lookup index -> glyph population at last visit. */
  hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *done_lookups_glyph_set; /* lookup index -> glyphs already covered. */
  unsigned int lookup_count;          /* Total lookup visits, for the sanity limit. */
};
198
199
200
/* Dispatch context computing the transitive closure of lookup indices:
 * which lookups are reachable (and which are inactive) for a glyph set. */
struct hb_closure_lookups_context_t :
       hb_dispatch_context_t<hb_closure_lookups_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a referenced lookup, bounded by nesting budget and
   * skipping lookups already visited. */
  void recurse (unsigned lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Return if new lookup was recursed to before. */
    if (is_lookup_visited (lookup_index))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
  }

  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  /* True once more lookups have been visited than the sanity limit. */
  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_INDICES; }

  /* Checks (and counts) a visit; errors or limit overruns report
   * "visited" so traversal terminates. */
  bool is_lookup_visited (unsigned lookup_index)
  {
    if (unlikely (lookup_count++ > HB_MAX_LOOKUP_INDICES))
      return true;

    if (unlikely (visited_lookups->in_error ()))
      return true;

    return visited_lookups->has (lookup_index);
  }

  hb_face_t *face;
  const hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;

  hb_closure_lookups_context_t (hb_face_t *face_,
				const hb_set_t *glyphs_,
				hb_set_t *visited_lookups_,
				hb_set_t *inactive_lookups_,
				unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
				face (face_),
				glyphs (glyphs_),
				recurse_func (nullptr),
				nesting_level_left (nesting_level_left_),
				visited_lookups (visited_lookups_),
				inactive_lookups (inactive_lookups_),
				lookup_count (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  private:
  hb_set_t *visited_lookups;   /* Out: every lookup index reached. */
  hb_set_t *inactive_lookups;  /* Out: lookups that cannot apply. */
  unsigned int lookup_count;
};
267
/* Dispatch context answering: would this lookup apply to the given
 * fixed glyph sequence?  Used e.g. by the shape planner / OTL queries. */
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value () { return false; }
  /* First subtable that would apply ends the scan. */
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs; /* The candidate glyph sequence; not owned. */
  unsigned int len;             /* Number of glyphs in `glyphs`. */
  bool zero_context;            /* If true, context lookups must match with no surrounding context. */

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_) {}
};
290
/* Dispatch context that collects, per lookup, the glyphs that may occur
 * before/in/after matched input and the glyphs that may be output. */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a referenced lookup, collecting only its output set. */
  void recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return;

    /* Temporarily redirect before/input/after to the shared empty set so
     * the recursed lookup contributes only to `output`. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups->add (lookup_index);
  }

  hb_face_t *face;
  hb_set_t *before;  /* Out: possible backtrack glyphs. */
  hb_set_t *input;   /* Out: possible input glyphs. */
  hb_set_t *after;   /* Out: possible lookahead glyphs. */
  hb_set_t *output;  /* Out: possible produced glyphs. */
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups; /* Owned; lookups already descended into. */
  unsigned int nesting_level_left;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t  *glyphs_before, /* OUT.  May be NULL */
			       hb_set_t  *glyphs_input,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_after,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_output, /* OUT.  May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input  (glyphs_input  ? glyphs_input  : hb_set_get_empty ()),
			      after  (glyphs_after  ? glyphs_after  : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (nullptr),
			      recursed_lookups (hb_set_create ()),
			      nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
363
364
365
/* Dispatch context that unions the Coverage of every subtable into a
 * caller-provided set (set_t is e.g. hb_set_t or a set digest). */
template <typename set_t>
struct hb_collect_coverage_context_t :
       hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
{
  typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
  template <typename T>
  return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value () { return Null (Coverage); }
  /* Accumulate each subtable's coverage; never stop early. */
  bool stop_sublookup_iteration (return_t r) const
  {
    r.collect_coverage (set);
    return false;
  }

  hb_collect_coverage_context_t (set_t *set_) :
				   set (set_) {}

  set_t *set; /* Destination set; not owned. */
};
385
386
/* The context used while actually applying GSUB/GPOS lookups to a
 * buffer during shaping.  Carries font/face/buffer, the current
 * lookup's mask and props, and the skipping iterators that implement
 * lookup-flag-aware glyph matching. */
struct hb_ot_apply_context_t :
       hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
  /* Decides, per glyph, whether it may match at the current position
   * (mask/syllable/match-func) and whether it may be skipped per the
   * lookup's flags (ignore marks, ZWJ/ZWNJ handling, etc.). */
  struct matcher_t
  {
    matcher_t () :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
	     syllable arg1(0),
#undef arg1
	     match_func (nullptr),
	     match_data (nullptr) {}

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);

    void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    void set_mask (hb_mask_t mask_) { mask = mask_; }
    void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    void set_match_func (match_func_t match_func_,
			 const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE  /* No match_func set; caller decides via skip state. */
    };

    /* Match verdict for one glyph against the current expectation
     * (`glyph_data` points at the value the match_func compares to). */
    may_match_t may_match (const hb_glyph_info_t &info,
			   const HBUINT16        *glyph_data) const
    {
      /* Wrong mask or wrong syllable can never match. */
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE  /* Default-ignorable; skipped unless it matches. */
    };

    /* Skip verdict for one glyph per lookup props and ZWJ/ZWNJ policy. */
    may_skip_t may_skip (const hb_ot_apply_context_t *c,
			 const hb_glyph_info_t       &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  /* Iterator over buffer glyphs that transparently skips ignorable
   * glyphs per the matcher; used for input matching (over buffer->info)
   * and context matching (backtrack over buffer->out_info). */
  struct skipping_iterator_t
  {
    void init (hb_ot_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = nullptr;
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching context, or asked to. */
      matcher.set_ignore_zwj (context_match || c->auto_zwj);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
    void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    void set_match_func (matcher_t::match_func_t match_func_,
			 const void *match_data_,
			 const HBUINT16 glyph_data[])
    {
      matcher.set_match_func (match_func_, match_data_);
      match_glyph_data = glyph_data;
    }

    /* Position the iterator at start_index_ with num_items_ left to match. */
    void reset (unsigned int start_index_,
		unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      /* Only constrain syllable when starting at the current buffer position. */
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    /* Un-consume the last matched item (used on backtracking). */
    void reject ()
    {
      num_items++;
      if (match_glyph_data) match_glyph_data--;
    }

    matcher_t::may_skip_t
    may_skip (const hb_glyph_info_t &info) const
    { return matcher.may_skip (c, info); }

    /* Advance forward to the next matching glyph; false if none before
     * running out of room for the remaining items. */
    bool next ()
    {
      assert (num_items > 0);
      while (idx + num_items < end)
      {
	idx++;
	const hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  if (match_glyph_data) match_glyph_data++;
	  return true;
	}

	/* A non-skippable glyph that doesn't match breaks the sequence. */
	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }
    /* Walk backward (over out_info) to the previous matching glyph. */
    bool prev ()
    {
      assert (num_items > 0);
      while (idx > num_items - 1)
      {
	idx--;
	const hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  if (match_glyph_data) match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	  return false;
      }
      return false;
    }

    unsigned int idx; /* Current buffer position. */
    protected:
    hb_ot_apply_context_t *c;
    matcher_t matcher;
    const HBUINT16 *match_glyph_data; /* Values to match against, advanced in lockstep. */

    unsigned int num_items; /* Items still to be matched. */
    unsigned int end;       /* One past the last usable buffer index. */
  };


  const char *get_name () { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  /* First subtable that applies wins. */
  bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Apply a nested lookup, bounded by nesting budget and buffer op budget. */
  return_t recurse (unsigned int sub_lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, sub_lookup_index);
    nesting_level_left++;
    return ret;
  }

  skipping_iterator_t iter_input, iter_context;

  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  recurse_func_t recurse_func;
  const GDEF &gdef;
  const VariationStore &var_store;

  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int table_index; /* GSUB/GPOS */
  unsigned int lookup_index;
  unsigned int lookup_props;
  unsigned int nesting_level_left;

  bool has_glyph_classes;
  bool auto_zwnj;
  bool auto_zwj;
  bool random;

  uint32_t random_state;


  hb_ot_apply_context_t (unsigned int table_index_,
			 hb_font_t *font_,
			 hb_buffer_t *buffer_) :
			iter_input (), iter_context (),
			font (font_), face (font->face), buffer (buffer_),
			recurse_func (nullptr),
			gdef (
#ifndef HB_NO_OT_LAYOUT
			      *face->table.GDEF->table
#else
			      Null (GDEF)
#endif
			     ),
			var_store (gdef.get_var_store ()),
			direction (buffer_->props.direction),
			lookup_mask (1),
			table_index (table_index_),
			lookup_index ((unsigned int) -1),
			lookup_props (0),
			nesting_level_left (HB_MAX_NESTING_LEVEL),
			has_glyph_classes (gdef.has_glyph_classes ()),
			auto_zwnj (true),
			auto_zwj (true),
			random (false),
			random_state (1) { init_iters (); }

  /* Re-derive the iterators from the current lookup settings. */
  void init_iters ()
  {
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
  void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
  void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
  void set_random (bool random_) { random = random_; }
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }

  /* PRNG for the `rand` feature (AlternateSubst selection). */
  uint32_t random_number ()
  {
    /* http://www.cplusplus.com/reference/random/minstd_rand/ */
    /* NOTE(review): the multiply is evaluated in 32 bits, so it can wrap
     * before the modulo and the sequence differs from a strict
     * minstd_rand.  Looks long-standing; confirm before "fixing", as a
     * change would alter `rand`-feature glyph selection. */
    random_state = random_state * 48271 % 2147483647;
    return random_state;
  }

  /* Mark-glyph filtering per LookupFlag: mark-filtering set, or
   * mark-attachment-type class. */
  bool match_properties_mark (hb_codepoint_t  glyph,
			      unsigned int    glyph_props,
			      unsigned int    match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  /* Is this glyph visible to a lookup with the given props, per its
   * GDEF glyph class and mark-filtering settings? */
  bool check_glyph_property (const hb_glyph_info_t *info,
			     unsigned int           match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }

  /* Refresh the current glyph's props after a substitution: preserve
   * what should survive, mark as substituted, and track
   * ligation/multiplication for later mark positioning. */
  void _set_glyph_props (hb_codepoint_t glyph_index,
			  unsigned int class_guess = 0,
			  bool ligature = false,
			  bool component = false) const
  {
    unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
			  HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
    add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit.
       */
      add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
    else if (class_guess)
      _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
  }

  /* Substitute the current glyph, advancing the buffer. */
  void replace_glyph (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    (void) buffer->replace_glyph (glyph_index);
  }
  /* Substitute the current glyph without advancing the buffer. */
  void replace_glyph_inplace (hb_codepoint_t glyph_index) const
  {
    _set_glyph_props (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  /* Substitute the current glyph with a ligature glyph. */
  void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
				    unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, true);
    (void) buffer->replace_glyph (glyph_index);
  }
  /* Emit one component glyph of a Multiple substitution. */
  void output_glyph_for_component (hb_codepoint_t glyph_index,
				   unsigned int class_guess) const
  {
    _set_glyph_props (glyph_index, class_guess, false, true);
    (void) buffer->output_glyph (glyph_index);
  }
};
750
751
/* Dispatch context that flattens a lookup's subtables into an array of
 * type-erased (object, apply-function, coverage-digest) entries, so
 * apply can iterate them without re-dispatching on subtable type. */
struct hb_get_subtables_context_t :
       hb_dispatch_context_t<hb_get_subtables_context_t>
{
  /* Type-erased trampoline: recover the concrete subtable type and apply. */
  template <typename Type>
  static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return typed_obj->apply (c);
  }

  typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);

  /* One cached subtable: raw pointer, its apply trampoline, and a digest
   * of its coverage for a cheap pre-check. */
  struct hb_applicable_t
  {
    template <typename T>
    void init (const T &obj_, hb_apply_func_t apply_func_)
    {
      obj = &obj_;
      apply_func = apply_func_;
      digest.init ();
      obj_.get_coverage ().collect_coverage (&digest);
    }

    /* Skip the virtual-ish call entirely when the digest rules out the
     * current glyph. */
    bool apply (OT::hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
    }

    private:
    const void *obj;            /* The subtable; not owned. */
    hb_apply_func_t apply_func;
    hb_set_digest_t digest;     /* Approximate coverage filter. */
  };

  typedef hb_vector_t<hb_applicable_t> array_t;

  /* Dispatch interface. */
  template <typename T>
  return_t dispatch (const T &obj)
  {
    hb_applicable_t *entry = array.push();
    entry->init (obj, apply_to<T>);
    return hb_empty_t ();
  }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_get_subtables_context_t (array_t &array_) :
			      array (array_) {}

  array_t &array; /* Out: accumulated subtable entries. */
};
803
804
805
806
/* Callback flavors used by (Chain)Context lookups.  Each is instantiated
 * three ways: for glyph-id, class-based, and coverage-based matching. */
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);

/* Callbacks used during glyph closure. */
struct ContextClosureFuncs
{
  intersects_func_t intersects;
  intersected_glyphs_func_t intersected_glyphs;
};
/* Callback used during glyph collection. */
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
/* Callback used during apply-time matching. */
struct ContextApplyFuncs
{
  match_func_t match;
};
825
826
intersects_glyph(const hb_set_t * glyphs,const HBUINT16 & value,const void * data HB_UNUSED)827 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
828 {
829 return glyphs->has (value);
830 }
intersects_class(const hb_set_t * glyphs,const HBUINT16 & value,const void * data)831 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
832 {
833 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
834 return class_def.intersects_class (glyphs, value);
835 }
intersects_coverage(const hb_set_t * glyphs,const HBUINT16 & value,const void * data)836 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
837 {
838 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
839 return (data+coverage).intersects (glyphs);
840 }
841
842
intersected_glyph(const hb_set_t * glyphs HB_UNUSED,const void * data,unsigned value,hb_set_t * intersected_glyphs)843 static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
844 {
845 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
846 intersected_glyphs->add (g);
847 }
intersected_class_glyphs(const hb_set_t * glyphs,const void * data,unsigned value,hb_set_t * intersected_glyphs)848 static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
849 {
850 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
851 class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
852 }
/* intersected_glyphs_func_t for coverage-based matching: `value` is the
 * byte offset of a Coverage table relative to `data`. */
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
{
  /* Materialize a local offset object so that (data+coverage) resolves
   * the Coverage table at data+value; `value` arrives as a plain
   * unsigned here, so the reference-cast trick used elsewhere doesn't
   * apply. */
  Offset16To<Coverage> coverage;
  coverage = value;
  (data+coverage).intersected_coverage_glyphs (glyphs, intersected_glyphs);
}
859
860
array_is_subset_of(const hb_set_t * glyphs,unsigned int count,const HBUINT16 values[],intersects_func_t intersects_func,const void * intersects_data)861 static inline bool array_is_subset_of (const hb_set_t *glyphs,
862 unsigned int count,
863 const HBUINT16 values[],
864 intersects_func_t intersects_func,
865 const void *intersects_data)
866 {
867 for (const HBUINT16 &_ : + hb_iter (values, count))
868 if (!intersects_func (glyphs, _, intersects_data)) return false;
869 return true;
870 }
871
872
collect_glyph(hb_set_t * glyphs,const HBUINT16 & value,const void * data HB_UNUSED)873 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
874 {
875 glyphs->add (value);
876 }
collect_class(hb_set_t * glyphs,const HBUINT16 & value,const void * data)877 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
878 {
879 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
880 class_def.collect_class (glyphs, value);
881 }
collect_coverage(hb_set_t * glyphs,const HBUINT16 & value,const void * data)882 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
883 {
884 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
885 (data+coverage).collect_coverage (glyphs);
886 }
collect_array(hb_collect_glyphs_context_t * c HB_UNUSED,hb_set_t * glyphs,unsigned int count,const HBUINT16 values[],collect_glyphs_func_t collect_func,const void * collect_data)887 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
888 hb_set_t *glyphs,
889 unsigned int count,
890 const HBUINT16 values[],
891 collect_glyphs_func_t collect_func,
892 const void *collect_data)
893 {
894 return
895 + hb_iter (values, count)
896 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
897 ;
898 }
899
900
match_glyph(hb_codepoint_t glyph_id,const HBUINT16 & value,const void * data HB_UNUSED)901 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
902 {
903 return glyph_id == value;
904 }
match_class(hb_codepoint_t glyph_id,const HBUINT16 & value,const void * data)905 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
906 {
907 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
908 return class_def.get_class (glyph_id) == value;
909 }
match_coverage(hb_codepoint_t glyph_id,const HBUINT16 & value,const void * data)910 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
911 {
912 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
913 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
914 }
915
would_match_input(hb_would_apply_context_t * c,unsigned int count,const HBUINT16 input[],match_func_t match_func,const void * match_data)916 static inline bool would_match_input (hb_would_apply_context_t *c,
917 unsigned int count, /* Including the first glyph (not matched) */
918 const HBUINT16 input[], /* Array of input values--start with second glyph */
919 match_func_t match_func,
920 const void *match_data)
921 {
922 if (count != c->len)
923 return false;
924
925 for (unsigned int i = 1; i < count; i++)
926 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
927 return false;
928
929 return true;
930 }
/* Matches `count - 1` further glyphs (the first one was already matched by
 * the caller's coverage/class check) against `input` using `match_func`,
 * skipping ignorable glyphs via c->iter_input.
 *
 * On success:
 *  - match_positions[] receives the buffer index of each matched glyph;
 *  - *end_offset receives the length of the matched span in buffer positions;
 *  - *p_total_component_count (if non-null) receives the summed ligature
 *    component counts of the matched glyphs.
 * Returns false if fewer than `count` glyphs can be matched, or if matching
 * would ligate across incompatible ligature components (see comment below). */
static inline bool match_input (hb_ot_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const HBUINT16 input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_offset,
				unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
				unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType... Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct,
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/harfbuzz/harfbuzz/issues/545
   */

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  /* Lazily-computed verdict on whether the base ligature that the first
   * glyph is attached to may be skipped (second exception above). */
  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    if (!skippy_iter.next ()) return_trace (false);

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
	/* ...unless, we are attached to a base ligature and that base
	 * ligature is ignorable. */
	if (ligbase == LIGBASE_NOT_CHECKED)
	{
	  bool found = false;
	  const auto *out = buffer->out_info;
	  unsigned int j = buffer->out_len;
	  /* Scan backwards through already-output glyphs for the base
	   * (component 0) of the ligature the first glyph belongs to. */
	  while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
	  {
	    if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
	    {
	      j--;
	      found = true;
	      break;
	    }
	    j--;
	  }

	  /* Cache the verdict so the scan runs at most once per match. */
	  if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
	    ligbase = LIGBASE_MAY_SKIP;
	  else
	    ligbase = LIGBASE_MAY_NOT_SKIP;
	}

	if (ligbase == LIGBASE_MAY_NOT_SKIP)
	  return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_offset = skippy_iter.idx - buffer->idx + 1;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
/* Replaces the matched glyph sequence (as computed by match_input()) with the
 * single ligature glyph `lig_glyph`, preserving/reassigning ligature ids and
 * component numbers on surrounding marks so that later GPOS mark attachment
 * still finds the right components.  See the long comment below for the
 * hairy details.  Returns through return_trace; always true on completion. */
static inline bool ligate_input (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int match_length,
				 hb_codepoint_t lig_glyph,
				 unsigned int total_component_count)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, buffer->idx + match_length);

  /* - If a base and one or more marks ligate, consider that as a base, NOT
   *   ligature, such that all following marks can still attach to it.
   *   https://github.com/harfbuzz/harfbuzz/issues/1109
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *   If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  /* Classify the match: base+marks only, marks only, or a "real" ligature. */
  bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
  for (unsigned int i = 1; i < count; i++)
    if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
    {
      is_base_ligature = false;
      is_mark_ligature = false;
      break;
    }
  bool is_ligature = !is_base_ligature && !is_mark_ligature;

  unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
  unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (is_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    /* A non-spacing-mark first glyph becoming a ligature is re-categorized
     * as a letter so it is treated as a base from here on. */
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  /* Walk the remaining matched components, renumbering any in-between marks
   * to point at the corresponding component of the new ligature. */
  for (unsigned int i = 1; i < count; i++)
  {
    while (buffer->idx < match_positions[i] && buffer->successful)
    {
      if (is_ligature)
      {
	unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
	if (this_comp == 0)
	  this_comp = last_num_components;
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    hb_min (this_comp, last_num_components);
	  _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      (void) buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id)
  {
    /* Re-adjust components for any marks following. */
    for (unsigned i = buffer->idx; i < buffer->len; ++i)
    {
      if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;

      unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
      if (!this_comp) break;

      unsigned new_lig_comp = components_so_far - last_num_components +
			      hb_min (this_comp, last_num_components);
      _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
    }
  }
  return_trace (true);
}
1163
match_backtrack(hb_ot_apply_context_t * c,unsigned int count,const HBUINT16 backtrack[],match_func_t match_func,const void * match_data,unsigned int * match_start)1164 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1165 unsigned int count,
1166 const HBUINT16 backtrack[],
1167 match_func_t match_func,
1168 const void *match_data,
1169 unsigned int *match_start)
1170 {
1171 TRACE_APPLY (nullptr);
1172
1173 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1174 skippy_iter.reset (c->buffer->backtrack_len (), count);
1175 skippy_iter.set_match_func (match_func, match_data, backtrack);
1176
1177 for (unsigned int i = 0; i < count; i++)
1178 if (!skippy_iter.prev ())
1179 return_trace (false);
1180
1181 *match_start = skippy_iter.idx;
1182
1183 return_trace (true);
1184 }
1185
match_lookahead(hb_ot_apply_context_t * c,unsigned int count,const HBUINT16 lookahead[],match_func_t match_func,const void * match_data,unsigned int offset,unsigned int * end_index)1186 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1187 unsigned int count,
1188 const HBUINT16 lookahead[],
1189 match_func_t match_func,
1190 const void *match_data,
1191 unsigned int offset,
1192 unsigned int *end_index)
1193 {
1194 TRACE_APPLY (nullptr);
1195
1196 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1197 skippy_iter.reset (c->buffer->idx + offset - 1, count);
1198 skippy_iter.set_match_func (match_func, match_data, lookahead);
1199
1200 for (unsigned int i = 0; i < count; i++)
1201 if (!skippy_iter.next ())
1202 return_trace (false);
1203
1204 *end_index = skippy_iter.idx + 1;
1205
1206 return_trace (true);
1207 }
1208
1209
1210
/* A (sequence index, lookup index) pair stored at the end of contextual
 * rules: "apply lookup N at matched position M". */
struct LookupRecord
{
  /* Serializes a copy of this record, remapping the lookup index through
   * `lookup_map` (old lookup index -> subset lookup index).  Returns the
   * serialized copy, or nullptr on allocation failure. */
  LookupRecord* copy (hb_serialize_context_t *c,
		      const hb_map_t *lookup_map) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->embed (*this);
    if (unlikely (!out)) return_trace (nullptr);

    out->lookupListIndex = hb_map_get (lookup_map, lookupListIndex);
    return_trace (out);
  }

  /* Fixed-size struct: a bounds check is sufficient. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  HBUINT16	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  HBUINT16	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};
1237
/* How a (Chain)Context subtable encodes its input sequence:
 * 1 = glyph ids, 2 = ClassDef class values, 3 = per-position Coverage tables. */
enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
1239
/* Closure helper for contextual lookups: for each LookupRecord, computes the
 * set of glyphs that can occupy the record's sequence position (decoded per
 * `context_format`), pushes that set as the active-glyph set on the closure
 * context, and recurses into the referenced lookup.  The position set is
 * computed at most once per distinct sequence index (tracked in
 * covered_seq_indicies). */
static void context_closure_recurse_lookups (hb_closure_context_t *c,
					     unsigned inputCount, const HBUINT16 input[],
					     unsigned lookupCount,
					     const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
					     unsigned value,
					     ContextFormat context_format,
					     const void *data,
					     intersected_glyphs_func_t intersected_glyphs_func)
{
  hb_set_t *covered_seq_indicies = hb_set_create ();
  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned seqIndex = lookupRecord[i].sequenceIndex;
    if (seqIndex >= inputCount) continue;	/* Malformed record; skip. */

    hb_set_t *pos_glyphs = nullptr;		/* Owned; only built first time a seqIndex is seen. */

    if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
    {
      pos_glyphs = hb_set_create ();
      if (seqIndex == 0)
      {
	/* Position 0 is the glyph matched by the subtable's own coverage/class;
	 * decode it from `value` per the subtable format. */
	switch (context_format) {
	case ContextFormat::SimpleContext:
	  pos_glyphs->add (value);
	  break;
	case ContextFormat::ClassBasedContext:
	  intersected_glyphs_func (c->cur_intersected_glyphs, data, value, pos_glyphs);
	  break;
	case ContextFormat::CoverageBasedContext:
	  hb_set_set (pos_glyphs, c->cur_intersected_glyphs);
	  break;
	}
      }
      else
      {
	/* Later positions come from the input[] array; format 1 indexes the
	 * array directly, formats 2/3 look the array value up in `data`. */
	const void *input_data = input;
	unsigned input_value = seqIndex - 1;
	if (context_format != ContextFormat::SimpleContext)
	{
	  input_data = data;
	  input_value = input[seqIndex - 1];
	}

	intersected_glyphs_func (c->glyphs, input_data, input_value, pos_glyphs);
      }
    }

    hb_set_add (covered_seq_indicies, seqIndex);
    if (pos_glyphs)
      c->push_cur_active_glyphs (pos_glyphs);

    unsigned endIndex = inputCount;
    if (context_format == ContextFormat::CoverageBasedContext)
      endIndex += 1;

    c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);

    /* Pop/destroy only if we pushed a fresh set above. */
    if (pos_glyphs) {
      c->pop_cur_done_glyphs ();
      hb_set_destroy (pos_glyphs);
    }
  }

  hb_set_destroy (covered_seq_indicies);
}
1306
1307 template <typename context_t>
recurse_lookups(context_t * c,unsigned int lookupCount,const LookupRecord lookupRecord[])1308 static inline void recurse_lookups (context_t *c,
1309 unsigned int lookupCount,
1310 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1311 {
1312 for (unsigned int i = 0; i < lookupCount; i++)
1313 c->recurse (lookupRecord[i].lookupListIndex);
1314 }
1315
/* Applies each LookupRecord at its matched position (match_positions[], as
 * produced by match_input()).  After every recursion that changes the buffer
 * length, the remaining match positions are re-derived; see the long TODO
 * comment below for the approximations involved.  Always returns true via
 * return_trace once the records have been processed. */
static inline bool apply_lookup (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_length)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;
  int end;	/* One past the last matched position, in output-buffer indexing. */

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_length;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    /* Don't recurse to ourself at same position.
     * Note that this test is too naive, it doesn't catch longer loops. */
    if (unlikely (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index))
      continue;

    if (unlikely (!buffer->move_to (match_positions[idx])))
      break;

    /* Bail if the sanitizer op budget has been exhausted. */
    if (unlikely (buffer->max_ops <= 0))
      break;

    /* Track total buffer length across the recursion to detect inserts/deletes. */
    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust.
     *
     * TODO:
     *
     * Right now, if buffer length increased by n, we assume n new glyphs
     * were added right after the current position, and if buffer length
     * was decreased by n, we assume n match positions after the current
     * one where removed.  The former (buffer length increased) case is
     * fine, but the decrease case can be improved in at least two ways,
     * both of which are significant:
     *
     *   - If recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's current match position that was deleted,
     *     NOT the one after it.
     *
     *   - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions where removed, as there might
     *     have been marks and default-ignorables in the sequence.  We
     *     should instead drop match positions between current-position
     *     and current-position + n instead.  Though, am not sure which
     *     one is better.  Both cases have valid uses.  Sigh.
     *
     * It should be possible to construct tests for both of these cases.
     */

    end += delta;
    if (end <= int (match_positions[idx]))
    {
      /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items, more than we have had matched.
       * Just never rewind end back and get out of here.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
      end = match_positions[idx];
      /* There can't be any further changes. */
      break;
    }

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
	break;
    }
    else
    {
      /* NOTE: delta is negative. */
      delta = hb_max (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
	     (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  (void) buffer->move_to (end);

  return_trace (true);
}
1437
1438
1439
1440 /* Contextual lookups */
1441
/* Bundles the per-format function pointers and table data needed by the
 * closure pass over contextual rules. */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  ContextFormat context_format;		/* How rule input values are encoded. */
  const void *intersects_data;		/* ClassDef/Coverage base passed to funcs. */
};
1448
/* Bundles the collect function and its table data for the collect-glyphs
 * pass over contextual rules. */
struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;		/* ClassDef/Coverage base passed to funcs. */
};
1454
/* Bundles the match function and its table data for applying contextual
 * rules. */
struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;		/* ClassDef/Coverage base passed to funcs. */
};
1460
context_intersects(const hb_set_t * glyphs,unsigned int inputCount,const HBUINT16 input[],ContextClosureLookupContext & lookup_context)1461 static inline bool context_intersects (const hb_set_t *glyphs,
1462 unsigned int inputCount, /* Including the first glyph (not matched) */
1463 const HBUINT16 input[], /* Array of input values--start with second glyph */
1464 ContextClosureLookupContext &lookup_context)
1465 {
1466 return array_is_subset_of (glyphs,
1467 inputCount ? inputCount - 1 : 0, input,
1468 lookup_context.funcs.intersects, lookup_context.intersects_data);
1469 }
1470
context_closure_lookup(hb_closure_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ContextClosureLookupContext & lookup_context)1471 static inline void context_closure_lookup (hb_closure_context_t *c,
1472 unsigned int inputCount, /* Including the first glyph (not matched) */
1473 const HBUINT16 input[], /* Array of input values--start with second glyph */
1474 unsigned int lookupCount,
1475 const LookupRecord lookupRecord[],
1476 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1477 ContextClosureLookupContext &lookup_context)
1478 {
1479 if (context_intersects (c->glyphs,
1480 inputCount, input,
1481 lookup_context))
1482 context_closure_recurse_lookups (c,
1483 inputCount, input,
1484 lookupCount, lookupRecord,
1485 value,
1486 lookup_context.context_format,
1487 lookup_context.intersects_data,
1488 lookup_context.funcs.intersected_glyphs);
1489 }
1490
context_collect_glyphs_lookup(hb_collect_glyphs_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextCollectGlyphsLookupContext & lookup_context)1491 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1492 unsigned int inputCount, /* Including the first glyph (not matched) */
1493 const HBUINT16 input[], /* Array of input values--start with second glyph */
1494 unsigned int lookupCount,
1495 const LookupRecord lookupRecord[],
1496 ContextCollectGlyphsLookupContext &lookup_context)
1497 {
1498 collect_array (c, c->input,
1499 inputCount ? inputCount - 1 : 0, input,
1500 lookup_context.funcs.collect, lookup_context.collect_data);
1501 recurse_lookups (c,
1502 lookupCount, lookupRecord);
1503 }
1504
context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,ContextApplyLookupContext & lookup_context)1505 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1506 unsigned int inputCount, /* Including the first glyph (not matched) */
1507 const HBUINT16 input[], /* Array of input values--start with second glyph */
1508 unsigned int lookupCount HB_UNUSED,
1509 const LookupRecord lookupRecord[] HB_UNUSED,
1510 ContextApplyLookupContext &lookup_context)
1511 {
1512 return would_match_input (c,
1513 inputCount, input,
1514 lookup_context.funcs.match, lookup_context.match_data);
1515 }
context_apply_lookup(hb_ot_apply_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextApplyLookupContext & lookup_context)1516 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1517 unsigned int inputCount, /* Including the first glyph (not matched) */
1518 const HBUINT16 input[], /* Array of input values--start with second glyph */
1519 unsigned int lookupCount,
1520 const LookupRecord lookupRecord[],
1521 ContextApplyLookupContext &lookup_context)
1522 {
1523 unsigned int match_length = 0;
1524 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1525 return match_input (c,
1526 inputCount, input,
1527 lookup_context.funcs.match, lookup_context.match_data,
1528 &match_length, match_positions)
1529 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
1530 apply_lookup (c,
1531 inputCount, match_positions,
1532 lookupCount, lookupRecord,
1533 match_length));
1534 }
1535
1536 struct Rule
1537 {
intersectsOT::Rule1538 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1539 {
1540 return context_intersects (glyphs,
1541 inputCount, inputZ.arrayZ,
1542 lookup_context);
1543 }
1544
closureOT::Rule1545 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
1546 {
1547 if (unlikely (c->lookup_limit_exceeded ())) return;
1548
1549 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1550 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1551 context_closure_lookup (c,
1552 inputCount, inputZ.arrayZ,
1553 lookupCount, lookupRecord.arrayZ,
1554 value, lookup_context);
1555 }
1556
closure_lookupsOT::Rule1557 void closure_lookups (hb_closure_lookups_context_t *c,
1558 ContextClosureLookupContext &lookup_context) const
1559 {
1560 if (unlikely (c->lookup_limit_exceeded ())) return;
1561 if (!intersects (c->glyphs, lookup_context)) return;
1562
1563 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1564 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1565 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1566 }
1567
collect_glyphsOT::Rule1568 void collect_glyphs (hb_collect_glyphs_context_t *c,
1569 ContextCollectGlyphsLookupContext &lookup_context) const
1570 {
1571 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1572 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1573 context_collect_glyphs_lookup (c,
1574 inputCount, inputZ.arrayZ,
1575 lookupCount, lookupRecord.arrayZ,
1576 lookup_context);
1577 }
1578
would_applyOT::Rule1579 bool would_apply (hb_would_apply_context_t *c,
1580 ContextApplyLookupContext &lookup_context) const
1581 {
1582 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1583 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1584 return context_would_apply_lookup (c,
1585 inputCount, inputZ.arrayZ,
1586 lookupCount, lookupRecord.arrayZ,
1587 lookup_context);
1588 }
1589
applyOT::Rule1590 bool apply (hb_ot_apply_context_t *c,
1591 ContextApplyLookupContext &lookup_context) const
1592 {
1593 TRACE_APPLY (this);
1594 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1595 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1596 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
1597 }
1598
serializeOT::Rule1599 bool serialize (hb_serialize_context_t *c,
1600 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1601 const hb_map_t *lookup_map) const
1602 {
1603 TRACE_SERIALIZE (this);
1604 auto *out = c->start_embed (this);
1605 if (unlikely (!c->extend_min (out))) return_trace (false);
1606
1607 out->inputCount = inputCount;
1608 out->lookupCount = lookupCount;
1609
1610 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1611 for (const auto org : input)
1612 {
1613 HBUINT16 d;
1614 d = input_mapping->get (org);
1615 c->copy (d);
1616 }
1617
1618 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1619 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1620 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
1621 c->copy (lookupRecord[i], lookup_map);
1622
1623 return_trace (true);
1624 }
1625
subsetOT::Rule1626 bool subset (hb_subset_context_t *c,
1627 const hb_map_t *lookup_map,
1628 const hb_map_t *klass_map = nullptr) const
1629 {
1630 TRACE_SUBSET (this);
1631
1632 const hb_array_t<const HBUINT16> input = inputZ.as_array ((inputCount ? inputCount - 1 : 0));
1633 if (!input.length) return_trace (false);
1634
1635 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1636 if (!hb_all (input, mapping)) return_trace (false);
1637 return_trace (serialize (c->serializer, mapping, lookup_map));
1638 }
1639
1640 public:
sanitizeOT::Rule1641 bool sanitize (hb_sanitize_context_t *c) const
1642 {
1643 TRACE_SANITIZE (this);
1644 return_trace (inputCount.sanitize (c) &&
1645 lookupCount.sanitize (c) &&
1646 c->check_range (inputZ.arrayZ,
1647 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1648 LookupRecord::static_size * lookupCount));
1649 }
1650
1651 protected:
1652 HBUINT16 inputCount; /* Total number of glyphs in input
1653 * glyph sequence--includes the first
1654 * glyph */
1655 HBUINT16 lookupCount; /* Number of LookupRecords */
1656 UnsizedArrayOf<HBUINT16>
1657 inputZ; /* Array of match inputs--start with
1658 * second glyph */
1659 /*UnsizedArrayOf<LookupRecord>
1660 lookupRecordX;*/ /* Array of LookupRecords--in
1661 * design order */
1662 public:
1663 DEFINE_SIZE_ARRAY (4, inputZ);
1664 };
1665
/* A collection of Rule tables sharing the same first-glyph slot (same
 * Coverage index for ContextFormat1, same class for ContextFormat2).
 * Rules are stored in preference order: apply() takes the first match. */
struct RuleSet
{
  /* True if any rule in this set can fully match within 'glyphs'. */
  bool intersects (const hb_set_t *glyphs,
                   ContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))  /* resolve Offset16To<Rule> against this base */
    | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Glyph closure: run closure on every rule.  'value' is the coverage
   * index / class value of the slot this set belongs to. */
  void closure (hb_closure_context_t *c, unsigned value,
                ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;  /* recursion guard */

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  /* Collect the set of lookups reachable from any rule in the set. */
  void closure_lookups (hb_closure_lookups_context_t *c,
                        ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;  /* recursion guard */
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Accumulate every glyph any rule can match into the collect context. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
                       ContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* True if some rule would match the glyph sequence in 'c'. */
  bool would_apply (hb_would_apply_context_t *c,
                    ContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Apply the first matching rule; hb_any short-circuits the iteration
   * as soon as a rule's apply() returns true. */
  bool apply (hb_ot_apply_context_t *c,
              ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    return_trace (
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
    | hb_any
    )
    ;
  }

  /* Subset: serialize each surviving rule.  Rules that fail to subset
   * (their glyphs/classes were pruned) are popped and reverted one by
   * one; if nothing survives, the whole set is reverted and we fail. */
  bool subset (hb_subset_context_t *c,
               const hb_map_t *lookup_map,
               const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<Rule>& _ : rule)
    {
      if (!_) continue;  /* skip null offsets */
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      auto o_snap = c->serializer->snapshot ();
      if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
      {
        /* Rule did not survive subsetting; undo its allocation. */
        out->rule.pop ();
        c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);  /* no rule survived */

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<Rule>
                rule;                   /* Array of Rule tables
                                         * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
1778
1779
/* Contextual lookup subtable, format 1: contexts are sequences of
 * individual glyph ids.  The first glyph of each context is looked up
 * through 'coverage'; the RuleSet at the resulting coverage index holds
 * the rules for the remaining glyphs. */
struct ContextFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  {
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    return
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*glyphs, hb_first)  /* only covered glyphs present in the set */
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups can rewrite more than one glyph. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    /* Restrict closure to glyphs reachable from the parent lookup. */
    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    /* Pair each active covered glyph with its RuleSet; the glyph id is
     * passed down as the 'value' for the rule set's slot. */
    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
    | hb_filter (c->parent_active_glyphs (), hb_first)
    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
    | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
    ;
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      nullptr
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Common case: current glyph not covered by this subtable. */
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    /* Keep only covered glyphs that are retained AND whose RuleSet
     * subsets successfully; collect their new glyph ids for the new
     * Coverage table. */
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize (c->serializer, out)
                 .serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  Offset16To<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of table */
  Array16OfOffset16To<RuleSet>
                ruleSet;                /* Array of RuleSet tables
                                         * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
1923
1924
/* Contextual lookup subtable, format 2: contexts are sequences of glyph
 * classes from 'classDef'.  'ruleSet' is indexed by the class of the
 * first glyph (which must also pass 'coverage'). */
struct ContextFormat2
{
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def
    };

    /* p.first (the enumeration index) is the class value the rule set
     * is keyed on. */
    return
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
              { return class_def.intersects_class (glyphs, p.first) &&
                       p.second.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups can rewrite more than one glyph. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    /* Restrict closure to glyphs reachable from the parent lookup. */
    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def
    };

    /* Only visit rule sets whose class occurs among the active glyphs;
     * the class value is passed down as the slot 'value'. */
    return
    + hb_enumerate (ruleSet)
    | hb_filter ([&] (unsigned _)
                 { return class_def.intersects_class (c->cur_intersected_glyphs, _); },
                 hb_first)
    | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<RuleSet>&> _)
                {
                  const RuleSet& rule_set = this+_.second;
                  rule_set.closure (c, _.first, lookup_context);
                })
    ;
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
                 { return class_def.intersects_class (c->glyphs, p.first); })
    | hb_map (hb_second)
    | hb_apply ([&] (const RuleSet & _)
                { _.closure_lookups (c, lookup_context); });
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Coverage gates applicability; the class picks the rule set. */
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
      return_trace (false);

    /* Subsetting the ClassDef also yields the old-class -> new-class map
     * used to remap rule values below. */
    hb_map_t klass_map;
    out->classDef.serialize_subset (c, classDef, this, &klass_map);

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    bool ret = true;
    int non_zero_index = 0, index = 0;
    for (const auto& _ : + hb_enumerate (ruleSet)
                         | hb_filter (klass_map, hb_first))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
        ret = false;
        break;
      }

      /* Remember the last rule set that actually serialized, so empty
       * trailing entries can be pruned below. */
      if (o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
        non_zero_index = index;

      index++;
    }

    if (!ret) return_trace (ret);

    //prune empty trailing ruleSets
    --index;
    while (index > non_zero_index)
    {
      out->ruleSet.pop ();
      index--;
    }

    return_trace (bool (out->ruleSet));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 2 */
  Offset16To<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of table */
  Offset16To<ClassDef>
                classDef;               /* Offset to glyph ClassDef table--from
                                         * beginning of table */
  Array16OfOffset16To<RuleSet>
                ruleSet;                /* Array of RuleSet tables
                                         * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};
2118
2119
/* Contextual lookup subtable, format 3: a single rule whose input
 * positions are each described by their own Coverage table.
 * coverageZ[0] covers the first glyph; the LookupRecords are stored
 * immediately after the coverage-offset array. */
struct ContextFormat3
{
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverageZ[0]).intersects (glyphs))
      return false;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      this
    };
    /* Coverages 1..glyphCount-1 play the role of the "input" sequence. */
    return context_intersects (glyphs,
                               glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                               lookup_context);
  }

  /* Contextual lookups can rewrite more than one glyph. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    /* Restrict closure to glyphs reachable from the parent lookup. */
    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      this
    };
    context_closure_lookup (c,
                            glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                            lookupCount, lookupRecord,
                            0, lookup_context);
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!intersects (c->glyphs))
      return;
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    recurse_lookups (c, lookupCount, lookupRecord);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverageZ[0]).collect_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
                                   glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return context_would_apply_lookup (c,
                                       glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                                       lookupCount, lookupRecord,
                                       lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverageZ[0]; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Common case: current glyph not covered by this subtable. */
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->glyphCount = glyphCount;
    out->lookupCount = lookupCount;

    auto coverages = coverageZ.as_array (glyphCount);

    for (const Offset16To<Coverage>& offset : coverages)
    {
      /* TODO(subset) This looks like should not be necessary to write this way. */
      auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
      if (unlikely (!o)) return_trace (false);
      if (!o->serialize_subset (c, offset, this)) return_trace (false);
    }

    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    for (unsigned i = 0; i < (unsigned) lookupCount; i++)
      c->serializer->copy (lookupRecord[i], lookup_map);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    return_trace (c->check_array (lookupRecord, lookupCount));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 3 */
  HBUINT16      glyphCount;             /* Number of glyphs in the input glyph
                                         * sequence */
  HBUINT16      lookupCount;            /* Number of LookupRecords */
  UnsizedArrayOf<Offset16To<Coverage>>
                coverageZ;              /* Array of offsets to Coverage
                                         * table in glyph sequence order */
  /*UnsizedArrayOf<LookupRecord>
                lookupRecordX;*/        /* Array of LookupRecords--in
                                         * design order */
  public:
  DEFINE_SIZE_ARRAY (6, coverageZ);
};
2270
/* Top-level contextual lookup subtable: a tagged union over the three
 * Context formats, dispatched on the leading 'format' field. */
struct Context
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());  /* unknown format: no-op */
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ContextFormat1        format1;
  ContextFormat2        format2;
  ContextFormat3        format3;
  } u;
};
2294
2295
2296 /* Chaining Contextual lookups */
2297
/* Closure-time context for chain-contextual lookups.  The per-sequence
 * data slots are indexed [0]=backtrack, [1]=input, [2]=lookahead (see
 * chain_context_intersects). */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  ContextFormat context_format;
  const void *intersects_data[3];
};
2304
/* Glyph-collection context for chain-contextual lookups.  Data slots are
 * indexed [0]=backtrack, [1]=input, [2]=lookahead (see
 * chain_context_collect_glyphs_lookup). */
struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};
2310
/* Apply-time context for chain-contextual lookups.  Match-data slots are
 * indexed [0]=backtrack, [1]=input, [2]=lookahead (see
 * chain_context_apply_lookup). */
struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};
2316
chain_context_intersects(const hb_set_t * glyphs,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],ChainContextClosureLookupContext & lookup_context)2317 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2318 unsigned int backtrackCount,
2319 const HBUINT16 backtrack[],
2320 unsigned int inputCount, /* Including the first glyph (not matched) */
2321 const HBUINT16 input[], /* Array of input values--start with second glyph */
2322 unsigned int lookaheadCount,
2323 const HBUINT16 lookahead[],
2324 ChainContextClosureLookupContext &lookup_context)
2325 {
2326 return array_is_subset_of (glyphs,
2327 backtrackCount, backtrack,
2328 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2329 && array_is_subset_of (glyphs,
2330 inputCount ? inputCount - 1 : 0, input,
2331 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2332 && array_is_subset_of (glyphs,
2333 lookaheadCount, lookahead,
2334 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
2335 }
2336
chain_context_closure_lookup(hb_closure_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ChainContextClosureLookupContext & lookup_context)2337 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2338 unsigned int backtrackCount,
2339 const HBUINT16 backtrack[],
2340 unsigned int inputCount, /* Including the first glyph (not matched) */
2341 const HBUINT16 input[], /* Array of input values--start with second glyph */
2342 unsigned int lookaheadCount,
2343 const HBUINT16 lookahead[],
2344 unsigned int lookupCount,
2345 const LookupRecord lookupRecord[],
2346 unsigned value,
2347 ChainContextClosureLookupContext &lookup_context)
2348 {
2349 if (chain_context_intersects (c->glyphs,
2350 backtrackCount, backtrack,
2351 inputCount, input,
2352 lookaheadCount, lookahead,
2353 lookup_context))
2354 context_closure_recurse_lookups (c,
2355 inputCount, input,
2356 lookupCount, lookupRecord,
2357 value,
2358 lookup_context.context_format,
2359 lookup_context.intersects_data[1],
2360 lookup_context.funcs.intersected_glyphs);
2361 }
2362
/* Accumulate all glyphs a chain rule can match: backtrack glyphs into
 * c->before, input glyphs into c->input, lookahead glyphs into c->after;
 * then recurse into the referenced lookups. */
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                        unsigned int backtrackCount,
                                                        const HBUINT16 backtrack[],
                                                        unsigned int inputCount, /* Including the first glyph (not matched) */
                                                        const HBUINT16 input[], /* Array of input values--start with second glyph */
                                                        unsigned int lookaheadCount,
                                                        const HBUINT16 lookahead[],
                                                        unsigned int lookupCount,
                                                        const LookupRecord lookupRecord[],
                                                        ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
                 backtrackCount, backtrack,
                 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  /* The first input glyph is matched elsewhere and not stored in 'input'. */
  collect_array (c, c->input,
                 inputCount ? inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
                 lookaheadCount, lookahead,
                 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}
2386
chain_context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[]HB_UNUSED,unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[]HB_UNUSED,unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,ChainContextApplyLookupContext & lookup_context)2387 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2388 unsigned int backtrackCount,
2389 const HBUINT16 backtrack[] HB_UNUSED,
2390 unsigned int inputCount, /* Including the first glyph (not matched) */
2391 const HBUINT16 input[], /* Array of input values--start with second glyph */
2392 unsigned int lookaheadCount,
2393 const HBUINT16 lookahead[] HB_UNUSED,
2394 unsigned int lookupCount HB_UNUSED,
2395 const LookupRecord lookupRecord[] HB_UNUSED,
2396 ChainContextApplyLookupContext &lookup_context)
2397 {
2398 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2399 && would_match_input (c,
2400 inputCount, input,
2401 lookup_context.funcs.match, lookup_context.match_data[1]);
2402 }
2403
/* Match a chain rule at the current buffer position and, on success,
 * apply its lookup records.  The input sequence is matched first because
 * match_length/match_positions are consumed by the lookahead match and by
 * apply_lookup; the comma expression marks the matched span unbreakable
 * before applying. */
static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
                                               unsigned int backtrackCount,
                                               const HBUINT16 backtrack[],
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const HBUINT16 input[], /* Array of input values--start with second glyph */
                                               unsigned int lookaheadCount,
                                               const HBUINT16 lookahead[],
                                               unsigned int lookupCount,
                                               const LookupRecord lookupRecord[],
                                               ChainContextApplyLookupContext &lookup_context)
{
  unsigned int start_index = 0, match_length = 0, end_index = 0;
  unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
  return match_input (c,
                      inputCount, input,
                      lookup_context.funcs.match, lookup_context.match_data[1],
                      &match_length, match_positions)
      && match_backtrack (c,
                          backtrackCount, backtrack,
                          lookup_context.funcs.match, lookup_context.match_data[0],
                          &start_index)
      && match_lookahead (c,
                          lookaheadCount, lookahead,
                          lookup_context.funcs.match, lookup_context.match_data[2],
                          match_length, &end_index)
      && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
          apply_lookup (c,
                        inputCount, match_positions,
                        lookupCount, lookupRecord,
                        match_length));
}
2435
2436 struct ChainRule
2437 {
intersectsOT::ChainRule2438 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2439 {
2440 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2441 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2442 return chain_context_intersects (glyphs,
2443 backtrack.len, backtrack.arrayZ,
2444 input.lenP1, input.arrayZ,
2445 lookahead.len, lookahead.arrayZ,
2446 lookup_context);
2447 }
2448
  /* Glyph-closure this rule.  'value' is the coverage index / class /
   * glyph of the slot the containing rule set is keyed on. */
  void closure (hb_closure_context_t *c, unsigned value,
                ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;  /* recursion guard */

    /* The four variable-length arrays are laid out back to back:
     * backtrack, input, lookahead, lookup records. */
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.arrayZ,
                                  input.lenP1, input.arrayZ,
                                  lookahead.len, lookahead.arrayZ,
                                  lookup.len, lookup.arrayZ,
                                  value,
                                  lookup_context);
  }
2465
  /* Collect the lookups reachable from this rule, but only if the rule
   * can actually match within c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c,
                        ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;  /* recursion guard */
    if (!intersects (c->glyphs, lookup_context)) return;

    /* Skip past the back-to-back input and lookahead arrays to reach the
     * lookup records. */
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    recurse_lookups (c, lookup.len, lookup.arrayZ);
  }
2477
  /* Accumulate every glyph this rule can match into the collect context. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
                       ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    /* The four variable-length arrays are laid out back to back:
     * backtrack, input, lookahead, lookup records. */
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, backtrack.arrayZ,
                                         input.lenP1, input.arrayZ,
                                         lookahead.len, lookahead.arrayZ,
                                         lookup.len, lookup.arrayZ,
                                         lookup_context);
  }
2491
would_applyOT::ChainRule2492 bool would_apply (hb_would_apply_context_t *c,
2493 ChainContextApplyLookupContext &lookup_context) const
2494 {
2495 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2496 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2497 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2498 return chain_context_would_apply_lookup (c,
2499 backtrack.len, backtrack.arrayZ,
2500 input.lenP1, input.arrayZ,
2501 lookahead.len, lookahead.arrayZ, lookup.len,
2502 lookup.arrayZ, lookup_context);
2503 }
2504
  /* Match this rule at the current buffer position and apply its lookup
   * records on success. */
  bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    /* The four variable-length arrays are laid out back to back:
     * backtrack, input, lookahead, lookup records. */
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, backtrack.arrayZ,
                                              input.lenP1, input.arrayZ,
                                              lookahead.len, lookahead.arrayZ, lookup.len,
                                              lookup.arrayZ, lookup_context));
  }
2517
  /* Serialize a 16-bit length followed by the (already remapped) values
   * produced by 'it'. */
  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  void serialize_array (hb_serialize_context_t *c,
                        HBUINT16 len,
                        Iterator it) const
  {
    c->copy (len);
    for (const auto g : it)
      c->copy ((HBUINT16) g);  /* narrow back to the on-disk 16-bit type */
  }
2528
  /* Serializes a remapped copy of this ChainRule into `c`.
   * Sequence values are remapped through `backtrack_map` (reused for all
   * three sequences when input/lookahead maps are absent, i.e. the
   * glyph-id case), and lookup indices through `lookup_map`.  Records
   * whose lookup is not in `lookup_map` are dropped and the serialized
   * count is patched down.  Returns the copy, or nullptr on error. */
  ChainRule* copy (hb_serialize_context_t *c,
		   const hb_map_t *lookup_map,
		   const hb_map_t *backtrack_map,
		   const hb_map_t *input_map = nullptr,
		   const hb_map_t *lookahead_map = nullptr) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    /* Same mapping serves every sequence unless per-sequence (class)
     * maps were supplied. */
    const hb_map_t *mapping = backtrack_map;
    serialize_array (c, backtrack.len, + backtrack.iter ()
				       | hb_map (mapping));

    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    if (input_map) mapping = input_map;
    serialize_array (c, input.lenP1, + input.iter ()
				     | hb_map (mapping));

    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    if (lookahead_map) mapping = lookahead_map;
    serialize_array (c, lookahead.len, + lookahead.iter ()
				       | hb_map (mapping));

    const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);

    /* Embed the record count first; decremented in place below as
     * records referencing dropped lookups are skipped. */
    HBUINT16* lookupCount = c->embed (&(lookupRecord.len));
    if (!lookupCount) return_trace (nullptr);

    for (unsigned i = 0; i < lookupRecord.len; i++)
    {
      if (!lookup_map->has (lookupRecord[i].lookupListIndex))
      {
	(*lookupCount)--;
	continue;
      }
      if (!c->copy (lookupRecord[i], lookup_map)) return_trace (nullptr);
    }

    return_trace (out);
  }
2570
  /* Subsets this rule.  Without `backtrack_map` (glyph-id sequences) the
   * rule survives only if every referenced glyph is retained, and values
   * are remapped through the plan's glyph map; with the map (class-based
   * sequences) values are checked/remapped against the per-sequence
   * class maps instead.  Returns false when the rule must be dropped.
   * NOTE(review): copy()'s result is intentionally not checked here —
   * serializer errors are presumably tracked inside c->serializer and
   * surfaced by the caller; confirm against hb_serialize_context_t. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *backtrack_map = nullptr,
	       const hb_map_t *input_map = nullptr,
	       const hb_map_t *lookahead_map = nullptr) const
  {
    TRACE_SUBSET (this);

    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);

    if (!backtrack_map)
    {
      /* Glyph-based rule: all sequence glyphs must survive subsetting. */
      const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
      if (!hb_all (backtrack, glyphset) ||
	  !hb_all (input, glyphset) ||
	  !hb_all (lookahead, glyphset))
	return_trace (false);

      copy (c->serializer, lookup_map, c->plan->glyph_map);
    }
    else
    {
      /* Class-based rule: all classes must be present in their maps. */
      if (!hb_all (backtrack, backtrack_map) ||
	  !hb_all (input, input_map) ||
	  !hb_all (lookahead, lookahead_map))
	return_trace (false);

      copy (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
    }

    return_trace (true);
  }
2604
sanitizeOT::ChainRule2605 bool sanitize (hb_sanitize_context_t *c) const
2606 {
2607 TRACE_SANITIZE (this);
2608 if (!backtrack.sanitize (c)) return_trace (false);
2609 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2610 if (!input.sanitize (c)) return_trace (false);
2611 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2612 if (!lookahead.sanitize (c)) return_trace (false);
2613 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2614 return_trace (lookup.sanitize (c));
2615 }
2616
  protected:
  /* ChainRule is a variable-size structure: the four arrays below are
   * laid out back to back, so only `backtrack` is directly addressable.
   * The X-suffixed members merely document the layout; they are located
   * at runtime via StructAfter<> and must not be accessed as members. */
  Array16Of<HBUINT16>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<HBUINT16>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  Array16Of<HBUINT16>
		lookaheadX;		/* Array of lookahead values (to be
					 * matched after the input sequence) */
  Array16Of<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};
2634
/* Set of ChainRule tables sharing one entry point (a Coverage glyph in
 * Format1, an input class in Format2).  Rules are stored in preference
 * order; apply() succeeds on the first rule that matches. */
struct ChainRuleSet
{
  /* Returns whether any rule in the set can match within `glyphs`. */
  bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))	/* Offset16To<ChainRule> -> ChainRule& */
    | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Glyph closure: recurses into every rule. */
  void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
  {
    /* Bail out on pathological fonts that exceed the recursion budget. */
    if (unlikely (c->lookup_limit_exceeded ())) return;

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  /* Collects the lookup indices reachable from this rule set. */
  void closure_lookups (hb_closure_lookups_context_t *c,
			ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Accumulates every glyph any rule may reference. */
  void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* Returns whether some rule would match the sequence in `c`. */
  bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Applies the first matching rule; hb_any short-circuits on success. */
  bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    return_trace (
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
    | hb_any
    )
    ;
  }

  /* Subsets every rule; rules that fail (reference dropped glyphs or
   * classes) are reverted and omitted.  Fails if no rule survives, in
   * which case the whole set's serialization is rolled back. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *backtrack_klass_map = nullptr,
	       const hb_map_t *input_klass_map = nullptr,
	       const hb_map_t *lookahead_klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<ChainRule>& _ : rule)
    {
      if (!_) continue;		/* Skip null offsets. */
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      auto o_snap = c->serializer->snapshot ();
      if (!o->serialize_subset (c, _, this,
				lookup_map,
				backtrack_klass_map,
				input_klass_map,
				lookahead_klass_map))
      {
	/* Rule dropped: undo both the appended offset slot and any
	 * bytes the failed subset emitted. */
	out->rule.pop ();
	c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
2748
/* Chained Sequence Context, Format 1: rule sets are selected by the
 * first glyph's Coverage index, and rule sequence values are individual
 * glyph ids (SimpleContext). */
struct ChainContextFormat1
{
  /* Returns whether any covered glyph has a rule set intersecting `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}	/* No ClassDefs in glyph-based format. */
    };

    return
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*glyphs, hb_first)	/* Keep covered glyphs present in the set. */
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups may rewrite multiple glyphs at once. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure over rule sets reachable from the active glyphs. */
  void closure (hb_closure_context_t *c) const
  {
    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
    | hb_filter (c->parent_active_glyphs (), hb_first)
    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
    | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
    ;
  }

  /* Collects reachable lookup indices from rule sets of live glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Chain context carries no device/variation data of its own. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  /* Collects all glyphs this subtable can match. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {nullptr, nullptr, nullptr}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {nullptr, nullptr, nullptr}
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* `likely` because most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_glyph},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subsets coverage and rule sets in lockstep: a glyph stays covered
   * only if it survives the plan AND its rule set subsets successfully. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize (c->serializer, out)
		 .serialize (c->serializer, new_coverage.iter ());
    /* Empty coverage means nothing survived; drop the subtable. */
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  Offset16To<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  Array16OfOffset16To<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
2891
/* Chained Sequence Context, Format 2: rule sets are selected by the
 * input class of the first glyph, and rule sequence values are class
 * numbers resolved through three separate ClassDefs (backtrack, input,
 * lookahead). */
struct ChainContextFormat2
{
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    /* ruleSet is indexed by input class; the enumeration index doubles
     * as the class number of the set's first glyph. */
    return
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
	      { return input_class_def.intersects_class (glyphs, p.first) &&
		       p.second.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups may rewrite multiple glyphs at once. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    /* Only visit rule sets whose input class has live glyphs. */
    return
    + hb_enumerate (ruleSet)
    | hb_filter ([&] (unsigned _)
		 { return input_class_def.intersects_class (c->cur_intersected_glyphs, _); },
		 hb_first)
    | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<ChainRuleSet>&> _)
		{
		  const ChainRuleSet& chainrule_set = this+_.second;
		  chainrule_set.closure (c, _.first, lookup_context);
		})
    ;
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_filter([&] (unsigned klass)
		{ return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const ChainRuleSet &_)
		{ _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Chain context carries no device/variation data of its own. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* `likely` because most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Coverage gates entry; the rule set is selected by input class. */
    index = input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {match_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subsets the three ClassDefs (collecting old->new class mappings),
   * then subsets the rule sets of surviving input classes, and finally
   * prunes empty trailing rule sets to shrink the array. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    out->coverage.serialize_subset (c, coverage, this);

    hb_map_t backtrack_klass_map;
    hb_map_t input_klass_map;
    hb_map_t lookahead_klass_map;

    out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
    // TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
    out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
    out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);

    if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
						   input_klass_map,
						   lookahead_klass_map)))
      return_trace (false);

    /* Track the last rule set that serialized non-empty so trailing
     * empties can be reverted afterwards. */
    int non_zero_index = -1, index = 0;
    bool ret = true;
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    auto last_non_zero = c->serializer->snapshot ();
    for (const Offset16To<ChainRuleSet>& _ : + hb_enumerate (ruleSet)
					     | hb_filter (input_klass_map, hb_first)
					     | hb_map (hb_second))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
	ret = false;
	break;
      }
      if (o->serialize_subset (c, _, this,
			       lookup_map,
			       &backtrack_klass_map,
			       &input_klass_map,
			       &lookahead_klass_map))
      {
	last_non_zero = c->serializer->snapshot ();
	non_zero_index = index;
      }

      index++;
    }

    if (!ret) return_trace (ret);

    // prune empty trailing ruleSets
    if (index > non_zero_index) {
      c->serializer->revert (last_non_zero);
      out->ruleSet.len = non_zero_index + 1;
    }

    return_trace (bool (out->ruleSet));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
		  backtrackClassDef.sanitize (c, this) &&
		  inputClassDef.sanitize (c, this) &&
		  lookaheadClassDef.sanitize (c, this) &&
		  ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 2 */
  Offset16To<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  Offset16To<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  Offset16To<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  Offset16To<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  Array16OfOffset16To<ChainRuleSet>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};
3142
/* Chained Sequence Context, Format 3: a single rule whose sequence
 * positions are each described by their own Coverage table.  The four
 * variable-length arrays (backtrack, input, lookahead, lookup records)
 * are laid out back to back and located via StructAfter<>; the first
 * input coverage doubles as the subtable's entry coverage.  Coverage
 * offsets are passed to the shared helpers as raw HBUINT16 values. */
struct ChainContextFormat3
{
  bool intersects (const hb_set_t *glyphs) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    if (!(this+input[0]).intersects (glyphs))
      return false;

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      {this, this, this}	/* Coverage offsets are relative to this table. */
    };
    /* input.arrayZ + 1 skips the first coverage, already checked above. */
    return chain_context_intersects (glyphs,
				     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
				     input.len, (const HBUINT16 *) input.arrayZ + 1,
				     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
				     lookup_context);
  }

  /* Contextual lookups may rewrite multiple glyphs at once. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    chain_context_closure_lookup (c,
				  backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
				  input.len, (const HBUINT16 *) input.arrayZ + 1,
				  lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
				  lookup.len, lookup.arrayZ,
				  0, lookup_context);
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!intersects (c->glyphs))
      return;

    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    recurse_lookups (c, lookup.len, lookup.arrayZ);
  }

  /* Chain context carries no device/variation data of its own. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    (this+input[0]).collect_coverage (c->input);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					 input.len, (const HBUINT16 *) input.arrayZ + 1,
					 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					 lookup.len, lookup.arrayZ,
					 lookup_context);
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return chain_context_would_apply_lookup (c,
					     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					     input.len, (const HBUINT16 *) input.arrayZ + 1,
					     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					     lookup.len, lookup.arrayZ, lookup_context);
  }

  /* Entry coverage is the first input coverage table. */
  const Coverage &get_coverage () const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    return this+input[0];
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    /* `likely` because most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED)) return_trace (false);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					      input.len, (const HBUINT16 *) input.arrayZ + 1,
					      lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					      lookup.len, lookup.arrayZ, lookup_context));
  }

  /* Serializes one coverage-offset array: the 16-bit count (allocated
   * up front), then each coverage subsetted in place. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();

    if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
      return_trace (false);

    for (auto& offset : it) {
      auto *o = out->serialize_append (c->serializer);
      if (unlikely (!o) || !o->serialize_subset (c, offset, base))
	return_trace (false);
    }

    return_trace (true);
  }

  /* Subsets the three coverage arrays and the lookup records, remapping
   * lookup indices through the plan's GSUB/GPOS lookup map. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out)) return_trace (false);
    if (unlikely (!c->serializer->embed (this->format))) return_trace (false);

    if (!serialize_coverage_offsets (c, backtrack.iter (), this))
      return_trace (false);

    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    if (!serialize_coverage_offsets (c, input.iter (), this))
      return_trace (false);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    if (!serialize_coverage_offsets (c, lookahead.iter (), this))
      return_trace (false);

    const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
    HBUINT16 lookupCount;
    lookupCount = lookupRecord.len;
    if (!c->serializer->copy (lookupCount)) return_trace (false);

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    for (unsigned i = 0; i < (unsigned) lookupCount; i++)
      if (!c->serializer->copy (lookupRecord[i], lookup_map)) return_trace (false);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Each array must be validated before the next one is located. */
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 3 */
  /* As in ChainRule, the X-suffixed members document the sequential
   * layout only; they are reached via StructAfter<>, not as members. */
  Array16OfOffset16To<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  Array16Of<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};
3352
/* Chained Sequence Context lookup subtable: a thin wrapper that
 * dispatches to one of the three formats based on the leading
 * format field of the union. */
struct ChainContext
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch() also sanitizes the format field on first access. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};
3376
3377
/* Extension lookup subtable, Format 1: wraps a subtable of another
 * lookup type behind a 32-bit offset, letting large fonts place lookup
 * data beyond the reach of 16-bit offsets.  T is the lookup type
 * (GSUB/GPOS lookup) whose SubTable union is being extended. */
template <typename T>
struct ExtensionFormat1
{
  /* Lookup type of the wrapped subtable. */
  unsigned int get_type () const { return extensionLookupType; }

  template <typename X>
  const X& get_subtable () const
  { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    /* Forward to the wrapped subtable, passing its real lookup type. */
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), hb_forward<Ts> (ds)...));
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  { dispatch (c); }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Reject Extension-inside-Extension: nesting would allow unbounded
     * recursion when dispatching a malicious font. */
    return_trace (c->check_struct (this) &&
		  extensionLookupType != T::SubTable::Extension);
  }

  /* Subsets the wrapped subtable, preserving format and lookup type. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->extensionLookupType = extensionLookupType;

    /* extensionOffset is declared as a plain Offset32; reinterpret it as
     * an offset to the typed subtable so serialize_subset can recurse. */
    const auto& src_offset =
        reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
    auto& dest_offset =
        reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);

    return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
  }

  protected:
  HBUINT16	format;			/* Format identifier. Set to 1. */
  HBUINT16	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  Offset32	extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
3434
/* Extension lookup (GSUB type 7 / GPOS type 9): format dispatcher over
 * ExtensionFormat1 (the only defined format). */
template <typename T>
struct Extension
{
  /* Lookup type of the wrapped subtable; 0 when the format is unknown. */
  unsigned int get_type () const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  /* The wrapped subtable itself; the Null subtable for unknown formats. */
  template <typename X>
  const X& get_subtable () const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::SubTable> ();
    default:return Null (typename T::SubTable);
    }
  }

  // Specialization of dispatch for subset. dispatch() normally just
  // dispatches to the sub table this points to, but for subset
  // we need to run subset on this subtable too.
  template <typename ...Ts>
  typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
  {
    switch (u.format) {
    case 1: return u.format1.subset (c);
    default: return c->default_return_value ();
    }
  }

  /* Generic dispatch: forwards to the format-1 wrapper, which in turn
   * dispatches into the wrapped subtable. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ExtensionFormat1<T>	format1;
  } u;
};
3483
3484
3485 /*
3486 * GSUB/GPOS Common
3487 */
3488
3489 struct hb_ot_layout_lookup_accelerator_t
3490 {
3491 template <typename TLookup>
initOT::hb_ot_layout_lookup_accelerator_t3492 void init (const TLookup &lookup)
3493 {
3494 digest.init ();
3495 lookup.collect_coverage (&digest);
3496
3497 subtables.init ();
3498 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3499 lookup.dispatch (&c_get_subtables);
3500 }
finiOT::hb_ot_layout_lookup_accelerator_t3501 void fini () { subtables.fini (); }
3502
may_haveOT::hb_ot_layout_lookup_accelerator_t3503 bool may_have (hb_codepoint_t g) const
3504 { return digest.may_have (g); }
3505
applyOT::hb_ot_layout_lookup_accelerator_t3506 bool apply (hb_ot_apply_context_t *c) const
3507 {
3508 for (unsigned int i = 0; i < subtables.length; i++)
3509 if (subtables[i].apply (c))
3510 return true;
3511 return false;
3512 }
3513
3514 private:
3515 hb_set_digest_t digest;
3516 hb_get_subtables_context_t::array_t subtables;
3517 };
3518
/* GSUBGPOS: structure common to the top-level GSUB and GPOS tables: a
 * version, offsets to the ScriptList, FeatureList and LookupList, and —
 * for version 1.1 and later — an offset to a FeatureVariations table. */
struct GSUBGPOS
{
  /* A zero version means the blob carries no table data. */
  bool has_data () const { return version.to_int (); }

  /* ScriptList accessors. */
  unsigned int get_script_count () const
  { return (this+scriptList).len; }
  const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
				unsigned int *script_count /* IN/OUT */,
				hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  /* FeatureList accessors.  NOT_FOUND_INDEX maps to HB_TAG_NONE. */
  unsigned int get_feature_count () const
  { return (this+featureList).len; }
  hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  unsigned int get_feature_tags (unsigned int start_offset,
				 unsigned int *feature_count /* IN/OUT */,
				 hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  /* LookupList accessors. */
  unsigned int get_lookup_count () const
  { return (this+lookupList).len; }
  const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  /* Find the FeatureVariations record matching the given variation coords.
   * Under HB_NO_VAR the early return makes the real lookup dead code and
   * the result is always NOT_FOUND. */
  bool find_variations_index (const int *coords, unsigned int num_coords,
			      unsigned int *index) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    /* featureVars only exists from version 1.1 on; older tables search the
     * Null table, which reports not-found. */
    return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
	   .find_index (coords, num_coords, index);
  }
  /* Return the variation-substituted Feature for this (feature, variations)
   * pair if one exists; otherwise fall back to the default Feature. */
  const Feature& get_feature_variation (unsigned int feature_index,
					unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
	version.to_int () >= 0x00010001u)
    {
      const Feature *feature = (this+featureVars).find_substitute (variations_index,
								   feature_index);
      if (feature)
	return *feature;
    }
#endif
    return get_feature (feature_index);
  }

  /* Collect lookups referenced by alternate (variation) versions of the
   * given features; no-op for pre-1.1 tables or under HB_NO_VAR. */
  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
					  hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
      (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
#endif
  }

  /* Transitively close the lookup-index set over lookups reachable from its
   * members, then drop lookups the closure found inactive for `glyphs`. */
  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
			const hb_set_t *glyphs,
			hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    for (unsigned lookup_index : + hb_iter (lookup_indexes))
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }

  /* Subsetting helper: prune the langsys entries of every script, recording
   * surviving feature indexes and the per-script langsys map. */
  void prune_langsys (const hb_map_t *duplicate_feature_map,
		      hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map,
		      hb_set_t       *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }

  /* Serialize a subset copy of this table: lookup list, feature list,
   * script list, and (when present) feature variations. */
  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);

    /* The reinterpret_casts view the generic offsets through subset-aware
     * wrapper list types so serialize_subset picks the right code path. */
    typedef LookupOffsetList<TLookup> TLookupList;
    reinterpret_cast<Offset16To<TLookupList> &> (out->lookupList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const Offset16To<TLookupList> &> (lookupList),
			   this,
			   c);

    reinterpret_cast<Offset16To<RecordListOfFeature> &> (out->featureList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const Offset16To<RecordListOfFeature> &> (featureList),
			   this,
			   c);

    out->scriptList.serialize_subset (c->subset_context,
				      scriptList,
				      this,
				      c);

#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
    {
      bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
      /* If the feature variations failed to serialize, downgrade the output
       * table to version 1.0, which has no featureVars member. */
      if (!ret)
      {
	out->version.major = 1;
	out->version.minor = 0;
      }
    }
#endif

    return_trace (true);
  }

  /* Map each feature index to a representative: itself, or an earlier
   * feature with the same tag and an identical list of retained lookup
   * indexes.  `prev` tracks the last representative seen; feature_indices
   * is iterated in set (increasing) order. */
  void find_duplicate_features (const hb_map_t *lookup_indices,
				const hb_set_t *feature_indices,
				hb_map_t       *duplicate_feature_map /* OUT */) const
  {
    //find out duplicate features after subset
    unsigned prev = 0xFFFFu;
    for (unsigned i : feature_indices->iter ())
    {
      /* First feature: it is its own representative. */
      if (prev == 0xFFFFu)
      {
	duplicate_feature_map->set (i, i);
	prev = i;
	continue;
      }

      /* Different tag from the previous representative: start a new group. */
      hb_tag_t t = get_feature_tag (i);
      hb_tag_t prev_t = get_feature_tag (prev);
      if (t != prev_t)
      {
	duplicate_feature_map->set (i, i);
	prev = i;
	continue;
      }

      const Feature& f = get_feature (i);
      const Feature& prev_f = get_feature (prev);

      /* Compare only the lookups that survive subsetting. */
      auto f_iter =
      + hb_iter (f.lookupIndex)
      | hb_filter (lookup_indices)
      ;

      auto prev_iter =
      + hb_iter (prev_f.lookupIndex)
      | hb_filter (lookup_indices)
      ;

      if (f_iter.len () != prev_iter.len ())
      {
	duplicate_feature_map->set (i, i);
	prev = i;
	continue;
      }

      bool is_equal = true;
      for (auto _ : + hb_zip (f_iter, prev_iter))
	if (_.first != _.second) { is_equal = false; break; }

      if (is_equal == true) duplicate_feature_map->set (i, prev);
      else
      {
	duplicate_feature_map->set (i, i);
	prev = i;
      }
    }
  }

  /* Drop features that reference none of the retained lookups, keeping
   * features with FeatureParams, with live variation alternates, and the
   * 'pref' feature (see note below). */
  void prune_features (const hb_map_t *lookup_indices, /* IN */
		       hb_set_t       *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // This is the set of feature indices which have alternate versions defined
    // if the FeatureVariation's table and the alternate version(s) intersect the
    // set of lookup indices.
    hb_set_t alternate_feature_indices;
    if (version.to_int () >= 0x00010001u)
      (this+featureVars).closure_features (lookup_indices, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error())) {
      feature_indices->successful = false;
      return;
    }
#endif

    for (unsigned i : feature_indices->iter())
    {
      const Feature& f = get_feature (i);
      hb_tag_t tag =  get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
        // Note: Never ever drop feature 'pref', even if it's empty.
        // HarfBuzz chooses shaper for Khmer based on presence of this
        // feature.	See thread at:
	// http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
        continue;

      if (f.featureParams.is_null ()
	  && !f.intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
          && !alternate_feature_indices.has (i)
#endif
	  )
	feature_indices->del (i);
    }
  }

  /* Struct size on disk: the featureVars offset member only exists from
   * version 1.1 on. */
  unsigned int get_size () const
  {
    return min_size +
	   (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

  /* Sanitize header, the three offset lists, and (for version >= 1.1)
   * the feature variations table. */
  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef List16OfOffset16To<TLookup> TLookupList;
    if (unlikely (!(version.sanitize (c) &&
		    likely (version.major == 1) &&
		    scriptList.sanitize (c, this) &&
		    featureList.sanitize (c, this) &&
		    reinterpret_cast<const Offset16To<TLookupList> &> (lookupList).sanitize (c, this))))
      return_trace (false);

#ifndef HB_NO_VAR
    if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
      return_trace (false);
#endif

    return_trace (true);
  }

  /* Face-level accelerator: the sanitized table blob plus one
   * hb_ot_layout_lookup_accelerator_t per lookup. */
  template <typename T>
  struct accelerator_t
  {
    void init (hb_face_t *face)
    {
      this->table = hb_sanitize_context_t ().reference_table<T> (face);
      /* Blocklisted (known-broken) fonts get an empty table instead. */
      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
	hb_blob_destroy (this->table.get_blob ());
	this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_ot_layout_lookup_accelerator_t *) calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
      /* On allocation failure, degrade to an empty table with zero lookups
       * so the init loop below (and later use) is a no-op. */
      if (unlikely (!this->accels))
      {
	this->lookup_count = 0;
	this->table.destroy ();
	this->table = hb_blob_get_empty ();
      }

      for (unsigned int i = 0; i < this->lookup_count; i++)
	this->accels[i].init (table->get_lookup (i));
    }

    void fini ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
	this->accels[i].fini ();
      free (this->accels);
      this->table.destroy ();
    }

    hb_blob_ptr_t<T> table;               /* Sanitized table blob. */
    unsigned int lookup_count;            /* Number of entries in accels. */
    hb_ot_layout_lookup_accelerator_t *accels;
  };

  protected:
  FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  Offset16To<ScriptList>
		scriptList;	/* ScriptList table */
  Offset16To<FeatureList>
		featureList;	/* FeatureList table */
  Offset16To<LookupList>
		lookupList;	/* LookupList table */
  Offset32To<FeatureVariations>
		featureVars;	/* Offset to Feature Variations
				   table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (10);
};
3833
3834
3835 } /* namespace OT */
3836
3837
3838 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */
3839