1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
31
32 #include "hb.hh"
33 #include "hb-buffer.hh"
34 #include "hb-map.hh"
35 #include "hb-set.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
39
40
41 namespace OT {
42
43
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool>
46 {
47 template <typename T>
48 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
49 static return_t default_return_value () { return false; }
50 bool stop_sublookup_iteration (return_t r) const { return r; }
51
52 const hb_set_t *glyphs;
53
54 hb_intersects_context_t (const hb_set_t *glyphs_) :
55 glyphs (glyphs_) {}
56 };
57
58 struct hb_have_non_1to1_context_t :
59 hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
60 {
61 template <typename T>
62 return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
63 static return_t default_return_value () { return false; }
64 bool stop_sublookup_iteration (return_t r) const { return r; }
65 };
66
67 struct hb_closure_context_t :
68 hb_dispatch_context_t<hb_closure_context_t>
69 {
70 typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
71 template <typename T>
72 return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
73 static return_t default_return_value () { return hb_empty_t (); }
74 void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
75 {
76 if (unlikely (nesting_level_left == 0 || !recurse_func))
77 return;
78
79 nesting_level_left--;
80 recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
81 nesting_level_left++;
82 }
83
84 bool lookup_limit_exceeded ()
85 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
86
87 bool should_visit_lookup (unsigned int lookup_index)
88 {
89 if (lookup_count++ > HB_MAX_LOOKUP_INDICES)
90 return false;
91
92 if (is_lookup_done (lookup_index))
93 return false;
94
95 return true;
96 }
97
98 bool is_lookup_done (unsigned int lookup_index)
99 {
100 if (done_lookups_glyph_count->in_error () ||
101 done_lookups_glyph_set->in_error ())
102 return true;
103
104 /* Have we visited this lookup with the current set of glyphs? */
105 if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
106 {
107 done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());
108
109 if (!done_lookups_glyph_set->get (lookup_index))
110 {
111 hb_set_t* empty_set = hb_set_create ();
112 if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
113 {
114 hb_set_destroy (empty_set);
115 return true;
116 }
117 }
118
119 hb_set_clear (done_lookups_glyph_set->get (lookup_index));
120 }
121
122 hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
123 if (unlikely (covered_glyph_set->in_error ()))
124 return true;
125 if (parent_active_glyphs ()->is_subset (covered_glyph_set))
126 return true;
127
128 hb_set_union (covered_glyph_set, parent_active_glyphs ());
129 return false;
130 }
131
132 hb_set_t* parent_active_glyphs ()
133 {
134 if (active_glyphs_stack.length < 1)
135 return glyphs;
136
137 return active_glyphs_stack.tail ();
138 }
139
140 void push_cur_active_glyphs (hb_set_t* cur_active_glyph_set)
141 {
142 active_glyphs_stack.push (cur_active_glyph_set);
143 }
144
145 bool pop_cur_done_glyphs ()
146 {
147 if (active_glyphs_stack.length < 1)
148 return false;
149
150 active_glyphs_stack.pop ();
151 return true;
152 }
153
154 hb_face_t *face;
155 hb_set_t *glyphs;
156 hb_set_t *cur_intersected_glyphs;
157 hb_set_t output[1];
158 hb_vector_t<hb_set_t *> active_glyphs_stack;
159 recurse_func_t recurse_func;
160 unsigned int nesting_level_left;
161
162 hb_closure_context_t (hb_face_t *face_,
163 hb_set_t *glyphs_,
164 hb_set_t *cur_intersected_glyphs_,
165 hb_map_t *done_lookups_glyph_count_,
166 hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *done_lookups_glyph_set_,
167 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
168 face (face_),
169 glyphs (glyphs_),
170 cur_intersected_glyphs (cur_intersected_glyphs_),
171 recurse_func (nullptr),
172 nesting_level_left (nesting_level_left_),
173 done_lookups_glyph_count (done_lookups_glyph_count_),
174 done_lookups_glyph_set (done_lookups_glyph_set_),
175 lookup_count (0)
176 {
177 push_cur_active_glyphs (glyphs_);
178 }
179
180 ~hb_closure_context_t () { flush (); }
181
182 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
183
184 void flush ()
185 {
186 hb_set_del_range (output, face->get_num_glyphs (), hb_set_get_max (output)); /* Remove invalid glyphs. */
187 hb_set_union (glyphs, output);
188 hb_set_clear (output);
189 active_glyphs_stack.pop ();
190 active_glyphs_stack.fini ();
191 }
192
193 private:
194 hb_map_t *done_lookups_glyph_count;
195 hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *done_lookups_glyph_set;
196 unsigned int lookup_count;
197 };
198
199
200
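/* Closure-over-lookups context: records which lookups are reachable from the
 * given glyph set (visited_lookups) and which can never apply to it
 * (inactive_lookups), with the same nesting and lookup-count limits. */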
201 struct hb_closure_lookups_context_t :
202 hb_dispatch_context_t<hb_closure_lookups_context_t>
203 {
204 typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
205 template <typename T>
206 return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
207 static return_t default_return_value () { return hb_empty_t (); }
208 void recurse (unsigned lookup_index)
209 {
210 if (unlikely (nesting_level_left == 0 || !recurse_func))
211 return;
212
213 /* Return if new lookup was recursed to before. */
214 if (is_lookup_visited (lookup_index))
215 return;
216
217 nesting_level_left--;
218 recurse_func (this, lookup_index);
219 nesting_level_left++;
220 }
221
222 void set_lookup_visited (unsigned lookup_index)
223 { visited_lookups->add (lookup_index); }
224
225 void set_lookup_inactive (unsigned lookup_index)
226 { inactive_lookups->add (lookup_index); }
227
228 bool lookup_limit_exceeded ()
229 { return lookup_count > HB_MAX_LOOKUP_INDICES; }
230
231 bool is_lookup_visited (unsigned lookup_index)
232 {
233 if (unlikely (lookup_count++ > HB_MAX_LOOKUP_INDICES))
234 return true;
235
236 if (unlikely (visited_lookups->in_error ()))
237 return true;
238
239 return visited_lookups->has (lookup_index);
240 }
241
242 hb_face_t *face;
243 const hb_set_t *glyphs;
244 recurse_func_t recurse_func;
245 unsigned int nesting_level_left;
246
247 hb_closure_lookups_context_t (hb_face_t *face_,
248 const hb_set_t *glyphs_,
249 hb_set_t *visited_lookups_,
250 hb_set_t *inactive_lookups_,
251 unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
252 face (face_),
253 glyphs (glyphs_),
254 recurse_func (nullptr),
255 nesting_level_left (nesting_level_left_),
256 visited_lookups (visited_lookups_),
257 inactive_lookups (inactive_lookups_),
258 lookup_count (0) {}
259
260 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
261
262 private:
263 hb_set_t *visited_lookups;
264 hb_set_t *inactive_lookups;
265 unsigned int lookup_count;
266 };
267
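/* would_apply context: asks each subtable whether it would match the given
 * glyph sequence (glyphs / len), without touching any buffer. */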
268 struct hb_would_apply_context_t :
269 hb_dispatch_context_t<hb_would_apply_context_t, bool>
270 {
271 template <typename T>
272 return_t dispatch (const T &obj) { return obj.would_apply (this); }
273 static return_t default_return_value () { return false; }
274 bool stop_sublookup_iteration (return_t r) const { return r; }
275
276 hb_face_t *face;
277 const hb_codepoint_t *glyphs;
278 unsigned int len;
279 bool zero_context;
280
281 hb_would_apply_context_t (hb_face_t *face_,
282 const hb_codepoint_t *glyphs_,
283 unsigned int len_,
284 bool zero_context_) :
285 face (face_),
286 glyphs (glyphs_),
287 len (len_),
288 zero_context (zero_context_) {}
289 };
290
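/* collect_glyphs context: each subtable adds the glyphs it can inspect or
 * produce into the before/input/after/output sets; recursion into other
 * lookups only keeps collecting output glyphs (see the note in recurse ()). */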
291 struct hb_collect_glyphs_context_t :
292 hb_dispatch_context_t<hb_collect_glyphs_context_t>
293 {
294 typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
295 template <typename T>
296 return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
297 static return_t default_return_value () { return hb_empty_t (); }
298 void recurse (unsigned int lookup_index)
299 {
300 if (unlikely (nesting_level_left == 0 || !recurse_func))
301 return;
302
303 /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
304 * past the previous check. For GSUB, we only want to collect the output
305 * glyphs in the recursion. If output is not requested, we can go home now.
306 *
307 * Note further, that the above is not exactly correct. A recursed lookup
308 * is allowed to match input that is not matched in the context, but that's
309 * not how most fonts are built. It's possible to relax that and recurse
310 * with all sets here if it proves to be an issue.
311 */
312
313 if (output == hb_set_get_empty ())
314 return;
315
316 /* Return if new lookup was recursed to before. */
317 if (recursed_lookups->has (lookup_index))
318 return;
319
320 hb_set_t *old_before = before;
321 hb_set_t *old_input = input;
322 hb_set_t *old_after = after;
323 before = input = after = hb_set_get_empty ();
324
325 nesting_level_left--;
326 recurse_func (this, lookup_index);
327 nesting_level_left++;
328
329 before = old_before;
330 input = old_input;
331 after = old_after;
332
333 recursed_lookups->add (lookup_index);
334 }
335
336 hb_face_t *face;
337 hb_set_t *before;
338 hb_set_t *input;
339 hb_set_t *after;
340 hb_set_t *output;
341 recurse_func_t recurse_func;
342 hb_set_t *recursed_lookups;
343 unsigned int nesting_level_left;
344
345 hb_collect_glyphs_context_t (hb_face_t *face_,
346 hb_set_t *glyphs_before, /* OUT. May be NULL */
347 hb_set_t *glyphs_input, /* OUT. May be NULL */
348 hb_set_t *glyphs_after, /* OUT. May be NULL */
349 hb_set_t *glyphs_output, /* OUT. May be NULL */
350 unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
351 face (face_),
352 before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
353 input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
354 after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
355 output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
356 recurse_func (nullptr),
357 recursed_lookups (hb_set_create ()),
358 nesting_level_left (nesting_level_left_) {}
359 ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
360
361 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
362 };
363
364
365
366 template <typename set_t>
367 struct hb_collect_coverage_context_t :
368 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
369 {
370 typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
371 template <typename T>
372 return_t dispatch (const T &obj) { return obj.get_coverage (); }
373 static return_t default_return_value () { return Null (Coverage); }
374 bool stop_sublookup_iteration (return_t r) const
375 {
376 r.collect_coverage (set);
377 return false;
378 }
379
380 hb_collect_coverage_context_t (set_t *set_) :
381 set (set_) {}
382
383 set_t *set;
384 };
385
386
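/* The main lookup-application context used during shaping: dispatch () calls
 * each subtable's apply () against the buffer at its current position. */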
387 struct hb_ot_apply_context_t :
388 hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
389 {
390 struct matcher_t
391 {
392 matcher_t () :
393 lookup_props (0),
394 ignore_zwnj (false),
395 ignore_zwj (false),
396 mask (-1),
397 #define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
398 syllable arg1(0),
399 #undef arg1
400 match_func (nullptr),
401 match_data (nullptr) {}
402
403 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
404
405 void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
406 void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
407 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
408 void set_mask (hb_mask_t mask_) { mask = mask_; }
409 void set_syllable (uint8_t syllable_) { syllable = syllable_; }
410 void set_match_func (match_func_t match_func_,
411 const void *match_data_)
412 { match_func = match_func_; match_data = match_data_; }
413
414 enum may_match_t {
415 MATCH_NO,
416 MATCH_YES,
417 MATCH_MAYBE
418 };
419
420 may_match_t may_match (const hb_glyph_info_t &info,
421 const HBUINT16 *glyph_data) const
422 {
423 if (!(info.mask & mask) ||
424 (syllable && syllable != info.syllable ()))
425 return MATCH_NO;
426
427 if (match_func)
428 return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
429
430 return MATCH_MAYBE;
431 }
432
433 enum may_skip_t {
434 SKIP_NO,
435 SKIP_YES,
436 SKIP_MAYBE
437 };
438
439 may_skip_t may_skip (const hb_ot_apply_context_t *c,
440 const hb_glyph_info_t &info) const
441 {
442 if (!c->check_glyph_property (&info, lookup_props))
443 return SKIP_YES;
444
445 if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
446 (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
447 (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
448 return SKIP_MAYBE;
449
450 return SKIP_NO;
451 }
452
453 protected:
454 unsigned int lookup_props;
455 bool ignore_zwnj;
456 bool ignore_zwj;
457 hb_mask_t mask;
458 uint8_t syllable;
459 match_func_t match_func;
460 const void *match_data;
461 };
462
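/* Iterates over buffer glyphs while skipping glyphs the lookup ignores
 * (per lookup_props, mark-filtering sets and ZWJ/ZWNJ handling), optionally
 * matching each visited glyph against a per-position glyph_data array. */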
463 struct skipping_iterator_t
464 {
465 void init (hb_ot_apply_context_t *c_, bool context_match = false)
466 {
467 c = c_;
468 match_glyph_data = nullptr;
469 matcher.set_match_func (nullptr, nullptr);
470 matcher.set_lookup_props (c->lookup_props);
471 /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
472 matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
473 /* Ignore ZWJ if we are matching context, or asked to. */
474 matcher.set_ignore_zwj (context_match || c->auto_zwj);
475 matcher.set_mask (context_match ? -1 : c->lookup_mask);
476 }
477 void set_lookup_props (unsigned int lookup_props)
478 {
479 matcher.set_lookup_props (lookup_props);
480 }
481 void set_match_func (matcher_t::match_func_t match_func_,
482 const void *match_data_,
483 const HBUINT16 glyph_data[])
484 {
485 matcher.set_match_func (match_func_, match_data_);
486 match_glyph_data = glyph_data;
487 }
488
489 void reset (unsigned int start_index_,
490 unsigned int num_items_)
491 {
492 idx = start_index_;
493 num_items = num_items_;
494 end = c->buffer->len;
495 matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
496 }
497
498 void reject ()
499 {
500 num_items++;
501 if (match_glyph_data) match_glyph_data--;
502 }
503
504 matcher_t::may_skip_t
505 may_skip (const hb_glyph_info_t &info) const
506 { return matcher.may_skip (c, info); }
507
508 bool next ()
509 {
510 assert (num_items > 0);
511 while (idx + num_items < end)
512 {
513 idx++;
514 const hb_glyph_info_t &info = c->buffer->info[idx];
515
516 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
517 if (unlikely (skip == matcher_t::SKIP_YES))
518 continue;
519
520 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
521 if (match == matcher_t::MATCH_YES ||
522 (match == matcher_t::MATCH_MAYBE &&
523 skip == matcher_t::SKIP_NO))
524 {
525 num_items--;
526 if (match_glyph_data) match_glyph_data++;
527 return true;
528 }
529
530 if (skip == matcher_t::SKIP_NO)
531 return false;
532 }
533 return false;
534 }
535 bool prev ()
536 {
537 assert (num_items > 0);
538 /* The alternate condition below is faster at string boundaries,
539 * but produces subpar "unsafe-to-concat" values. */
540 unsigned stop = num_items - 1;
541 if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
542 stop = 1 - 1;
543 /* When looking back, limit how far we search; this function is mostly
544 * used for looking back for base glyphs when attaching marks. If we
545 * don't limit, we can get O(n^2) behavior where n is the number of
546 * consecutive marks. */
547 stop = (unsigned) hb_max ((int) stop, (int) idx - HB_MAX_CONTEXT_LENGTH);
548 while (idx > stop)
549 {
550 idx--;
551 const hb_glyph_info_t &info = c->buffer->out_info[idx];
552
553 matcher_t::may_skip_t skip = matcher.may_skip (c, info);
554 if (unlikely (skip == matcher_t::SKIP_YES))
555 continue;
556
557 matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
558 if (match == matcher_t::MATCH_YES ||
559 (match == matcher_t::MATCH_MAYBE &&
560 skip == matcher_t::SKIP_NO))
561 {
562 num_items--;
563 if (match_glyph_data) match_glyph_data++;
564 return true;
565 }
566
567 if (skip == matcher_t::SKIP_NO)
568 return false;
569 }
570 return false;
571 }
572
573 unsigned int idx;
574 protected:
575 hb_ot_apply_context_t *c;
576 matcher_t matcher;
577 const HBUINT16 *match_glyph_data;
578
579 unsigned int num_items;
580 unsigned int end;
581 };
582
583
584 const char *get_name () { return "APPLY"; }
585 typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
586 template <typename T>
587 return_t dispatch (const T &obj) { return obj.apply (this); }
588 static return_t default_return_value () { return false; }
589 bool stop_sublookup_iteration (return_t r) const { return r; }
590 return_t recurse (unsigned int sub_lookup_index)
591 {
592 if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
593 return default_return_value ();
594
595 nesting_level_left--;
596 bool ret = recurse_func (this, sub_lookup_index);
597 nesting_level_left++;
598 return ret;
599 }
600
601 skipping_iterator_t iter_input, iter_context;
602
603 hb_font_t *font;
604 hb_face_t *face;
605 hb_buffer_t *buffer;
606 recurse_func_t recurse_func;
607 const GDEF &gdef;
608 const VariationStore &var_store;
609
610 hb_direction_t direction;
611 hb_mask_t lookup_mask;
612 unsigned int table_index; /* GSUB/GPOS */
613 unsigned int lookup_index;
614 unsigned int lookup_props;
615 unsigned int nesting_level_left;
616
617 bool has_glyph_classes;
618 bool auto_zwnj;
619 bool auto_zwj;
620 bool random;
621
622 uint32_t random_state;
623
624
625 hb_ot_apply_context_t (unsigned int table_index_,
626 hb_font_t *font_,
627 hb_buffer_t *buffer_) :
628 iter_input (), iter_context (),
629 font (font_), face (font->face), buffer (buffer_),
630 recurse_func (nullptr),
631 gdef (
632 #ifndef HB_NO_OT_LAYOUT
633 *face->table.GDEF->table
634 #else
635 Null (GDEF)
636 #endif
637 ),
638 var_store (gdef.get_var_store ()),
639 direction (buffer_->props.direction),
640 lookup_mask (1),
641 table_index (table_index_),
642 lookup_index ((unsigned int) -1),
643 lookup_props (0),
644 nesting_level_left (HB_MAX_NESTING_LEVEL),
645 has_glyph_classes (gdef.has_glyph_classes ()),
646 auto_zwnj (true),
647 auto_zwj (true),
648 random (false),
649 random_state (1) { init_iters (); }
650
651 void init_iters ()
652 {
653 iter_input.init (this, false);
654 iter_context.init (this, true);
655 }
656
657 void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
658 void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
659 void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
660 void set_random (bool random_) { random = random_; }
661 void set_recurse_func (recurse_func_t func) { recurse_func = func; }
662 void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
663 void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
664
665 uint32_t random_number ()
666 {
667 /* http://www.cplusplus.com/reference/random/minstd_rand/ */
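/* Starting from the initial state of 1 this yields 48271, 182605794, ...;
     * the full period is 2^31 - 2. */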
668 random_state = random_state * 48271 % 2147483647;
669 return random_state;
670 }
671
672 bool match_properties_mark (hb_codepoint_t glyph,
673 unsigned int glyph_props,
674 unsigned int match_props) const
675 {
676 /* If using mark filtering sets, the high short of
677 * match_props has the set index.
678 */
679 if (match_props & LookupFlag::UseMarkFilteringSet)
680 return gdef.mark_set_covers (match_props >> 16, glyph);
681
682 /* The second byte of match_props has the meaning
683 * "ignore marks of attachment type different than
684 * the attachment type specified."
685 */
686 if (match_props & LookupFlag::MarkAttachmentType)
687 return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
688
689 return true;
690 }
691
692 bool check_glyph_property (const hb_glyph_info_t *info,
693 unsigned int match_props) const
694 {
695 hb_codepoint_t glyph = info->codepoint;
696 unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
697
698 /* Not covered, if, for example, glyph class is ligature and
699 * match_props includes LookupFlags::IgnoreLigatures
700 */
701 if (glyph_props & match_props & LookupFlag::IgnoreFlags)
702 return false;
703
704 if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
705 return match_properties_mark (glyph, glyph_props, match_props);
706
707 return true;
708 }
709
710 void _set_glyph_props (hb_codepoint_t glyph_index,
711 unsigned int class_guess = 0,
712 bool ligature = false,
713 bool component = false) const
714 {
715 unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
716 HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
717 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
718 if (ligature)
719 {
720 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
721 /* In the only place that the MULTIPLIED bit is used, Uniscribe
722 * seems to only care about the "last" transformation between
723 * Ligature and Multiple substitutions. Ie. if you ligate, expand,
724 * and ligate again, it forgives the multiplication and acts as
725 * if only ligation happened. As such, clear MULTIPLIED bit.
726 */
727 add_in &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
728 }
729 if (component)
730 add_in |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
731 if (likely (has_glyph_classes))
732 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
733 else if (class_guess)
734 _hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
735 }
736
737 void replace_glyph (hb_codepoint_t glyph_index) const
738 {
739 _set_glyph_props (glyph_index);
740 (void) buffer->replace_glyph (glyph_index);
741 }
742 void replace_glyph_inplace (hb_codepoint_t glyph_index) const
743 {
744 _set_glyph_props (glyph_index);
745 buffer->cur().codepoint = glyph_index;
746 }
747 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
748 unsigned int class_guess) const
749 {
750 _set_glyph_props (glyph_index, class_guess, true);
751 (void) buffer->replace_glyph (glyph_index);
752 }
753 void output_glyph_for_component (hb_codepoint_t glyph_index,
754 unsigned int class_guess) const
755 {
756 _set_glyph_props (glyph_index, class_guess, false, true);
757 (void) buffer->output_glyph (glyph_index);
758 }
759 };
760
761
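/* Flattens a lookup's subtables into an array of hb_applicable_t, each
 * pairing a typed apply () thunk with a coverage digest so application can
 * be skipped cheaply for glyphs the subtable cannot possibly match. */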
762 struct hb_get_subtables_context_t :
763 hb_dispatch_context_t<hb_get_subtables_context_t>
764 {
765 template <typename Type>
766 static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
767 {
768 const Type *typed_obj = (const Type *) obj;
769 return typed_obj->apply (c);
770 }
771
772 typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);
773
774 struct hb_applicable_t
775 {
776 template <typename T>
777 void init (const T &obj_, hb_apply_func_t apply_func_)
778 {
779 obj = &obj_;
780 apply_func = apply_func_;
781 digest.init ();
782 obj_.get_coverage ().collect_coverage (&digest);
783 }
784
785 bool apply (OT::hb_ot_apply_context_t *c) const
786 {
787 return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
788 }
789
790 private:
791 const void *obj;
792 hb_apply_func_t apply_func;
793 hb_set_digest_t digest;
794 };
795
796 typedef hb_vector_t<hb_applicable_t> array_t;
797
798 /* Dispatch interface. */
799 template <typename T>
800 return_t dispatch (const T &obj)
801 {
802 hb_applicable_t *entry = array.push();
803 entry->init (obj, apply_to<T>);
804 return hb_empty_t ();
805 }
806 static return_t default_return_value () { return hb_empty_t (); }
807
808 hb_get_subtables_context_t (array_t &array_) :
809 array (array_) {}
810
811 array_t &array;
812 };
813
814
815
816
817 typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
818 typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
819 typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
820 typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);
821
822 struct ContextClosureFuncs
823 {
824 intersects_func_t intersects;
825 intersected_glyphs_func_t intersected_glyphs;
826 };
827 struct ContextCollectGlyphsFuncs
828 {
829 collect_glyphs_func_t collect;
830 };
831 struct ContextApplyFuncs
832 {
833 match_func_t match;
834 };
835
836
837 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
838 {
839 return glyphs->has (value);
840 }
841 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
842 {
843 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
844 return class_def.intersects_class (glyphs, value);
845 }
846 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
847 {
848 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
849 return (data+coverage).intersects (glyphs);
850 }
851
852
853 static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
854 {
855 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
856 intersected_glyphs->add (g);
857 }
858 static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
859 {
860 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
861 class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
862 }
863 static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
864 {
865 Offset16To<Coverage> coverage;
866 coverage = value;
867 (data+coverage).intersected_coverage_glyphs (glyphs, intersected_glyphs);
868 }
869
870
871 static inline bool array_is_subset_of (const hb_set_t *glyphs,
872 unsigned int count,
873 const HBUINT16 values[],
874 intersects_func_t intersects_func,
875 const void *intersects_data)
876 {
877 for (const HBUINT16 &_ : + hb_iter (values, count))
878 if (!intersects_func (glyphs, _, intersects_data)) return false;
879 return true;
880 }
881
882
883 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
884 {
885 glyphs->add (value);
886 }
887 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
888 {
889 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
890 class_def.collect_class (glyphs, value);
891 }
892 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
893 {
894 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
895 (data+coverage).collect_coverage (glyphs);
896 }
897 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
898 hb_set_t *glyphs,
899 unsigned int count,
900 const HBUINT16 values[],
901 collect_glyphs_func_t collect_func,
902 const void *collect_data)
903 {
904 return
905 + hb_iter (values, count)
906 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
907 ;
908 }
909
910
911 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
912 {
913 return glyph_id == value;
914 }
915 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
916 {
917 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
918 return class_def.get_class (glyph_id) == value;
919 }
920 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
921 {
922 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
923 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
924 }
925
926 static inline bool would_match_input (hb_would_apply_context_t *c,
927 unsigned int count, /* Including the first glyph (not matched) */
928 const HBUINT16 input[], /* Array of input values--start with second glyph */
929 match_func_t match_func,
930 const void *match_data)
931 {
932 if (count != c->len)
933 return false;
934
935 for (unsigned int i = 1; i < count; i++)
936 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
937 return false;
938
939 return true;
940 }
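/* Matches the `count - 1` values in input[] against the glyphs following the
 * current buffer position, using the skipping iterator so ignored glyphs do
 * not break the match, and enforcing the ligature-component rules described
 * in the long comment below. */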
941 static inline bool match_input (hb_ot_apply_context_t *c,
942 unsigned int count, /* Including the first glyph (not matched) */
943 const HBUINT16 input[], /* Array of input values--start with second glyph */
944 match_func_t match_func,
945 const void *match_data,
946 unsigned int *end_offset,
947 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
948 unsigned int *p_total_component_count = nullptr)
949 {
950 TRACE_APPLY (nullptr);
951
952 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
953
954 hb_buffer_t *buffer = c->buffer;
955
956 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
957 skippy_iter.reset (buffer->idx, count - 1);
958 skippy_iter.set_match_func (match_func, match_data, input);
959
960 /*
961 * This is perhaps the trickiest part of OpenType... Remarks:
962 *
963 * - If all components of the ligature were marks, we call this a mark ligature.
964 *
965 * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
966 * it as a ligature glyph.
967 *
968 * - Ligatures cannot be formed across glyphs attached to different components
969 * of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
970 * LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
971 * However, it would be wrong to ligate that SHADDA,FATHA sequence.
972 * There are a couple of exceptions to this:
973 *
974 * o If a ligature tries ligating with marks that belong to it itself, go ahead,
975 * assuming that the font designer knows what they are doing (otherwise it can
976 * break Indic stuff when a matra wants to ligate with a conjunct),
977 *
978 * o If two marks want to ligate and they belong to different components of the
979 * same ligature glyph, and said ligature glyph is to be ignored according to
980 * mark-filtering rules, then allow.
981 * https://github.com/harfbuzz/harfbuzz/issues/545
982 */
983
984 unsigned int total_component_count = 0;
985 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
986
987 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
988 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
989
990 enum {
991 LIGBASE_NOT_CHECKED,
992 LIGBASE_MAY_NOT_SKIP,
993 LIGBASE_MAY_SKIP
994 } ligbase = LIGBASE_NOT_CHECKED;
995
996 match_positions[0] = buffer->idx;
997 for (unsigned int i = 1; i < count; i++)
998 {
999 if (!skippy_iter.next ()) return_trace (false);
1000
1001 match_positions[i] = skippy_iter.idx;
1002
1003 unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
1004 unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
1005
1006 if (first_lig_id && first_lig_comp)
1007 {
1008 /* If first component was attached to a previous ligature component,
1009 * all subsequent components should be attached to the same ligature
1010 * component, otherwise we shouldn't ligate them... */
1011 if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
1012 {
1013 /* ...unless, we are attached to a base ligature and that base
1014 * ligature is ignorable. */
1015 if (ligbase == LIGBASE_NOT_CHECKED)
1016 {
1017 bool found = false;
1018 const auto *out = buffer->out_info;
1019 unsigned int j = buffer->out_len;
1020 while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
1021 {
1022 if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
1023 {
1024 j--;
1025 found = true;
1026 break;
1027 }
1028 j--;
1029 }
1030
1031 if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
1032 ligbase = LIGBASE_MAY_SKIP;
1033 else
1034 ligbase = LIGBASE_MAY_NOT_SKIP;
1035 }
1036
1037 if (ligbase == LIGBASE_MAY_NOT_SKIP)
1038 return_trace (false);
1039 }
1040 }
1041 else
1042 {
1043 /* If first component was NOT attached to a previous ligature component,
1044 * all subsequent components should also NOT be attached to any ligature
1045 * component, unless they are attached to the first component itself! */
1046 if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
1047 return_trace (false);
1048 }
1049
1050 total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
1051 }
1052
1053 *end_offset = skippy_iter.idx - buffer->idx + 1;
1054
1055 if (p_total_component_count)
1056 *p_total_component_count = total_component_count;
1057
1058 return_trace (true);
1059 }
1060 static inline bool ligate_input (hb_ot_apply_context_t *c,
1061 unsigned int count, /* Including the first glyph */
1062 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1063 unsigned int match_length,
1064 hb_codepoint_t lig_glyph,
1065 unsigned int total_component_count)
1066 {
1067 TRACE_APPLY (nullptr);
1068
1069 hb_buffer_t *buffer = c->buffer;
1070
1071 buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
1072
1073 /* - If a base and one or more marks ligate, consider that as a base, NOT
1074 * ligature, such that all following marks can still attach to it.
1075 * https://github.com/harfbuzz/harfbuzz/issues/1109
1076 *
1077 * - If all components of the ligature were marks, we call this a mark ligature.
1078 * If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1079 * the ligature to keep its old ligature id. This will allow it to attach to
1080 * a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1081 * and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
1082 * ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
1083 * later, we don't want them to lose their ligature id/component, otherwise
1084 * GPOS will fail to correctly position the mark ligature on top of the
1085 * LAM,LAM,HEH ligature. See:
1086 * https://bugzilla.gnome.org/show_bug.cgi?id=676343
1087 *
1088 * - If a ligature is formed of components, some of which are also ligatures
1089 * themselves, and those ligature components had marks attached to *their*
1090 * components, we have to attach the marks to the new ligature component
1091 * positions! Now *that*'s tricky! And these marks may be following the
1092 * last component of the whole sequence, so we should loop forward looking
1093 * for them and update them.
1094 *
1095 * Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1096 * 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1097 * id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
1098 * form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
1099 * the new ligature with a component value of 2.
1100 *
1101 * This in fact happened to a font... See:
1102 * https://bugzilla.gnome.org/show_bug.cgi?id=437633
1103 */
1104
1105 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1106 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1107 for (unsigned int i = 1; i < count; i++)
1108 if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1109 {
1110 is_base_ligature = false;
1111 is_mark_ligature = false;
1112 break;
1113 }
1114 bool is_ligature = !is_base_ligature && !is_mark_ligature;
1115
1116 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1117 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1118 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1119 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1120 unsigned int components_so_far = last_num_components;
1121
1122 if (is_ligature)
1123 {
1124 _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
1125 if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1126 {
1127 _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1128 }
1129 }
1130 c->replace_glyph_with_ligature (lig_glyph, klass);
1131
1132 for (unsigned int i = 1; i < count; i++)
1133 {
1134 while (buffer->idx < match_positions[i] && buffer->successful)
1135 {
1136 if (is_ligature)
1137 {
1138 unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1139 if (this_comp == 0)
1140 this_comp = last_num_components;
1141 unsigned int new_lig_comp = components_so_far - last_num_components +
1142 hb_min (this_comp, last_num_components);
1143 _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1144 }
1145 (void) buffer->next_glyph ();
1146 }
1147
1148 last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1149 last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1150 components_so_far += last_num_components;
1151
1152 /* Skip the base glyph */
1153 buffer->idx++;
1154 }
1155
1156 if (!is_mark_ligature && last_lig_id)
1157 {
1158 /* Re-adjust components for any marks following. */
1159 for (unsigned i = buffer->idx; i < buffer->len; ++i)
1160 {
1161 if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1162
1163 unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1164 if (!this_comp) break;
1165
1166 unsigned new_lig_comp = components_so_far - last_num_components +
1167 hb_min (this_comp, last_num_components);
1168 _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1169 }
1170 }
1171 return_trace (true);
1172 }
1173
1174 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1175 unsigned int count,
1176 const HBUINT16 backtrack[],
1177 match_func_t match_func,
1178 const void *match_data,
1179 unsigned int *match_start)
1180 {
1181 TRACE_APPLY (nullptr);
1182
1183 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1184 skippy_iter.reset (c->buffer->backtrack_len (), count);
1185 skippy_iter.set_match_func (match_func, match_data, backtrack);
1186
1187 for (unsigned int i = 0; i < count; i++)
1188 if (!skippy_iter.prev ())
1189 return_trace (false);
1190
1191 *match_start = skippy_iter.idx;
1192
1193 return_trace (true);
1194 }
1195
1196 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1197 unsigned int count,
1198 const HBUINT16 lookahead[],
1199 match_func_t match_func,
1200 const void *match_data,
1201 unsigned int offset,
1202 unsigned int *end_index)
1203 {
1204 TRACE_APPLY (nullptr);
1205
1206 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1207 skippy_iter.reset (c->buffer->idx + offset - 1, count);
1208 skippy_iter.set_match_func (match_func, match_data, lookahead);
1209
1210 for (unsigned int i = 0; i < count; i++)
1211 if (!skippy_iter.next ())
1212 return_trace (false);
1213
1214 *end_index = skippy_iter.idx + 1;
1215
1216 return_trace (true);
1217 }
1218
1219
1220
1221 struct LookupRecord
1222 {
1223 LookupRecord* copy (hb_serialize_context_t *c,
1224 const hb_map_t *lookup_map) const
1225 {
1226 TRACE_SERIALIZE (this);
1227 auto *out = c->embed (*this);
1228 if (unlikely (!out)) return_trace (nullptr);
1229
1230 out->lookupListIndex = hb_map_get (lookup_map, lookupListIndex);
1231 return_trace (out);
1232 }
1233
1234 bool sanitize (hb_sanitize_context_t *c) const
1235 {
1236 TRACE_SANITIZE (this);
1237 return_trace (c->check_struct (this));
1238 }
1239
1240 HBUINT16 sequenceIndex; /* Index into current glyph
1241 * sequence--first glyph = 0 */
1242 HBUINT16 lookupListIndex; /* Lookup to apply to that
1243 * position--zero--based */
1244 public:
1245 DEFINE_SIZE_STATIC (4);
1246 };
1247
1248 enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
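/* These mirror the three OpenType contextual subtable formats: format 1
 * matches glyph sequences, format 2 matches glyph classes, and format 3
 * matches per-position coverage tables. */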
1249
1250 static void context_closure_recurse_lookups (hb_closure_context_t *c,
1251 unsigned inputCount, const HBUINT16 input[],
1252 unsigned lookupCount,
1253 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
1254 unsigned value,
1255 ContextFormat context_format,
1256 const void *data,
1257 intersected_glyphs_func_t intersected_glyphs_func)
1258 {
1259 hb_set_t *covered_seq_indicies = hb_set_create ();
1260 for (unsigned int i = 0; i < lookupCount; i++)
1261 {
1262 unsigned seqIndex = lookupRecord[i].sequenceIndex;
1263 if (seqIndex >= inputCount) continue;
1264
1265 hb_set_t *pos_glyphs = nullptr;
1266
1267 if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
1268 {
1269 pos_glyphs = hb_set_create ();
1270 if (seqIndex == 0)
1271 {
1272 switch (context_format) {
1273 case ContextFormat::SimpleContext:
1274 pos_glyphs->add (value);
1275 break;
1276 case ContextFormat::ClassBasedContext:
1277 intersected_glyphs_func (c->cur_intersected_glyphs, data, value, pos_glyphs);
1278 break;
1279 case ContextFormat::CoverageBasedContext:
1280 hb_set_set (pos_glyphs, c->cur_intersected_glyphs);
1281 break;
1282 }
1283 }
1284 else
1285 {
1286 const void *input_data = input;
1287 unsigned input_value = seqIndex - 1;
1288 if (context_format != ContextFormat::SimpleContext)
1289 {
1290 input_data = data;
1291 input_value = input[seqIndex - 1];
1292 }
1293
1294 intersected_glyphs_func (c->glyphs, input_data, input_value, pos_glyphs);
1295 }
1296 }
1297
1298 hb_set_add (covered_seq_indicies, seqIndex);
1299 if (pos_glyphs)
1300 c->push_cur_active_glyphs (pos_glyphs);
1301
1302 unsigned endIndex = inputCount;
1303 if (context_format == ContextFormat::CoverageBasedContext)
1304 endIndex += 1;
1305
1306 c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);
1307
1308 if (pos_glyphs) {
1309 c->pop_cur_done_glyphs ();
1310 hb_set_destroy (pos_glyphs);
1311 }
1312 }
1313
1314 hb_set_destroy (covered_seq_indicies);
1315 }
1316
1317 template <typename context_t>
1318 static inline void recurse_lookups (context_t *c,
1319 unsigned int lookupCount,
1320 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1321 {
1322 for (unsigned int i = 0; i < lookupCount; i++)
1323 c->recurse (lookupRecord[i].lookupListIndex);
1324 }
1325
1326 static inline bool apply_lookup (hb_ot_apply_context_t *c,
1327 unsigned int count, /* Including the first glyph */
1328 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
1329 unsigned int lookupCount,
1330 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1331 unsigned int match_length)
1332 {
1333 TRACE_APPLY (nullptr);
1334
1335 hb_buffer_t *buffer = c->buffer;
1336 int end;
1337
1338 /* All positions are distance from beginning of *output* buffer.
1339 * Adjust. */
1340 {
1341 unsigned int bl = buffer->backtrack_len ();
1342 end = bl + match_length;
1343
1344 int delta = bl - buffer->idx;
1345 /* Convert positions to new indexing. */
1346 for (unsigned int j = 0; j < count; j++)
1347 match_positions[j] += delta;
1348 }
1349
1350 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1351 {
1352 unsigned int idx = lookupRecord[i].sequenceIndex;
1353 if (idx >= count)
1354 continue;
1355
1356 /* Don't recurse to ourself at same position.
1357 * Note that this test is too naive, it doesn't catch longer loops. */
1358 if (unlikely (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index))
1359 continue;
1360
1361 if (unlikely (!buffer->move_to (match_positions[idx])))
1362 break;
1363
1364 if (unlikely (buffer->max_ops <= 0))
1365 break;
1366
1367 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1368 if (!c->recurse (lookupRecord[i].lookupListIndex))
1369 continue;
1370
1371 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1372 int delta = new_len - orig_len;
1373
1374 if (!delta)
1375 continue;
1376
1377 /* Recursed lookup changed buffer len. Adjust.
1378 *
1379 * TODO:
1380 *
1381 * Right now, if buffer length increased by n, we assume n new glyphs
1382 * were added right after the current position, and if buffer length
1383 * was decreased by n, we assume n match positions after the current
1384 * one were removed. The former (buffer length increased) case is
1385 * fine, but the decrease case can be improved in at least two ways,
1386 * both of which are significant:
1387 *
1388 * - If recursed-to lookup is MultipleSubst and buffer length
1389 * decreased, then it's current match position that was deleted,
1390 * NOT the one after it.
1391 *
1392 * - If buffer length was decreased by n, it does not necessarily
1393 * mean that n match positions were removed, as there might
1394 * have been marks and default-ignorables in the sequence. We
1395 * should instead drop match positions between current-position
1396 * and current-position + n instead. Though, am not sure which
1397 * one is better. Both cases have valid uses. Sigh.
1398 *
1399 * It should be possible to construct tests for both of these cases.
1400 */
1401
1402 end += delta;
1403 if (end <= int (match_positions[idx]))
1404 {
1405 /* End might end up being smaller than match_positions[idx] if the recursed
1406 * lookup ended up removing many items, more than we have had matched.
1407 * Just never rewind end back and get out of here.
1408 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
1409 end = match_positions[idx];
1410 /* There can't be any further changes. */
1411 break;
1412 }
1413
1414 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1415
1416 if (delta > 0)
1417 {
1418 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1419 break;
1420 }
1421 else
1422 {
1423 /* NOTE: delta is negative. */
1424 delta = hb_max (delta, (int) next - (int) count);
1425 next -= delta;
1426 }
1427
1428 /* Shift! */
1429 memmove (match_positions + next + delta, match_positions + next,
1430 (count - next) * sizeof (match_positions[0]));
1431 next += delta;
1432 count += delta;
1433
1434 /* Fill in new entries. */
1435 for (unsigned int j = idx + 1; j < next; j++)
1436 match_positions[j] = match_positions[j - 1] + 1;
1437
1438 /* And fixup the rest. */
1439 for (; next < count; next++)
1440 match_positions[next] += delta;
1441 }
1442
1443 (void) buffer->move_to (end);
1444
1445 return_trace (true);
1446 }
1447
1448
1449
1450 /* Contextual lookups */
1451
1452 struct ContextClosureLookupContext
1453 {
1454 ContextClosureFuncs funcs;
1455 ContextFormat context_format;
1456 const void *intersects_data;
1457 };
1458
1459 struct ContextCollectGlyphsLookupContext
1460 {
1461 ContextCollectGlyphsFuncs funcs;
1462 const void *collect_data;
1463 };
1464
1465 struct ContextApplyLookupContext
1466 {
1467 ContextApplyFuncs funcs;
1468 const void *match_data;
1469 };
1470
1471 static inline bool context_intersects (const hb_set_t *glyphs,
1472 unsigned int inputCount, /* Including the first glyph (not matched) */
1473 const HBUINT16 input[], /* Array of input values--start with second glyph */
1474 ContextClosureLookupContext &lookup_context)
1475 {
1476 return array_is_subset_of (glyphs,
1477 inputCount ? inputCount - 1 : 0, input,
1478 lookup_context.funcs.intersects, lookup_context.intersects_data);
1479 }
1480
1481 static inline void context_closure_lookup (hb_closure_context_t *c,
1482 unsigned int inputCount, /* Including the first glyph (not matched) */
1483 const HBUINT16 input[], /* Array of input values--start with second glyph */
1484 unsigned int lookupCount,
1485 const LookupRecord lookupRecord[],
1486 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1487 ContextClosureLookupContext &lookup_context)
1488 {
1489 if (context_intersects (c->glyphs,
1490 inputCount, input,
1491 lookup_context))
1492 context_closure_recurse_lookups (c,
1493 inputCount, input,
1494 lookupCount, lookupRecord,
1495 value,
1496 lookup_context.context_format,
1497 lookup_context.intersects_data,
1498 lookup_context.funcs.intersected_glyphs);
1499 }
1500
1501 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1502 unsigned int inputCount, /* Including the first glyph (not matched) */
1503 const HBUINT16 input[], /* Array of input values--start with second glyph */
1504 unsigned int lookupCount,
1505 const LookupRecord lookupRecord[],
1506 ContextCollectGlyphsLookupContext &lookup_context)
1507 {
1508 collect_array (c, c->input,
1509 inputCount ? inputCount - 1 : 0, input,
1510 lookup_context.funcs.collect, lookup_context.collect_data);
1511 recurse_lookups (c,
1512 lookupCount, lookupRecord);
1513 }
1514
1515 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1516 unsigned int inputCount, /* Including the first glyph (not matched) */
1517 const HBUINT16 input[], /* Array of input values--start with second glyph */
1518 unsigned int lookupCount HB_UNUSED,
1519 const LookupRecord lookupRecord[] HB_UNUSED,
1520 ContextApplyLookupContext &lookup_context)
1521 {
1522 return would_match_input (c,
1523 inputCount, input,
1524 lookup_context.funcs.match, lookup_context.match_data);
1525 }
1526 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1527 unsigned int inputCount, /* Including the first glyph (not matched) */
1528 const HBUINT16 input[], /* Array of input values--start with second glyph */
1529 unsigned int lookupCount,
1530 const LookupRecord lookupRecord[],
1531 ContextApplyLookupContext &lookup_context)
1532 {
1533 unsigned int match_length = 0;
1534 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
1535 return match_input (c,
1536 inputCount, input,
1537 lookup_context.funcs.match, lookup_context.match_data,
1538 &match_length, match_positions)
1539 && (c->buffer->unsafe_to_break (c->buffer->idx, c->buffer->idx + match_length),
1540 apply_lookup (c,
1541 inputCount, match_positions,
1542 lookupCount, lookupRecord,
1543 match_length));
1544 }
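/* Note: in context_apply_lookup above, the comma operator is used so that
 * unsafe_to_break is recorded over the matched range before apply_lookup
 * runs the nested lookups on the matched positions. */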
1545
1546 struct Rule
1547 {
1548 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1549 {
1550 return context_intersects (glyphs,
1551 inputCount, inputZ.arrayZ,
1552 lookup_context);
1553 }
1554
1555 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
1556 {
1557 if (unlikely (c->lookup_limit_exceeded ())) return;
1558
1559 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1560 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1561 context_closure_lookup (c,
1562 inputCount, inputZ.arrayZ,
1563 lookupCount, lookupRecord.arrayZ,
1564 value, lookup_context);
1565 }
1566
1567 void closure_lookups (hb_closure_lookups_context_t *c,
1568 ContextClosureLookupContext &lookup_context) const
1569 {
1570 if (unlikely (c->lookup_limit_exceeded ())) return;
1571 if (!intersects (c->glyphs, lookup_context)) return;
1572
1573 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1574 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1575 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1576 }
1577
1578 void collect_glyphs (hb_collect_glyphs_context_t *c,
1579 ContextCollectGlyphsLookupContext &lookup_context) const
1580 {
1581 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1582 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1583 context_collect_glyphs_lookup (c,
1584 inputCount, inputZ.arrayZ,
1585 lookupCount, lookupRecord.arrayZ,
1586 lookup_context);
1587 }
1588
1589 bool would_apply (hb_would_apply_context_t *c,
1590 ContextApplyLookupContext &lookup_context) const
1591 {
1592 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1593 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1594 return context_would_apply_lookup (c,
1595 inputCount, inputZ.arrayZ,
1596 lookupCount, lookupRecord.arrayZ,
1597 lookup_context);
1598 }
1599
1600 bool apply (hb_ot_apply_context_t *c,
1601 ContextApplyLookupContext &lookup_context) const
1602 {
1603 TRACE_APPLY (this);
1604 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1605 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1606 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
1607 }
1608
1609 bool serialize (hb_serialize_context_t *c,
1610 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1611 const hb_map_t *lookup_map) const
1612 {
1613 TRACE_SERIALIZE (this);
1614 auto *out = c->start_embed (this);
1615 if (unlikely (!c->extend_min (out))) return_trace (false);
1616
1617 out->inputCount = inputCount;
1618 out->lookupCount = lookupCount;
1619
1620 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1621 for (const auto org : input)
1622 {
1623 HBUINT16 d;
1624 d = input_mapping->get (org);
1625 c->copy (d);
1626 }
1627
1628 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1629 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1630 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
1631 c->copy (lookupRecord[i], lookup_map);
1632
1633 return_trace (true);
1634 }
1635
1636 bool subset (hb_subset_context_t *c,
1637 const hb_map_t *lookup_map,
1638 const hb_map_t *klass_map = nullptr) const
1639 {
1640 TRACE_SUBSET (this);
1641
1642 const hb_array_t<const HBUINT16> input = inputZ.as_array ((inputCount ? inputCount - 1 : 0));
1643 if (!input.length) return_trace (false);
1644
1645 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
1646 if (!hb_all (input, mapping)) return_trace (false);
1647 return_trace (serialize (c->serializer, mapping, lookup_map));
1648 }
1649
1650 public:
1651 bool sanitize (hb_sanitize_context_t *c) const
1652 {
1653 TRACE_SANITIZE (this);
1654 return_trace (inputCount.sanitize (c) &&
1655 lookupCount.sanitize (c) &&
1656 c->check_range (inputZ.arrayZ,
1657 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
1658 LookupRecord::static_size * lookupCount));
1659 }
1660
1661 protected:
1662 HBUINT16 inputCount; /* Total number of glyphs in input
1663 * glyph sequence--includes the first
1664 * glyph */
1665 HBUINT16 lookupCount; /* Number of LookupRecords */
1666 UnsizedArrayOf<HBUINT16>
1667 inputZ; /* Array of match inputs--start with
1668 * second glyph */
1669 /*UnsizedArrayOf<LookupRecord>
1670 lookupRecordX;*/ /* Array of LookupRecords--in
1671 * design order */
1672 public:
1673 DEFINE_SIZE_ARRAY (4, inputZ);
1674 };
1675
1676 struct RuleSet
1677 {
1678 bool intersects (const hb_set_t *glyphs,
1679 ContextClosureLookupContext &lookup_context) const
1680 {
1681 return
1682 + hb_iter (rule)
1683 | hb_map (hb_add (this))
1684 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
1685 | hb_any
1686 ;
1687 }
1688
1689 void closure (hb_closure_context_t *c, unsigned value,
1690 ContextClosureLookupContext &lookup_context) const
1691 {
1692 if (unlikely (c->lookup_limit_exceeded ())) return;
1693
1694 return
1695 + hb_iter (rule)
1696 | hb_map (hb_add (this))
1697 | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
1698 ;
1699 }
1700
1701 void closure_lookups (hb_closure_lookups_context_t *c,
1702 ContextClosureLookupContext &lookup_context) const
1703 {
1704 if (unlikely (c->lookup_limit_exceeded ())) return;
1705 + hb_iter (rule)
1706 | hb_map (hb_add (this))
1707 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
1708 ;
1709 }
1710
1711 void collect_glyphs (hb_collect_glyphs_context_t *c,
1712 ContextCollectGlyphsLookupContext &lookup_context) const
1713 {
1714 return
1715 + hb_iter (rule)
1716 | hb_map (hb_add (this))
1717 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
1718 ;
1719 }
1720
1721 bool would_apply (hb_would_apply_context_t *c,
1722 ContextApplyLookupContext &lookup_context) const
1723 {
1724 return
1725 + hb_iter (rule)
1726 | hb_map (hb_add (this))
1727 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
1728 | hb_any
1729 ;
1730 }
1731
1732 bool apply (hb_ot_apply_context_t *c,
1733 ContextApplyLookupContext &lookup_context) const
1734 {
1735 TRACE_APPLY (this);
1736 return_trace (
1737 + hb_iter (rule)
1738 | hb_map (hb_add (this))
1739 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
1740 | hb_any
1741 )
1742 ;
1743 }
1744
1745 bool subset (hb_subset_context_t *c,
1746 const hb_map_t *lookup_map,
1747 const hb_map_t *klass_map = nullptr) const
1748 {
1749 TRACE_SUBSET (this);
1750
1751 auto snap = c->serializer->snapshot ();
1752 auto *out = c->serializer->start_embed (*this);
1753 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1754
1755 for (const Offset16To<Rule>& _ : rule)
1756 {
1757 if (!_) continue;
1758 auto *o = out->rule.serialize_append (c->serializer);
1759 if (unlikely (!o)) continue;
1760
1761 auto o_snap = c->serializer->snapshot ();
1762 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
1763 {
1764 out->rule.pop ();
1765 c->serializer->revert (o_snap);
1766 }
1767 }
1768
1769 bool ret = bool (out->rule);
1770 if (!ret) c->serializer->revert (snap);
1771
1772 return_trace (ret);
1773 }
1774
1775 bool sanitize (hb_sanitize_context_t *c) const
1776 {
1777 TRACE_SANITIZE (this);
1778 return_trace (rule.sanitize (c, this));
1779 }
1780
1781 protected:
1782 Array16OfOffset16To<Rule>
1783 rule; /* Array of Rule tables
1784 * ordered by preference */
1785 public:
1786 DEFINE_SIZE_ARRAY (2, rule);
1787 };
1788
1789
1790 struct ContextFormat1
1791 {
1792 bool intersects (const hb_set_t *glyphs) const
1793 {
1794 struct ContextClosureLookupContext lookup_context = {
1795 {intersects_glyph, intersected_glyph},
1796 ContextFormat::SimpleContext,
1797 nullptr
1798 };
1799
1800 return
1801 + hb_zip (this+coverage, ruleSet)
1802 | hb_filter (*glyphs, hb_first)
1803 | hb_map (hb_second)
1804 | hb_map (hb_add (this))
1805 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
1806 | hb_any
1807 ;
1808 }
1809
1810 bool may_have_non_1to1 () const
1811 { return true; }
1812
1813 void closure (hb_closure_context_t *c) const
1814 {
1815 c->cur_intersected_glyphs->clear ();
1816 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
1817
1818 struct ContextClosureLookupContext lookup_context = {
1819 {intersects_glyph, intersected_glyph},
1820 ContextFormat::SimpleContext,
1821 nullptr
1822 };
1823
1824 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
1825 | hb_filter (c->parent_active_glyphs (), hb_first)
1826 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
1827 | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
1828 ;
1829 }
1830
1831 void closure_lookups (hb_closure_lookups_context_t *c) const
1832 {
1833 struct ContextClosureLookupContext lookup_context = {
1834 {intersects_glyph, intersected_glyph},
1835 ContextFormat::SimpleContext,
1836 nullptr
1837 };
1838
1839 + hb_zip (this+coverage, ruleSet)
1840 | hb_filter (*c->glyphs, hb_first)
1841 | hb_map (hb_second)
1842 | hb_map (hb_add (this))
1843 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
1844 ;
1845 }
1846
1847 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
1848
1849 void collect_glyphs (hb_collect_glyphs_context_t *c) const
1850 {
1851 (this+coverage).collect_coverage (c->input);
1852
1853 struct ContextCollectGlyphsLookupContext lookup_context = {
1854 {collect_glyph},
1855 nullptr
1856 };
1857
1858 + hb_iter (ruleSet)
1859 | hb_map (hb_add (this))
1860 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
1861 ;
1862 }
1863
1864 bool would_apply (hb_would_apply_context_t *c) const
1865 {
1866 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
1867 struct ContextApplyLookupContext lookup_context = {
1868 {match_glyph},
1869 nullptr
1870 };
1871 return rule_set.would_apply (c, lookup_context);
1872 }
1873
1874 const Coverage &get_coverage () const { return this+coverage; }
1875
1876 bool apply (hb_ot_apply_context_t *c) const
1877 {
1878 TRACE_APPLY (this);
1879 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
1880 if (likely (index == NOT_COVERED))
1881 return_trace (false);
1882
1883 const RuleSet &rule_set = this+ruleSet[index];
1884 struct ContextApplyLookupContext lookup_context = {
1885 {match_glyph},
1886 nullptr
1887 };
1888 return_trace (rule_set.apply (c, lookup_context));
1889 }
1890
1891 bool subset (hb_subset_context_t *c) const
1892 {
1893 TRACE_SUBSET (this);
1894 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1895 const hb_map_t &glyph_map = *c->plan->glyph_map;
1896
1897 auto *out = c->serializer->start_embed (*this);
1898 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1899 out->format = format;
1900
1901 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
1902 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1903 + hb_zip (this+coverage, ruleSet)
1904 | hb_filter (glyphset, hb_first)
1905 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
1906 | hb_map (hb_first)
1907 | hb_map (glyph_map)
1908 | hb_sink (new_coverage)
1909 ;
1910
1911 out->coverage.serialize (c->serializer, out)
1912 .serialize (c->serializer, new_coverage.iter ());
1913 return_trace (bool (new_coverage));
1914 }
1915
1916 bool sanitize (hb_sanitize_context_t *c) const
1917 {
1918 TRACE_SANITIZE (this);
1919 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
1920 }
1921
1922 protected:
1923 HBUINT16 format; /* Format identifier--format = 1 */
1924 Offset16To<Coverage>
1925 coverage; /* Offset to Coverage table--from
1926 * beginning of table */
1927 Array16OfOffset16To<RuleSet>
1928 ruleSet; /* Array of RuleSet tables
1929 * ordered by Coverage Index */
1930 public:
1931 DEFINE_SIZE_ARRAY (6, ruleSet);
1932 };
1933
1934
1935 struct ContextFormat2
1936 {
1937 bool intersects (const hb_set_t *glyphs) const
1938 {
1939 if (!(this+coverage).intersects (glyphs))
1940 return false;
1941
1942 const ClassDef &class_def = this+classDef;
1943
1944 struct ContextClosureLookupContext lookup_context = {
1945 {intersects_class, intersected_class_glyphs},
1946 ContextFormat::ClassBasedContext,
1947 &class_def
1948 };
1949
1950 return
1951 + hb_iter (ruleSet)
1952 | hb_map (hb_add (this))
1953 | hb_enumerate
1954 | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
1955 { return class_def.intersects_class (glyphs, p.first) &&
1956 p.second.intersects (glyphs, lookup_context); })
1957 | hb_any
1958 ;
1959 }
1960
1961 bool may_have_non_1to1 () const
1962 { return true; }
1963
1964 void closure (hb_closure_context_t *c) const
1965 {
1966 if (!(this+coverage).intersects (c->glyphs))
1967 return;
1968
1969 c->cur_intersected_glyphs->clear ();
1970 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
1971
1972 const ClassDef &class_def = this+classDef;
1973
1974 struct ContextClosureLookupContext lookup_context = {
1975 {intersects_class, intersected_class_glyphs},
1976 ContextFormat::ClassBasedContext,
1977 &class_def
1978 };
1979
1980 return
1981 + hb_enumerate (ruleSet)
1982 | hb_filter ([&] (unsigned _)
1983 { return class_def.intersects_class (c->cur_intersected_glyphs, _); },
1984 hb_first)
1985 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<RuleSet>&> _)
1986 {
1987 const RuleSet& rule_set = this+_.second;
1988 rule_set.closure (c, _.first, lookup_context);
1989 })
1990 ;
1991 }
1992
1993 void closure_lookups (hb_closure_lookups_context_t *c) const
1994 {
1995 if (!(this+coverage).intersects (c->glyphs))
1996 return;
1997
1998 const ClassDef &class_def = this+classDef;
1999
2000 struct ContextClosureLookupContext lookup_context = {
2001 {intersects_class, intersected_class_glyphs},
2002 ContextFormat::ClassBasedContext,
2003 &class_def
2004 };
2005
2006 + hb_iter (ruleSet)
2007 | hb_map (hb_add (this))
2008 | hb_enumerate
2009 | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2010 { return class_def.intersects_class (c->glyphs, p.first); })
2011 | hb_map (hb_second)
2012 | hb_apply ([&] (const RuleSet & _)
2013 { _.closure_lookups (c, lookup_context); });
2014 }
2015
2016 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2017
2018 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2019 {
2020 (this+coverage).collect_coverage (c->input);
2021
2022 const ClassDef &class_def = this+classDef;
2023 struct ContextCollectGlyphsLookupContext lookup_context = {
2024 {collect_class},
2025 &class_def
2026 };
2027
2028 + hb_iter (ruleSet)
2029 | hb_map (hb_add (this))
2030 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2031 ;
2032 }
2033
2034 bool would_apply (hb_would_apply_context_t *c) const
2035 {
2036 const ClassDef &class_def = this+classDef;
2037 unsigned int index = class_def.get_class (c->glyphs[0]);
2038 const RuleSet &rule_set = this+ruleSet[index];
2039 struct ContextApplyLookupContext lookup_context = {
2040 {match_class},
2041 &class_def
2042 };
2043 return rule_set.would_apply (c, lookup_context);
2044 }
2045
2046 const Coverage &get_coverage () const { return this+coverage; }
2047
2048 bool apply (hb_ot_apply_context_t *c) const
2049 {
2050 TRACE_APPLY (this);
2051 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2052 if (likely (index == NOT_COVERED)) return_trace (false);
2053
2054 const ClassDef &class_def = this+classDef;
2055 index = class_def.get_class (c->buffer->cur().codepoint);
2056 const RuleSet &rule_set = this+ruleSet[index];
2057 struct ContextApplyLookupContext lookup_context = {
2058 {match_class},
2059 &class_def
2060 };
2061 return_trace (rule_set.apply (c, lookup_context));
2062 }
2063
2064 bool subset (hb_subset_context_t *c) const
2065 {
2066 TRACE_SUBSET (this);
2067 auto *out = c->serializer->start_embed (*this);
2068 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2069 out->format = format;
2070 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
2071 return_trace (false);
2072
2073 hb_map_t klass_map;
2074 out->classDef.serialize_subset (c, classDef, this, &klass_map);
2075
2076 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2077 bool ret = true;
2078 int non_zero_index = 0, index = 0;
2079 for (const auto& _ : + hb_enumerate (ruleSet)
2080 | hb_filter (klass_map, hb_first))
2081 {
2082 auto *o = out->ruleSet.serialize_append (c->serializer);
2083 if (unlikely (!o))
2084 {
2085 ret = false;
2086 break;
2087 }
2088
2089 if (o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
2090 non_zero_index = index;
2091
2092 index++;
2093 }
2094
2095 if (!ret) return_trace (ret);
2096
2097 // prune empty trailing ruleSets
2098 --index;
2099 while (index > non_zero_index)
2100 {
2101 out->ruleSet.pop ();
2102 index--;
2103 }
2104
2105 return_trace (bool (out->ruleSet));
2106 }
2107
2108 bool sanitize (hb_sanitize_context_t *c) const
2109 {
2110 TRACE_SANITIZE (this);
2111 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
2112 }
2113
2114 protected:
2115 HBUINT16 format; /* Format identifier--format = 2 */
2116 Offset16To<Coverage>
2117 coverage; /* Offset to Coverage table--from
2118 * beginning of table */
2119 Offset16To<ClassDef>
2120 classDef; /* Offset to glyph ClassDef table--from
2121 * beginning of table */
2122 Array16OfOffset16To<RuleSet>
2123 ruleSet; /* Array of RuleSet tables
2124 * ordered by class */
2125 public:
2126 DEFINE_SIZE_ARRAY (8, ruleSet);
2127 };
2128
2129
2130 struct ContextFormat3
2131 {
2132 bool intersects (const hb_set_t *glyphs) const
2133 {
2134 if (!(this+coverageZ[0]).intersects (glyphs))
2135 return false;
2136
2137 struct ContextClosureLookupContext lookup_context = {
2138 {intersects_coverage, intersected_coverage_glyphs},
2139 ContextFormat::CoverageBasedContext,
2140 this
2141 };
2142 return context_intersects (glyphs,
2143 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2144 lookup_context);
2145 }
2146
2147 bool may_have_non_1to1 () const
2148 { return true; }
2149
2150 void closure (hb_closure_context_t *c) const
2151 {
2152 if (!(this+coverageZ[0]).intersects (c->glyphs))
2153 return;
2154
2155 c->cur_intersected_glyphs->clear ();
2156 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
2157
2158 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2159 struct ContextClosureLookupContext lookup_context = {
2160 {intersects_coverage, intersected_coverage_glyphs},
2161 ContextFormat::CoverageBasedContext,
2162 this
2163 };
2164 context_closure_lookup (c,
2165 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2166 lookupCount, lookupRecord,
2167 0, lookup_context);
2168 }
2169
2170 void closure_lookups (hb_closure_lookups_context_t *c) const
2171 {
2172 if (!intersects (c->glyphs))
2173 return;
2174 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2175 recurse_lookups (c, lookupCount, lookupRecord);
2176 }
2177
2178 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2179
2180 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2181 {
2182 (this+coverageZ[0]).collect_coverage (c->input);
2183
2184 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2185 struct ContextCollectGlyphsLookupContext lookup_context = {
2186 {collect_coverage},
2187 this
2188 };
2189
2190 context_collect_glyphs_lookup (c,
2191 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2192 lookupCount, lookupRecord,
2193 lookup_context);
2194 }
2195
2196 bool would_apply (hb_would_apply_context_t *c) const
2197 {
2198 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2199 struct ContextApplyLookupContext lookup_context = {
2200 {match_coverage},
2201 this
2202 };
2203 return context_would_apply_lookup (c,
2204 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2205 lookupCount, lookupRecord,
2206 lookup_context);
2207 }
2208
2209 const Coverage &get_coverage () const { return this+coverageZ[0]; }
2210
2211 bool apply (hb_ot_apply_context_t *c) const
2212 {
2213 TRACE_APPLY (this);
2214 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2215 if (likely (index == NOT_COVERED)) return_trace (false);
2216
2217 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2218 struct ContextApplyLookupContext lookup_context = {
2219 {match_coverage},
2220 this
2221 };
2222 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
2223 }
2224
2225 bool subset (hb_subset_context_t *c) const
2226 {
2227 TRACE_SUBSET (this);
2228 auto *out = c->serializer->start_embed (this);
2229 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2230
2231 out->format = format;
2232 out->glyphCount = glyphCount;
2233 out->lookupCount = lookupCount;
2234
2235 auto coverages = coverageZ.as_array (glyphCount);
2236
2237 for (const Offset16To<Coverage>& offset : coverages)
2238 {
2239 /* TODO(subset) This looks like it should not be necessary to write this way. */
2240 auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
2241 if (unlikely (!o)) return_trace (false);
2242 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2243 }
2244
2245 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2246 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2247 for (unsigned i = 0; i < (unsigned) lookupCount; i++)
2248 c->serializer->copy (lookupRecord[i], lookup_map);
2249
2250 return_trace (true);
2251 }
2252
2253 bool sanitize (hb_sanitize_context_t *c) const
2254 {
2255 TRACE_SANITIZE (this);
2256 if (!c->check_struct (this)) return_trace (false);
2257 unsigned int count = glyphCount;
2258 if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
2259 if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
2260 for (unsigned int i = 0; i < count; i++)
2261 if (!coverageZ[i].sanitize (c, this)) return_trace (false);
2262 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2263 return_trace (c->check_array (lookupRecord, lookupCount));
2264 }
2265
2266 protected:
2267 HBUINT16 format; /* Format identifier--format = 3 */
2268 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2269 * sequence */
2270 HBUINT16 lookupCount; /* Number of LookupRecords */
2271 UnsizedArrayOf<Offset16To<Coverage>>
2272 coverageZ; /* Array of offsets to Coverage
2273 * table in glyph sequence order */
2274 /*UnsizedArrayOf<LookupRecord>
2275 lookupRecordX;*/ /* Array of LookupRecords--in
2276 * design order */
2277 public:
2278 DEFINE_SIZE_ARRAY (6, coverageZ);
2279 };
2280
2281 struct Context
2282 {
2283 template <typename context_t, typename ...Ts>
2284 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2285 {
2286 TRACE_DISPATCH (this, u.format);
2287 if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2288 switch (u.format) {
2289 case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2290 case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
2291 case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
2292 default:return_trace (c->default_return_value ());
2293 }
2294 }
2295
2296 protected:
2297 union {
2298 HBUINT16 format; /* Format identifier */
2299 ContextFormat1 format1;
2300 ContextFormat2 format2;
2301 ContextFormat3 format3;
2302 } u;
2303 };
2304
2305
2306 /* Chaining Contextual lookups */
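/* Chain contextual lookups extend the plain contextual ones with a
 * backtrack sequence (matched before the current position) and a
 * lookahead sequence (matched after the input). That is why the
 * lookup-context structs below carry three-element data arrays:
 * [0] backtrack, [1] input, [2] lookahead. */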
2307
2308 struct ChainContextClosureLookupContext
2309 {
2310 ContextClosureFuncs funcs;
2311 ContextFormat context_format;
2312 const void *intersects_data[3];
2313 };
2314
2315 struct ChainContextCollectGlyphsLookupContext
2316 {
2317 ContextCollectGlyphsFuncs funcs;
2318 const void *collect_data[3];
2319 };
2320
2321 struct ChainContextApplyLookupContext
2322 {
2323 ContextApplyFuncs funcs;
2324 const void *match_data[3];
2325 };
2326
2327 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2328 unsigned int backtrackCount,
2329 const HBUINT16 backtrack[],
2330 unsigned int inputCount, /* Including the first glyph (not matched) */
2331 const HBUINT16 input[], /* Array of input values--start with second glyph */
2332 unsigned int lookaheadCount,
2333 const HBUINT16 lookahead[],
2334 ChainContextClosureLookupContext &lookup_context)
2335 {
2336 return array_is_subset_of (glyphs,
2337 backtrackCount, backtrack,
2338 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2339 && array_is_subset_of (glyphs,
2340 inputCount ? inputCount - 1 : 0, input,
2341 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2342 && array_is_subset_of (glyphs,
2343 lookaheadCount, lookahead,
2344 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
2345 }
2346
2347 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2348 unsigned int backtrackCount,
2349 const HBUINT16 backtrack[],
2350 unsigned int inputCount, /* Including the first glyph (not matched) */
2351 const HBUINT16 input[], /* Array of input values--start with second glyph */
2352 unsigned int lookaheadCount,
2353 const HBUINT16 lookahead[],
2354 unsigned int lookupCount,
2355 const LookupRecord lookupRecord[],
2356 unsigned value,
2357 ChainContextClosureLookupContext &lookup_context)
2358 {
2359 if (chain_context_intersects (c->glyphs,
2360 backtrackCount, backtrack,
2361 inputCount, input,
2362 lookaheadCount, lookahead,
2363 lookup_context))
2364 context_closure_recurse_lookups (c,
2365 inputCount, input,
2366 lookupCount, lookupRecord,
2367 value,
2368 lookup_context.context_format,
2369 lookup_context.intersects_data[1],
2370 lookup_context.funcs.intersected_glyphs);
2371 }
2372
2373 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
2374 unsigned int backtrackCount,
2375 const HBUINT16 backtrack[],
2376 unsigned int inputCount, /* Including the first glyph (not matched) */
2377 const HBUINT16 input[], /* Array of input values--start with second glyph */
2378 unsigned int lookaheadCount,
2379 const HBUINT16 lookahead[],
2380 unsigned int lookupCount,
2381 const LookupRecord lookupRecord[],
2382 ChainContextCollectGlyphsLookupContext &lookup_context)
2383 {
2384 collect_array (c, c->before,
2385 backtrackCount, backtrack,
2386 lookup_context.funcs.collect, lookup_context.collect_data[0]);
2387 collect_array (c, c->input,
2388 inputCount ? inputCount - 1 : 0, input,
2389 lookup_context.funcs.collect, lookup_context.collect_data[1]);
2390 collect_array (c, c->after,
2391 lookaheadCount, lookahead,
2392 lookup_context.funcs.collect, lookup_context.collect_data[2]);
2393 recurse_lookups (c,
2394 lookupCount, lookupRecord);
2395 }
2396
2397 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2398 unsigned int backtrackCount,
2399 const HBUINT16 backtrack[] HB_UNUSED,
2400 unsigned int inputCount, /* Including the first glyph (not matched) */
2401 const HBUINT16 input[], /* Array of input values--start with second glyph */
2402 unsigned int lookaheadCount,
2403 const HBUINT16 lookahead[] HB_UNUSED,
2404 unsigned int lookupCount HB_UNUSED,
2405 const LookupRecord lookupRecord[] HB_UNUSED,
2406 ChainContextApplyLookupContext &lookup_context)
2407 {
2408 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2409 && would_match_input (c,
2410 inputCount, input,
2411 lookup_context.funcs.match, lookup_context.match_data[1]);
2412 }
2413
2414 static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
2415 unsigned int backtrackCount,
2416 const HBUINT16 backtrack[],
2417 unsigned int inputCount, /* Including the first glyph (not matched) */
2418 const HBUINT16 input[], /* Array of input values--start with second glyph */
2419 unsigned int lookaheadCount,
2420 const HBUINT16 lookahead[],
2421 unsigned int lookupCount,
2422 const LookupRecord lookupRecord[],
2423 ChainContextApplyLookupContext &lookup_context)
2424 {
2425 unsigned int start_index = 0, match_length = 0, end_index = 0;
2426 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH];
2427 return match_input (c,
2428 inputCount, input,
2429 lookup_context.funcs.match, lookup_context.match_data[1],
2430 &match_length, match_positions)
2431 && match_backtrack (c,
2432 backtrackCount, backtrack,
2433 lookup_context.funcs.match, lookup_context.match_data[0],
2434 &start_index)
2435 && match_lookahead (c,
2436 lookaheadCount, lookahead,
2437 lookup_context.funcs.match, lookup_context.match_data[2],
2438 match_length, &end_index)
2439 && (c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index),
2440 apply_lookup (c,
2441 inputCount, match_positions,
2442 lookupCount, lookupRecord,
2443 match_length));
2444 }
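/* Note: chain_context_apply_lookup above matches the input sequence first,
 * then the backtrack and lookahead; only when all three match does it mark
 * unsafe_to_break_from_outbuffer and hand the matched positions to
 * apply_lookup. */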
2445
2446 struct ChainRule
2447 {
2448 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2449 {
2450 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2451 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2452 return chain_context_intersects (glyphs,
2453 backtrack.len, backtrack.arrayZ,
2454 input.lenP1, input.arrayZ,
2455 lookahead.len, lookahead.arrayZ,
2456 lookup_context);
2457 }
2458
2459 void closure (hb_closure_context_t *c, unsigned value,
2460 ChainContextClosureLookupContext &lookup_context) const
2461 {
2462 if (unlikely (c->lookup_limit_exceeded ())) return;
2463
2464 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2465 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2466 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2467 chain_context_closure_lookup (c,
2468 backtrack.len, backtrack.arrayZ,
2469 input.lenP1, input.arrayZ,
2470 lookahead.len, lookahead.arrayZ,
2471 lookup.len, lookup.arrayZ,
2472 value,
2473 lookup_context);
2474 }
2475
2476 void closure_lookups (hb_closure_lookups_context_t *c,
2477 ChainContextClosureLookupContext &lookup_context) const
2478 {
2479 if (unlikely (c->lookup_limit_exceeded ())) return;
2480 if (!intersects (c->glyphs, lookup_context)) return;
2481
2482 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2483 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2484 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2485 recurse_lookups (c, lookup.len, lookup.arrayZ);
2486 }
2487
2488 void collect_glyphs (hb_collect_glyphs_context_t *c,
2489 ChainContextCollectGlyphsLookupContext &lookup_context) const
2490 {
2491 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2492 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2493 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2494 chain_context_collect_glyphs_lookup (c,
2495 backtrack.len, backtrack.arrayZ,
2496 input.lenP1, input.arrayZ,
2497 lookahead.len, lookahead.arrayZ,
2498 lookup.len, lookup.arrayZ,
2499 lookup_context);
2500 }
2501
2502 bool would_apply (hb_would_apply_context_t *c,
2503 ChainContextApplyLookupContext &lookup_context) const
2504 {
2505 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2506 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2507 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2508 return chain_context_would_apply_lookup (c,
2509 backtrack.len, backtrack.arrayZ,
2510 input.lenP1, input.arrayZ,
2511 lookahead.len, lookahead.arrayZ, lookup.len,
2512 lookup.arrayZ, lookup_context);
2513 }
2514
2515 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2516 {
2517 TRACE_APPLY (this);
2518 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2519 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2520 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2521 return_trace (chain_context_apply_lookup (c,
2522 backtrack.len, backtrack.arrayZ,
2523 input.lenP1, input.arrayZ,
2524 lookahead.len, lookahead.arrayZ, lookup.len,
2525 lookup.arrayZ, lookup_context));
2526 }
2527
2528 template<typename Iterator,
2529 hb_requires (hb_is_iterator (Iterator))>
2530 void serialize_array (hb_serialize_context_t *c,
2531 HBUINT16 len,
2532 Iterator it) const
2533 {
2534 c->copy (len);
2535 for (const auto g : it)
2536 c->copy ((HBUINT16) g);
2537 }
2538
2539 ChainRule* copy (hb_serialize_context_t *c,
2540 const hb_map_t *lookup_map,
2541 const hb_map_t *backtrack_map,
2542 const hb_map_t *input_map = nullptr,
2543 const hb_map_t *lookahead_map = nullptr) const
2544 {
2545 TRACE_SERIALIZE (this);
2546 auto *out = c->start_embed (this);
2547 if (unlikely (!out)) return_trace (nullptr);
2548
2549 const hb_map_t *mapping = backtrack_map;
2550 serialize_array (c, backtrack.len, + backtrack.iter ()
2551 | hb_map (mapping));
2552
2553 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2554 if (input_map) mapping = input_map;
2555 serialize_array (c, input.lenP1, + input.iter ()
2556 | hb_map (mapping));
2557
2558 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2559 if (lookahead_map) mapping = lookahead_map;
2560 serialize_array (c, lookahead.len, + lookahead.iter ()
2561 | hb_map (mapping));
2562
2563 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
2564
2565 HBUINT16* lookupCount = c->embed (&(lookupRecord.len));
2566 if (!lookupCount) return_trace (nullptr);
2567
2568 for (unsigned i = 0; i < lookupRecord.len; i++)
2569 {
2570 if (!lookup_map->has (lookupRecord[i].lookupListIndex))
2571 {
2572 (*lookupCount)--;
2573 continue;
2574 }
2575 if (!c->copy (lookupRecord[i], lookup_map)) return_trace (nullptr);
2576 }
2577
2578 return_trace (out);
2579 }
2580
2581 bool subset (hb_subset_context_t *c,
2582 const hb_map_t *lookup_map,
2583 const hb_map_t *backtrack_map = nullptr,
2584 const hb_map_t *input_map = nullptr,
2585 const hb_map_t *lookahead_map = nullptr) const
2586 {
2587 TRACE_SUBSET (this);
2588
2589 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2590 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2591
2592 if (!backtrack_map)
2593 {
2594 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2595 if (!hb_all (backtrack, glyphset) ||
2596 !hb_all (input, glyphset) ||
2597 !hb_all (lookahead, glyphset))
2598 return_trace (false);
2599
2600 copy (c->serializer, lookup_map, c->plan->glyph_map);
2601 }
2602 else
2603 {
2604 if (!hb_all (backtrack, backtrack_map) ||
2605 !hb_all (input, input_map) ||
2606 !hb_all (lookahead, lookahead_map))
2607 return_trace (false);
2608
2609 copy (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2610 }
2611
2612 return_trace (true);
2613 }
2614
2615 bool sanitize (hb_sanitize_context_t *c) const
2616 {
2617 TRACE_SANITIZE (this);
2618 if (!backtrack.sanitize (c)) return_trace (false);
2619 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2620 if (!input.sanitize (c)) return_trace (false);
2621 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2622 if (!lookahead.sanitize (c)) return_trace (false);
2623 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2624 return_trace (lookup.sanitize (c));
2625 }
2626
2627 protected:
2628 Array16Of<HBUINT16>
2629 backtrack; /* Array of backtracking values
2630 * (to be matched before the input
2631 * sequence) */
2632 HeadlessArrayOf<HBUINT16>
2633 inputX; /* Array of input values (start with
2634 * second glyph) */
2635 Array16Of<HBUINT16>
2636 lookaheadX; /* Array of lookahead values (to be
2637 * matched after the input sequence) */
2638 Array16Of<LookupRecord>
2639 lookupX; /* Array of LookupRecords--in
2640 * design order */
2641 public:
2642 DEFINE_SIZE_MIN (8);
2643 };
2644
2645 struct ChainRuleSet
2646 {
2647 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2648 {
2649 return
2650 + hb_iter (rule)
2651 | hb_map (hb_add (this))
2652 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
2653 | hb_any
2654 ;
2655 }
2656 void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
2657 {
2658 if (unlikely (c->lookup_limit_exceeded ())) return;
2659
2660 return
2661 + hb_iter (rule)
2662 | hb_map (hb_add (this))
2663 | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
2664 ;
2665 }
2666
2667 void closure_lookups (hb_closure_lookups_context_t *c,
2668 ChainContextClosureLookupContext &lookup_context) const
2669 {
2670 if (unlikely (c->lookup_limit_exceeded ())) return;
2671
2672 + hb_iter (rule)
2673 | hb_map (hb_add (this))
2674 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
2675 ;
2676 }
2677
2678 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2679 {
2680 return
2681 + hb_iter (rule)
2682 | hb_map (hb_add (this))
2683 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
2684 ;
2685 }
2686
2687 bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2688 {
2689 return
2690 + hb_iter (rule)
2691 | hb_map (hb_add (this))
2692 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
2693 | hb_any
2694 ;
2695 }
2696
2697 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2698 {
2699 TRACE_APPLY (this);
2700 return_trace (
2701 + hb_iter (rule)
2702 | hb_map (hb_add (this))
2703 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
2704 | hb_any
2705 )
2706 ;
2707 }
2708
2709 bool subset (hb_subset_context_t *c,
2710 const hb_map_t *lookup_map,
2711 const hb_map_t *backtrack_klass_map = nullptr,
2712 const hb_map_t *input_klass_map = nullptr,
2713 const hb_map_t *lookahead_klass_map = nullptr) const
2714 {
2715 TRACE_SUBSET (this);
2716
2717 auto snap = c->serializer->snapshot ();
2718 auto *out = c->serializer->start_embed (*this);
2719 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2720
2721 for (const Offset16To<ChainRule>& _ : rule)
2722 {
2723 if (!_) continue;
2724 auto *o = out->rule.serialize_append (c->serializer);
2725 if (unlikely (!o)) continue;
2726
2727 auto o_snap = c->serializer->snapshot ();
2728 if (!o->serialize_subset (c, _, this,
2729 lookup_map,
2730 backtrack_klass_map,
2731 input_klass_map,
2732 lookahead_klass_map))
2733 {
2734 out->rule.pop ();
2735 c->serializer->revert (o_snap);
2736 }
2737 }
2738
2739 bool ret = bool (out->rule);
2740 if (!ret) c->serializer->revert (snap);
2741
2742 return_trace (ret);
2743 }
2744
2745 bool sanitize (hb_sanitize_context_t *c) const
2746 {
2747 TRACE_SANITIZE (this);
2748 return_trace (rule.sanitize (c, this));
2749 }
2750
2751 protected:
2752 Array16OfOffset16To<ChainRule>
2753 rule; /* Array of ChainRule tables
2754 * ordered by preference */
2755 public:
2756 DEFINE_SIZE_ARRAY (2, rule);
2757 };
2758
2759 struct ChainContextFormat1
2760 {
2761 bool intersects (const hb_set_t *glyphs) const
2762 {
2763 struct ChainContextClosureLookupContext lookup_context = {
2764 {intersects_glyph, intersected_glyph},
2765 ContextFormat::SimpleContext,
2766 {nullptr, nullptr, nullptr}
2767 };
2768
2769 return
2770 + hb_zip (this+coverage, ruleSet)
2771 | hb_filter (*glyphs, hb_first)
2772 | hb_map (hb_second)
2773 | hb_map (hb_add (this))
2774 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2775 | hb_any
2776 ;
2777 }
2778
2779 bool may_have_non_1to1 () const
2780 { return true; }
2781
2782 void closure (hb_closure_context_t *c) const
2783 {
2784 c->cur_intersected_glyphs->clear ();
2785 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
2786
2787 struct ChainContextClosureLookupContext lookup_context = {
2788 {intersects_glyph, intersected_glyph},
2789 ContextFormat::SimpleContext,
2790 {nullptr, nullptr, nullptr}
2791 };
2792
2793 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2794 | hb_filter (c->parent_active_glyphs (), hb_first)
2795 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
2796 | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2797 ;
2798 }
2799
2800 void closure_lookups (hb_closure_lookups_context_t *c) const
2801 {
2802 struct ChainContextClosureLookupContext lookup_context = {
2803 {intersects_glyph, intersected_glyph},
2804 ContextFormat::SimpleContext,
2805 {nullptr, nullptr, nullptr}
2806 };
2807
2808 + hb_zip (this+coverage, ruleSet)
2809 | hb_filter (*c->glyphs, hb_first)
2810 | hb_map (hb_second)
2811 | hb_map (hb_add (this))
2812 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
2813 ;
2814 }
2815
2816 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2817
2818 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2819 {
2820 (this+coverage).collect_coverage (c->input);
2821
2822 struct ChainContextCollectGlyphsLookupContext lookup_context = {
2823 {collect_glyph},
2824 {nullptr, nullptr, nullptr}
2825 };
2826
2827 + hb_iter (ruleSet)
2828 | hb_map (hb_add (this))
2829 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2830 ;
2831 }
2832
2833 bool would_apply (hb_would_apply_context_t *c) const
2834 {
2835 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2836 struct ChainContextApplyLookupContext lookup_context = {
2837 {match_glyph},
2838 {nullptr, nullptr, nullptr}
2839 };
2840 return rule_set.would_apply (c, lookup_context);
2841 }
2842
2843 const Coverage &get_coverage () const { return this+coverage; }
2844
2845 bool apply (hb_ot_apply_context_t *c) const
2846 {
2847 TRACE_APPLY (this);
2848 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2849 if (likely (index == NOT_COVERED)) return_trace (false);
2850
2851 const ChainRuleSet &rule_set = this+ruleSet[index];
2852 struct ChainContextApplyLookupContext lookup_context = {
2853 {match_glyph},
2854 {nullptr, nullptr, nullptr}
2855 };
2856 return_trace (rule_set.apply (c, lookup_context));
2857 }
2858
2859 bool subset (hb_subset_context_t *c) const
2860 {
2861 TRACE_SUBSET (this);
2862 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2863 const hb_map_t &glyph_map = *c->plan->glyph_map;
2864
2865 auto *out = c->serializer->start_embed (*this);
2866 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2867 out->format = format;
2868
2869 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2870 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2871 + hb_zip (this+coverage, ruleSet)
2872 | hb_filter (glyphset, hb_first)
2873 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2874 | hb_map (hb_first)
2875 | hb_map (glyph_map)
2876 | hb_sink (new_coverage)
2877 ;
2878
2879 out->coverage.serialize (c->serializer, out)
2880 .serialize (c->serializer, new_coverage.iter ());
2881 return_trace (bool (new_coverage));
2882 }
2883
2884 bool sanitize (hb_sanitize_context_t *c) const
2885 {
2886 TRACE_SANITIZE (this);
2887 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2888 }
2889
2890 protected:
2891 HBUINT16 format; /* Format identifier--format = 1 */
2892 Offset16To<Coverage>
2893 coverage; /* Offset to Coverage table--from
2894 * beginning of table */
2895 Array16OfOffset16To<ChainRuleSet>
2896 ruleSet; /* Array of ChainRuleSet tables
2897 * ordered by Coverage Index */
2898 public:
2899 DEFINE_SIZE_ARRAY (6, ruleSet);
2900 };
2901
2902 struct ChainContextFormat2
2903 {
2904 bool intersects (const hb_set_t *glyphs) const
2905 {
2906 if (!(this+coverage).intersects (glyphs))
2907 return false;
2908
2909 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2910 const ClassDef &input_class_def = this+inputClassDef;
2911 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2912
2913 struct ChainContextClosureLookupContext lookup_context = {
2914 {intersects_class, intersected_class_glyphs},
2915 ContextFormat::ClassBasedContext,
2916 {&backtrack_class_def,
2917 &input_class_def,
2918 &lookahead_class_def}
2919 };
2920
2921 return
2922 + hb_iter (ruleSet)
2923 | hb_map (hb_add (this))
2924 | hb_enumerate
2925 | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
2926 { return input_class_def.intersects_class (glyphs, p.first) &&
2927 p.second.intersects (glyphs, lookup_context); })
2928 | hb_any
2929 ;
2930 }
2931
2932 bool may_have_non_1to1 () const
2933 { return true; }
2934
2935 void closure (hb_closure_context_t *c) const
2936 {
2937 if (!(this+coverage).intersects (c->glyphs))
2938 return;
2939
2940 c->cur_intersected_glyphs->clear ();
2941 get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);
2942
2943 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2944 const ClassDef &input_class_def = this+inputClassDef;
2945 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2946
2947 struct ChainContextClosureLookupContext lookup_context = {
2948 {intersects_class, intersected_class_glyphs},
2949 ContextFormat::ClassBasedContext,
2950 {&backtrack_class_def,
2951 &input_class_def,
2952 &lookahead_class_def}
2953 };
2954
2955 return
2956 + hb_enumerate (ruleSet)
2957 | hb_filter ([&] (unsigned _)
2958 { return input_class_def.intersects_class (c->cur_intersected_glyphs, _); },
2959 hb_first)
2960 | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<ChainRuleSet>&> _)
2961 {
2962 const ChainRuleSet& chainrule_set = this+_.second;
2963 chainrule_set.closure (c, _.first, lookup_context);
2964 })
2965 ;
2966 }
2967
2968 void closure_lookups (hb_closure_lookups_context_t *c) const
2969 {
2970 if (!(this+coverage).intersects (c->glyphs))
2971 return;
2972
2973 const ClassDef &backtrack_class_def = this+backtrackClassDef;
2974 const ClassDef &input_class_def = this+inputClassDef;
2975 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
2976
2977 struct ChainContextClosureLookupContext lookup_context = {
2978 {intersects_class, intersected_class_glyphs},
2979 ContextFormat::ClassBasedContext,
2980 {&backtrack_class_def,
2981 &input_class_def,
2982 &lookahead_class_def}
2983 };
2984
2985 + hb_iter (ruleSet)
2986 | hb_map (hb_add (this))
2987 | hb_enumerate
2988 | hb_filter([&] (unsigned klass)
2989 { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
2990 | hb_map (hb_second)
2991 | hb_apply ([&] (const ChainRuleSet &_)
2992 { _.closure_lookups (c, lookup_context); })
2993 ;
2994 }
2995
2996 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2997
2998 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2999 {
3000 (this+coverage).collect_coverage (c->input);
3001
3002 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3003 const ClassDef &input_class_def = this+inputClassDef;
3004 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3005
3006 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3007 {collect_class},
3008 {&backtrack_class_def,
3009 &input_class_def,
3010 &lookahead_class_def}
3011 };
3012
3013 + hb_iter (ruleSet)
3014 | hb_map (hb_add (this))
3015 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3016 ;
3017 }
3018
3019 bool would_apply (hb_would_apply_context_t *c) const
3020 {
3021 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3022 const ClassDef &input_class_def = this+inputClassDef;
3023 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3024
3025 unsigned int index = input_class_def.get_class (c->glyphs[0]);
3026 const ChainRuleSet &rule_set = this+ruleSet[index];
3027 struct ChainContextApplyLookupContext lookup_context = {
3028 {match_class},
3029 {&backtrack_class_def,
3030 &input_class_def,
3031 &lookahead_class_def}
3032 };
3033 return rule_set.would_apply (c, lookup_context);
3034 }
3035
get_coverageOT::ChainContextFormat23036 const Coverage &get_coverage () const { return this+coverage; }
3037
applyOT::ChainContextFormat23038 bool apply (hb_ot_apply_context_t *c) const
3039 {
3040 TRACE_APPLY (this);
3041 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3042 if (likely (index == NOT_COVERED)) return_trace (false);
3043
3044 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3045 const ClassDef &input_class_def = this+inputClassDef;
3046 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3047
3048 index = input_class_def.get_class (c->buffer->cur().codepoint);
3049 const ChainRuleSet &rule_set = this+ruleSet[index];
3050 struct ChainContextApplyLookupContext lookup_context = {
3051 {match_class},
3052 {&backtrack_class_def,
3053 &input_class_def,
3054 &lookahead_class_def}
3055 };
3056 return_trace (rule_set.apply (c, lookup_context));
3057 }
3058
subsetOT::ChainContextFormat23059 bool subset (hb_subset_context_t *c) const
3060 {
3061 TRACE_SUBSET (this);
3062 auto *out = c->serializer->start_embed (*this);
3063 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3064 out->format = format;
3065 out->coverage.serialize_subset (c, coverage, this);
3066
3067 hb_map_t backtrack_klass_map;
3068 hb_map_t input_klass_map;
3069 hb_map_t lookahead_klass_map;
3070
3071 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
    // TODO: subset inputClassDef based on the glyphs that survived Coverage subsetting
    out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
    out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);

    if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
                                                   input_klass_map,
                                                   lookahead_klass_map)))
      return_trace (false);

    int non_zero_index = -1, index = 0;
    bool ret = true;
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    auto last_non_zero = c->serializer->snapshot ();
    for (const Offset16To<ChainRuleSet>& _ : + hb_enumerate (ruleSet)
                                             | hb_filter (input_klass_map, hb_first)
                                             | hb_map (hb_second))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
        ret = false;
        break;
      }
      if (o->serialize_subset (c, _, this,
                               lookup_map,
                               &backtrack_klass_map,
                               &input_klass_map,
                               &lookahead_klass_map))
      {
        last_non_zero = c->serializer->snapshot ();
        non_zero_index = index;
      }

      index++;
    }

    if (!ret) return_trace (ret);

    // prune empty trailing ruleSets
    if (index > non_zero_index) {
      c->serializer->revert (last_non_zero);
      out->ruleSet.len = non_zero_index + 1;
    }

    return_trace (bool (out->ruleSet));
  }
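
  /* Note on the pruning step above: ruleSet is indexed by the (remapped) input
   * class value, so interior entries must keep their positions even when they
   * subset to empty; only the trailing run of empty ChainRuleSets can be
   * dropped by shortening the array.  A rough illustration with hypothetical
   * class values (not from any particular font): if classes 0..4 subset to
   * { set0, empty, set2, empty, empty }, the serializer reverts to the
   * snapshot taken right after set2 and writes len = 3, keeping the empty
   * slot for class 1 in place. */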

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
                  backtrackClassDef.sanitize (c, this) &&
                  inputClassDef.sanitize (c, this) &&
                  lookaheadClassDef.sanitize (c, this) &&
                  ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 2 */
  Offset16To<Coverage>
                coverage;           /* Offset to Coverage table--from
                                     * beginning of table */
  Offset16To<ClassDef>
                backtrackClassDef;  /* Offset to glyph ClassDef table
                                     * containing backtrack sequence
                                     * data--from beginning of table */
  Offset16To<ClassDef>
                inputClassDef;      /* Offset to glyph ClassDef
                                     * table containing input sequence
                                     * data--from beginning of table */
  Offset16To<ClassDef>
                lookaheadClassDef;  /* Offset to glyph ClassDef table
                                     * containing lookahead sequence
                                     * data--from beginning of table */
  Array16OfOffset16To<ChainRuleSet>
                ruleSet;            /* Array of ChainRuleSet tables
                                     * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (12, ruleSet);
};

struct ChainContextFormat3
{
  bool intersects (const hb_set_t *glyphs) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    if (!(this+input[0]).intersects (glyphs))
      return false;

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    return chain_context_intersects (glyphs,
                                     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                     input.len, (const HBUINT16 *) input.arrayZ + 1,
                                     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                     lookup_context);
  }

  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    c->cur_intersected_glyphs->clear ();
    get_coverage ().intersected_coverage_glyphs (c->parent_active_glyphs (), c->cur_intersected_glyphs);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    chain_context_closure_lookup (c,
                                  backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                  input.len, (const HBUINT16 *) input.arrayZ + 1,
                                  lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                  lookup.len, lookup.arrayZ,
                                  0, lookup_context);
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!intersects (c->glyphs))
      return;

    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    recurse_lookups (c, lookup.len, lookup.arrayZ);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    (this+input[0]).collect_coverage (c->input);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
                                         backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                         input.len, (const HBUINT16 *) input.arrayZ + 1,
                                         lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                         lookup.len, lookup.arrayZ,
                                         lookup_context);
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return chain_context_would_apply_lookup (c,
                                             backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                             input.len, (const HBUINT16 *) input.arrayZ + 1,
                                             lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                             lookup.len, lookup.arrayZ, lookup_context);
  }

  const Coverage &get_coverage () const
  {
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    return this+input[0];
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {match_coverage},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
                                              backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
                                              input.len, (const HBUINT16 *) input.arrayZ + 1,
                                              lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
                                              lookup.len, lookup.arrayZ, lookup_context));
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();

    if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
      return_trace (false);

    for (auto& offset : it) {
      auto *o = out->serialize_append (c->serializer);
      if (unlikely (!o) || !o->serialize_subset (c, offset, base))
        return_trace (false);
    }

    return_trace (true);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out)) return_trace (false);
    if (unlikely (!c->serializer->embed (this->format))) return_trace (false);

    if (!serialize_coverage_offsets (c, backtrack.iter (), this))
      return_trace (false);

    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    if (!serialize_coverage_offsets (c, input.iter (), this))
      return_trace (false);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    if (!serialize_coverage_offsets (c, lookahead.iter (), this))
      return_trace (false);

    const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
    HBUINT16 lookupCount;
    lookupCount = lookupRecord.len;
    if (!c->serializer->copy (lookupCount)) return_trace (false);

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    for (unsigned i = 0; i < (unsigned) lookupCount; i++)
      if (!c->serializer->copy (lookupRecord[i], lookup_map)) return_trace (false);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  HBUINT16      format;             /* Format identifier--format = 3 */
  Array16OfOffset16To<Coverage>
                backtrack;          /* Array of coverage tables
                                     * in backtracking sequence, in glyph
                                     * sequence order */
  Array16OfOffset16To<Coverage>
                inputX;             /* Array of coverage
                                     * tables in input sequence, in glyph
                                     * sequence order */
  Array16OfOffset16To<Coverage>
                lookaheadX;         /* Array of coverage tables
                                     * in lookahead sequence, in glyph
                                     * sequence order */
  Array16Of<LookupRecord>
                lookupX;            /* Array of LookupRecords--in
                                     * design order */
  public:
  DEFINE_SIZE_MIN (10);
};
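
/* A rough layout sketch for ChainContextFormat3 (illustrative only): the four
 * arrays above are variable-length and simply follow one another in memory,
 * which is why only `backtrack` is addressed as a named member and the rest
 * are located with StructAfter<>:
 *
 *   format | backtrack[] | input[] | lookahead[] | lookupRecords[]
 *
 * The input sequence starts at the glyph the lookup is being applied to, so
 * the helpers above receive `input.len` together with `input.arrayZ + 1`: the
 * first input coverage is already matched via get_coverage ()/apply (), and
 * only the remaining coverages need to be matched as the rest of the run. */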

struct ChainContext
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format; /* Format identifier */
  ChainContextFormat1   format1;
  ChainContextFormat2   format2;
  ChainContextFormat3   format3;
  } u;
};


template <typename T>
struct ExtensionFormat1
{
  unsigned int get_type () const { return extensionLookupType; }

  template <typename X>
  const X& get_subtable () const
  { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), hb_forward<Ts> (ds)...));
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  { dispatch (c); }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  extensionLookupType != T::SubTable::Extension);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->extensionLookupType = extensionLookupType;

    const auto& src_offset =
        reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
    auto& dest_offset =
        reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);

    return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
  }

  protected:
  HBUINT16      format;               /* Format identifier. Set to 1. */
  HBUINT16      extensionLookupType;  /* Lookup type of subtable referenced
                                       * by ExtensionOffset (i.e. the
                                       * extension subtable). */
  Offset32      extensionOffset;      /* Offset to the extension subtable,
                                       * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};

template <typename T>
struct Extension
{
  unsigned int get_type () const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  const X& get_subtable () const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::SubTable> ();
    default:return Null (typename T::SubTable);
    }
  }

  // Specialization of dispatch for subset. dispatch() normally just
  // dispatches to the subtable this points to, but for subset
  // we need to run subset on this subtable too.
  template <typename ...Ts>
  typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
  {
    switch (u.format) {
    case 1: return u.format1.subset (c);
    default: return c->default_return_value ();
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;   /* Format identifier */
  ExtensionFormat1<T>   format1;
  } u;
};
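
/* Background note (general OpenType behaviour, not specific to the structs
 * above): Extension subtables exist because regular lookup subtables are
 * referenced through 16-bit offsets, which limits how far away they can live
 * from their Lookup table.  ExtensionFormat1 is a small fixed-size stub whose
 * Offset32 extensionOffset can reach subtables placed beyond that range, so
 * large GSUB/GPOS tables wrap their real subtables in extension records.
 * sanitize() above rejects extensionLookupType == Extension so the
 * indirection cannot recurse. */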


/*
 * GSUB/GPOS Common
 */

struct hb_ot_layout_lookup_accelerator_t
{
  template <typename TLookup>
  void init (const TLookup &lookup)
  {
    digest.init ();
    lookup.collect_coverage (&digest);

    subtables.init ();
    OT::hb_get_subtables_context_t c_get_subtables (subtables);
    lookup.dispatch (&c_get_subtables);
  }
  void fini () { subtables.fini (); }

  bool may_have (hb_codepoint_t g) const
  { return digest.may_have (g); }

  bool apply (hb_ot_apply_context_t *c) const
  {
    for (unsigned int i = 0; i < subtables.length; i++)
      if (subtables[i].apply (c))
        return true;
    return false;
  }

  private:
  hb_set_digest_t digest;
  hb_get_subtables_context_t::array_t subtables;
};
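
/* A minimal usage sketch (hypothetical driver code, not part of this header):
 * the accelerator is built once per lookup and then used as a cheap digest
 * pre-filter before trying the subtables in order:
 *
 *   hb_ot_layout_lookup_accelerator_t accel;
 *   accel.init (lookup);                              // coverage digest + subtable list
 *   ...
 *   if (accel.may_have (c->buffer->cur().codepoint))  // bloom-filter-style check, may give
 *     accel.apply (c);                                // false positives but never negatives
 *   ...
 *   accel.fini ();
 */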

struct GSUBGPOS
{
  bool has_data () const { return version.to_int (); }
  unsigned int get_script_count () const
  { return (this+scriptList).len; }
  const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
                                unsigned int *script_count /* IN/OUT */,
                                hb_tag_t *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  unsigned int get_feature_count () const
  { return (this+featureList).len; }
  hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  unsigned int get_feature_tags (unsigned int start_offset,
                                 unsigned int *feature_count /* IN/OUT */,
                                 hb_tag_t *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  unsigned int get_lookup_count () const
  { return (this+lookupList).len; }
  const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  bool find_variations_index (const int *coords, unsigned int num_coords,
                              unsigned int *index) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
           .find_index (coords, num_coords, index);
  }
  const Feature& get_feature_variation (unsigned int feature_index,
                                        unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
        version.to_int () >= 0x00010001u)
    {
      const Feature *feature = (this+featureVars).find_substitute (variations_index,
                                                                   feature_index);
      if (feature)
        return *feature;
    }
#endif
    return get_feature (feature_index);
  }
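
  /* A rough usage sketch (hypothetical caller, not part of this header): to
   * resolve a feature under font variations, callers first map the normalized
   * variation coordinates to a FeatureVariations record index, then ask for
   * the possibly substituted feature:
   *
   *   unsigned variations_index;
   *   table->find_variations_index (coords, num_coords, &variations_index);
   *   const OT::Feature &f = table->get_feature_variation (feature_index, variations_index);
   *
   * If no record matches, the index comes back as
   * FeatureVariations::NOT_FOUND_INDEX and get_feature_variation () falls
   * back to the unmodified feature. */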

  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
                                          hb_set_t *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
      (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
#endif
  }

  template <typename TLookup>
  void closure_lookups (hb_face_t *face,
                        const hb_set_t *glyphs,
                        hb_set_t *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    for (unsigned lookup_index : + hb_iter (lookup_indexes))
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }

  void prune_langsys (const hb_map_t *duplicate_feature_map,
                      hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map,
                      hb_set_t *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }

  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);

    typedef LookupOffsetList<TLookup> TLookupList;
    reinterpret_cast<Offset16To<TLookupList> &> (out->lookupList)
        .serialize_subset (c->subset_context,
                           reinterpret_cast<const Offset16To<TLookupList> &> (lookupList),
                           this,
                           c);

    reinterpret_cast<Offset16To<RecordListOfFeature> &> (out->featureList)
        .serialize_subset (c->subset_context,
                           reinterpret_cast<const Offset16To<RecordListOfFeature> &> (featureList),
                           this,
                           c);

    out->scriptList.serialize_subset (c->subset_context,
                                      scriptList,
                                      this,
                                      c);

#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
    {
      bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
      if (!ret)
      {
        out->version.major = 1;
        out->version.minor = 0;
      }
    }
#endif

    return_trace (true);
  }

  void find_duplicate_features (const hb_map_t *lookup_indices,
                                const hb_set_t *feature_indices,
                                hb_map_t *duplicate_feature_map /* OUT */) const
  {
    // find out duplicate features after subset
    unsigned prev = 0xFFFFu;
    for (unsigned i : feature_indices->iter ())
    {
      if (prev == 0xFFFFu)
      {
        duplicate_feature_map->set (i, i);
        prev = i;
        continue;
      }

      hb_tag_t t = get_feature_tag (i);
      hb_tag_t prev_t = get_feature_tag (prev);
      if (t != prev_t)
      {
        duplicate_feature_map->set (i, i);
        prev = i;
        continue;
      }

      const Feature& f = get_feature (i);
      const Feature& prev_f = get_feature (prev);

      auto f_iter =
      + hb_iter (f.lookupIndex)
      | hb_filter (lookup_indices)
      ;

      auto prev_iter =
      + hb_iter (prev_f.lookupIndex)
      | hb_filter (lookup_indices)
      ;

      if (f_iter.len () != prev_iter.len ())
      {
        duplicate_feature_map->set (i, i);
        prev = i;
        continue;
      }

      bool is_equal = true;
      for (auto _ : + hb_zip (f_iter, prev_iter))
        if (_.first != _.second) { is_equal = false; break; }

      if (is_equal == true) duplicate_feature_map->set (i, prev);
      else
      {
        duplicate_feature_map->set (i, i);
        prev = i;
      }
    }
  }
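
  /* Illustrative example (hypothetical indices, not from any real font): the
   * FeatureList is sorted by tag, so features sharing a tag come out of
   * feature_indices adjacent to each other and only neighbours need comparing.
   * If features 3 and 4 are both 'liga' and, after filtering through
   * lookup_indices, both retain lookups {2, 7}, then 4 maps to 3 in
   * duplicate_feature_map and later stages can collapse them; a 'liga'
   * feature whose surviving lookups differ keeps mapping to itself. */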

  void prune_features (const hb_map_t *lookup_indices, /* IN */
                       hb_set_t *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // This is the set of feature indices which have alternate versions defined
    // in the FeatureVariations table whose alternate version(s) intersect the
    // set of lookup indices.
    hb_set_t alternate_feature_indices;
    if (version.to_int () >= 0x00010001u)
      (this+featureVars).closure_features (lookup_indices, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error())) {
      feature_indices->successful = false;
      return;
    }
#endif

    for (unsigned i : feature_indices->iter())
    {
      const Feature& f = get_feature (i);
      hb_tag_t tag = get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
        // Note: Never ever drop feature 'pref', even if it's empty.
        // HarfBuzz chooses shaper for Khmer based on presence of this
        // feature. See thread at:
        // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
        continue;

      if (f.featureParams.is_null ()
          && !f.intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
          && !alternate_feature_indices.has (i)
#endif
         )
        feature_indices->del (i);
    }
  }

  unsigned int get_size () const
  {
    return min_size +
           (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef List16OfOffset16To<TLookup> TLookupList;
    if (unlikely (!(version.sanitize (c) &&
                    likely (version.major == 1) &&
                    scriptList.sanitize (c, this) &&
                    featureList.sanitize (c, this) &&
                    reinterpret_cast<const Offset16To<TLookupList> &> (lookupList).sanitize (c, this))))
      return_trace (false);

#ifndef HB_NO_VAR
    if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
      return_trace (false);
#endif

    return_trace (true);
  }

  template <typename T>
  struct accelerator_t
  {
    void init (hb_face_t *face)
    {
      this->table = hb_sanitize_context_t ().reference_table<T> (face);
      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
        hb_blob_destroy (this->table.get_blob ());
        this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_ot_layout_lookup_accelerator_t *) calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
      if (unlikely (!this->accels))
      {
        this->lookup_count = 0;
        this->table.destroy ();
        this->table = hb_blob_get_empty ();
      }

      for (unsigned int i = 0; i < this->lookup_count; i++)
        this->accels[i].init (table->get_lookup (i));
    }

    void fini ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
        this->accels[i].fini ();
      free (this->accels);
      this->table.destroy ();
    }

    hb_blob_ptr_t<T> table;
    unsigned int lookup_count;
    hb_ot_layout_lookup_accelerator_t *accels;
  };

  protected:
  FixedVersion<> version;     /* Version of the GSUB/GPOS table--initially set
                               * to 0x00010000u */
  Offset16To<ScriptList>
                scriptList;   /* ScriptList table */
  Offset16To<FeatureList>
                featureList;  /* FeatureList table */
  Offset16To<LookupList>
                lookupList;   /* LookupList table */
  Offset32To<FeatureVariations>
                featureVars;  /* Offset to Feature Variations
                               * table--from beginning of table
                               * (may be NULL). Introduced
                               * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (10);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */