/*
 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
 * Copyright © 2010,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_HH
#define HB_OT_LAYOUT_GSUBGPOS_HH

#include "hb.hh"
#include "hb-buffer.hh"
#include "hb-map.hh"
#include "hb-set.hh"
#include "hb-ot-map.hh"
#include "hb-ot-layout-common.hh"
#include "hb-ot-layout-gdef-table.hh"


namespace OT {

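/*
 * This file defines the various "context" objects that are dispatched over
 * GSUB/GPOS lookup subtables via hb_dispatch_context_t.  Each context
 * implements one operation (intersection test, glyph closure, glyph
 * collection, would-apply test, actual application, ...); the lookup and
 * subtable types only need to implement the corresponding method.
 */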
struct hb_intersects_context_t :
       hb_dispatch_context_t<hb_intersects_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  const hb_set_t *glyphs;

  hb_intersects_context_t (const hb_set_t *glyphs_) :
                           glyphs (glyphs_) {}
};

struct hb_have_non_1to1_context_t :
       hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
};

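/*
 * Glyph closure: given a seed set of glyphs, hb_closure_context_t drives GSUB
 * lookups to compute the transitive closure of glyphs reachable through
 * substitution; the subsetter uses this to decide which glyphs to retain.
 * Rough usage sketch (simplified; the real entry point is
 * hb_ot_layout_lookups_substitute_closure()):
 *
 *   hb_map_t done_lookups_glyph_count;
 *   hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> done_lookups_glyph_set;
 *   hb_closure_context_t c (face, glyphs,
 *                           &done_lookups_glyph_count, &done_lookups_glyph_set);
 *   // each lookup's closure (&c) adds reachable glyphs to c.output;
 *   // the destructor flush()es output back into the glyphs set.
 */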
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
    nesting_level_left++;
  }

  void reset_lookup_visit_count ()
  { lookup_count = 0; }

  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }

  bool should_visit_lookup (unsigned int lookup_index)
  {
    if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
      return false;

    if (is_lookup_done (lookup_index))
      return false;

    return true;
  }

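  /*
   * Memoization: a lookup counts as "done" if it has already been visited
   * with a superset of the currently active glyphs.  We track, per lookup,
   * the glyph-set population at the last visit plus the glyphs covered so
   * far, and skip re-closing when nothing new can come out of the visit.
   */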
  bool is_lookup_done (unsigned int lookup_index)
  {
    if (unlikely (done_lookups_glyph_count->in_error () ||
                  done_lookups_glyph_set->in_error ()))
      return true;

    /* Have we visited this lookup with the current set of glyphs? */
    if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
    {
      done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());

      if (!done_lookups_glyph_set->has (lookup_index))
      {
        if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
          return true;
      }

      done_lookups_glyph_set->get (lookup_index)->clear ();
    }

    hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
    if (unlikely (covered_glyph_set->in_error ()))
      return true;
    if (parent_active_glyphs ().is_subset (*covered_glyph_set))
      return true;

    covered_glyph_set->union_ (parent_active_glyphs ());
    return false;
  }

  const hb_set_t& previous_parent_active_glyphs ()
  {
    if (active_glyphs_stack.length <= 1)
      return *glyphs;

    return active_glyphs_stack[active_glyphs_stack.length - 2];
  }

  const hb_set_t& parent_active_glyphs ()
  {
    if (!active_glyphs_stack)
      return *glyphs;

    return active_glyphs_stack.tail ();
  }

  hb_set_t* push_cur_active_glyphs ()
  {
    hb_set_t *s = active_glyphs_stack.push ();
    if (unlikely (active_glyphs_stack.in_error ()))
      return nullptr;
    return s;
  }

  bool pop_cur_done_glyphs ()
  {
    if (!active_glyphs_stack)
      return false;

    active_glyphs_stack.pop ();
    return true;
  }

  hb_face_t *face;
  hb_set_t *glyphs;
  hb_set_t output[1];
  hb_vector_t<hb_set_t> active_glyphs_stack;
  recurse_func_t recurse_func = nullptr;
  unsigned int nesting_level_left;

  hb_closure_context_t (hb_face_t *face_,
                        hb_set_t *glyphs_,
                        hb_map_t *done_lookups_glyph_count_,
                        hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_,
                        unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                        face (face_),
                        glyphs (glyphs_),
                        nesting_level_left (nesting_level_left_),
                        done_lookups_glyph_count (done_lookups_glyph_count_),
                        done_lookups_glyph_set (done_lookups_glyph_set_)
  {}

  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  void flush ()
  {
    output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID); /* Remove invalid glyphs. */
    glyphs->union_ (*output);
    output->clear ();
    active_glyphs_stack.pop ();
    active_glyphs_stack.reset ();
  }

  private:
  hb_map_t *done_lookups_glyph_count;
  hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set;
  unsigned int lookup_count = 0;
};


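/*
 * Lookup closure: computes the set of lookups transitively reachable from a
 * set of seed lookups, following the recursive references in (Chain)Context
 * subtables.  The subsetter uses this to decide which lookups to retain;
 * lookups that can never apply against the retained glyph set are recorded
 * as inactive.
 */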
struct hb_closure_lookups_context_t :
       hb_dispatch_context_t<hb_closure_lookups_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Return if new lookup was recursed to before. */
    if (lookup_limit_exceeded ()
        || visited_lookups->in_error ()
        || visited_lookups->has (lookup_index))
      // Don't increment lookup count here, that will be done in the call to closure_lookups()
      // made by recurse_func.
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
  }

  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  bool lookup_limit_exceeded ()
  {
    bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
    if (ret)
      DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
    return ret;
  }

  bool is_lookup_visited (unsigned lookup_index)
  {
    if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
    {
      DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
                 lookup_count, lookup_index);
      return true;
    }

    if (unlikely (visited_lookups->in_error ()))
      return true;

    return visited_lookups->has (lookup_index);
  }

  hb_face_t *face;
  const hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;

  hb_closure_lookups_context_t (hb_face_t *face_,
                                const hb_set_t *glyphs_,
                                hb_set_t *visited_lookups_,
                                hb_set_t *inactive_lookups_,
                                unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                                face (face_),
                                glyphs (glyphs_),
                                recurse_func (nullptr),
                                nesting_level_left (nesting_level_left_),
                                visited_lookups (visited_lookups_),
                                inactive_lookups (inactive_lookups_),
                                lookup_count (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  private:
  hb_set_t *visited_lookups;
  hb_set_t *inactive_lookups;
  unsigned int lookup_count;
};

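/*
 * Would-apply: tests whether a lookup would apply to a given glyph sequence,
 * without touching a buffer.  This backs hb_ot_layout_lookup_would_substitute().
 * zero_context requests that only context-free matches count, i.e. subtables
 * with backtrack or lookahead requirements are rejected.
 */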
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;
  unsigned int len;
  bool zero_context;

  hb_would_apply_context_t (hb_face_t *face_,
                            const hb_codepoint_t *glyphs_,
                            unsigned int len_,
                            bool zero_context_) :
                            face (face_),
                            glyphs (glyphs_),
                            len (len_),
                            zero_context (zero_context_) {}
};

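/*
 * Glyph collection: gathers, for each lookup, the glyphs it can match
 * before/as input/after, and the glyphs it can output.  Recursion into
 * referenced lookups only collects output glyphs; see the note in
 * recurse() below.
 */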
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return;

    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups->add (lookup_index);
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;
  unsigned int nesting_level_left;

  hb_collect_glyphs_context_t (hb_face_t *face_,
                               hb_set_t *glyphs_before, /* OUT.  May be NULL */
                               hb_set_t *glyphs_input,  /* OUT.  May be NULL */
                               hb_set_t *glyphs_after,  /* OUT.  May be NULL */
                               hb_set_t *glyphs_output, /* OUT.  May be NULL */
                               unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
                               face (face_),
                               before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
                               input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
                               after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
                               output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
                               recurse_func (nullptr),
                               recursed_lookups (hb_set_create ()),
                               nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



template <typename set_t>
struct hb_collect_coverage_context_t :
       hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
{
  typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
  template <typename T>
  return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value () { return Null (Coverage); }
  bool stop_sublookup_iteration (return_t r) const
  {
    r.collect_coverage (set);
    return false;
  }

  hb_collect_coverage_context_t (set_t *set_) :
                                 set (set_) {}

  set_t *set;
};

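/*
 * Apply context: the work-horse of shaping.  Carries the font, buffer, GDEF
 * accelerator, lookup mask/props, and the skipping iterators while a lookup
 * is applied to the buffer.  table_index is 0 for GSUB and 1 for GPOS.
 * Rough usage sketch (simplified; the real driver is apply_string() in
 * hb-ot-layout.cc):
 *
 *   hb_ot_apply_context_t c (table_index, font, buffer, table_blob);
 *   c.set_recurse_func (...);          // dispatch into nested lookups
 *   c.set_lookup_index (i);
 *   c.set_lookup_mask (lookup_mask);
 *   c.set_lookup_props (lookup.get_props ());
 *   // then walk the buffer, calling lookup.apply (&c) at each position
 */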
struct hb_ot_apply_context_t :
       hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
  struct matcher_t
  {
    typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

    void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    void set_ignore_hidden (bool ignore_hidden_) { ignore_hidden = ignore_hidden_; }
    void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    void set_mask (hb_mask_t mask_) { mask = mask_; }
    void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; }
    void set_syllable (uint8_t syllable_) { syllable = per_syllable ? syllable_ : 0; }
    void set_match_func (match_func_t match_func_,
                         const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    may_match_t may_match (hb_glyph_info_t &info,
                           hb_codepoint_t glyph_data) const
    {
      if (!(info.mask & mask) ||
          (syllable && syllable != info.syllable ()))
        return MATCH_NO;

      if (match_func)
        return match_func (info, glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    may_skip_t may_skip (const hb_ot_apply_context_t *c,
                         const hb_glyph_info_t &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
        return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
                    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
                    (ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
                    (ignore_hidden || !_hb_glyph_info_is_hidden (&info))))
        return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props = 0;
    hb_mask_t mask = -1;
    bool ignore_zwnj = false;
    bool ignore_zwj = false;
    bool ignore_hidden = false;
    bool per_syllable = false;
    uint8_t syllable = 0;
    match_func_t match_func = nullptr;
    const void *match_data = nullptr;
  };

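  /*
   * The skipping iterator walks the buffer while transparently skipping
   * glyphs the lookup is set to ignore (per lookup flags, ZWJ/ZWNJ handling,
   * mark-filtering sets).  next() scans forward over info[], prev() scans
   * backward over out_info[]; both report the position the caller must mark
   * unsafe-to-break when matching fails.
   */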
  struct skipping_iterator_t
  {
    void init (hb_ot_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      end = c->buffer->len;
      match_glyph_data16 = nullptr;
#ifndef HB_NO_BEYOND_64K
      match_glyph_data24 = nullptr;
#endif
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching context, or asked to. */
      matcher.set_ignore_zwj (context_match || c->auto_zwj);
      /* Ignore hidden glyphs (like CGJ) during GPOS. */
      matcher.set_ignore_hidden (c->table_index == 1);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
      /* Per syllable matching is only for GSUB. */
      matcher.set_per_syllable (c->table_index == 0 && c->per_syllable);
      matcher.set_syllable (0);
    }
    void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    void set_match_func (matcher_t::match_func_t match_func_,
                         const void *match_data_)
    {
      matcher.set_match_func (match_func_, match_data_);
    }
    void set_glyph_data (const HBUINT16 glyph_data[])
    {
      match_glyph_data16 = glyph_data;
#ifndef HB_NO_BEYOND_64K
      match_glyph_data24 = nullptr;
#endif
    }
#ifndef HB_NO_BEYOND_64K
    void set_glyph_data (const HBUINT24 glyph_data[])
    {
      match_glyph_data16 = nullptr;
      match_glyph_data24 = glyph_data;
    }
#endif

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    void reset (unsigned int start_index_)
    {
      idx = start_index_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    void reset_fast (unsigned int start_index_)
    {
      // Doesn't set end or syllable.  Used by GPOS which doesn't care / change.
      idx = start_index_;
    }

    void reject ()
    {
      backup_glyph_data ();
    }

    matcher_t::may_skip_t
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    may_skip (const hb_glyph_info_t &info) const
    { return matcher.may_skip (c, info); }

    enum match_t {
      MATCH,
      NOT_MATCH,
      SKIP
    };

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    match_t match (hb_glyph_info_t &info)
    {
      matcher_t::may_skip_t skip = matcher.may_skip (c, info);
      if (unlikely (skip == matcher_t::SKIP_YES))
        return SKIP;

      matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ());
      if (match == matcher_t::MATCH_YES ||
          (match == matcher_t::MATCH_MAYBE &&
           skip == matcher_t::SKIP_NO))
        return MATCH;

      if (skip == matcher_t::SKIP_NO)
        return NOT_MATCH;

      return SKIP;
    }

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    bool next (unsigned *unsafe_to = nullptr)
    {
      const signed stop = (signed) end - 1;
      while ((signed) idx < stop)
      {
        idx++;
        switch (match (c->buffer->info[idx]))
        {
          case MATCH:
          {
            advance_glyph_data ();
            return true;
          }
          case NOT_MATCH:
          {
            if (unsafe_to)
              *unsafe_to = idx + 1;
            return false;
          }
          case SKIP:
            continue;
        }
      }
      if (unsafe_to)
        *unsafe_to = end;
      return false;
    }
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    bool prev (unsigned *unsafe_from = nullptr)
    {
      const unsigned stop = 0;
      while (idx > stop)
      {
        idx--;
        switch (match (c->buffer->out_info[idx]))
        {
          case MATCH:
          {
            advance_glyph_data ();
            return true;
          }
          case NOT_MATCH:
          {
            if (unsafe_from)
              *unsafe_from = hb_max (1u, idx) - 1u;
            return false;
          }
          case SKIP:
            continue;
        }
      }
      if (unsafe_from)
        *unsafe_from = 0;
      return false;
    }

    HB_ALWAYS_INLINE
    hb_codepoint_t
    get_glyph_data ()
    {
      if (match_glyph_data16) return *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) return *match_glyph_data24;
#endif
      return 0;
    }
    HB_ALWAYS_INLINE
    void
    advance_glyph_data ()
    {
      if (match_glyph_data16) match_glyph_data16++;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) match_glyph_data24++;
#endif
    }
    void
    backup_glyph_data ()
    {
      if (match_glyph_data16) match_glyph_data16--;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) match_glyph_data24--;
#endif
    }

    unsigned int idx;
    protected:
    hb_ot_apply_context_t *c;
    matcher_t matcher;
    const HBUINT16 *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
    const HBUINT24 *match_glyph_data24;
#endif

    unsigned int end;
  };


  const char *get_name () { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  return_t recurse (unsigned int sub_lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
    {
      buffer->shaping_failed = true;
      return default_return_value ();
    }

    nesting_level_left--;
    bool ret = recurse_func (this, sub_lookup_index);
    nesting_level_left++;
    return ret;
  }

  skipping_iterator_t iter_input, iter_context;

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_sanitize_context_t sanitizer;
  recurse_func_t recurse_func = nullptr;
  const GDEF &gdef;
  const GDEF::accelerator_t &gdef_accel;
  const ItemVariationStore &var_store;
  ItemVariationStore::cache_t *var_store_cache;
  hb_set_digest_t digest;

  hb_direction_t direction;
  hb_mask_t lookup_mask = 1;
  unsigned int lookup_index = (unsigned) -1;
  unsigned int lookup_props = 0;
  unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL;

  bool has_glyph_classes;
  bool auto_zwnj = true;
  bool auto_zwj = true;
  bool per_syllable = false;
  bool random = false;
  unsigned new_syllables = (unsigned) -1;

  signed last_base = -1; // GPOS uses
  unsigned last_base_until = 0; // GPOS uses

  hb_ot_apply_context_t (unsigned int table_index_,
                         hb_font_t *font_,
                         hb_buffer_t *buffer_,
                         hb_blob_t *table_blob_) :
                        table_index (table_index_),
                        font (font_), face (font->face), buffer (buffer_),
                        sanitizer (table_blob_),
                        gdef (
#ifndef HB_NO_OT_LAYOUT
                              *face->table.GDEF->table
#else
                              Null (GDEF)
#endif
                             ),
                        gdef_accel (
#ifndef HB_NO_OT_LAYOUT
                              *face->table.GDEF
#else
                              Null (GDEF::accelerator_t)
#endif
                             ),
                        var_store (gdef.get_var_store ()),
                        var_store_cache (
#ifndef HB_NO_VAR
                                         table_index == 1 && font->num_coords ? var_store.create_cache () : nullptr
#else
                                         nullptr
#endif
                                        ),
                        digest (buffer_->digest ()),
                        direction (buffer_->props.direction),
                        has_glyph_classes (gdef.has_glyph_classes ())
  { init_iters (); }

  ~hb_ot_apply_context_t ()
  {
#ifndef HB_NO_VAR
    ItemVariationStore::destroy_cache (var_store_cache);
#endif
  }

  void init_iters ()
  {
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  void set_lookup_mask (hb_mask_t mask, bool init = true) { lookup_mask = mask; last_base = -1; last_base_until = 0; if (init) init_iters (); }
  void set_auto_zwj (bool auto_zwj_, bool init = true) { auto_zwj = auto_zwj_; if (init) init_iters (); }
  void set_auto_zwnj (bool auto_zwnj_, bool init = true) { auto_zwnj = auto_zwnj_; if (init) init_iters (); }
  void set_per_syllable (bool per_syllable_, bool init = true) { per_syllable = per_syllable_; if (init) init_iters (); }
  void set_random (bool random_) { random = random_; }
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }

  uint32_t random_number ()
  {
    /* http://www.cplusplus.com/reference/random/minstd_rand/ */
    buffer->random_state = buffer->random_state * 48271 % 2147483647;
    return buffer->random_state;
  }

  bool match_properties_mark (hb_codepoint_t glyph,
                              unsigned int glyph_props,
                              unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index. */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef_accel.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified." */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  bool check_glyph_property (const hb_glyph_info_t *info,
                             unsigned int match_props) const
  {
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlag::IgnoreLigatures. */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (info->codepoint, glyph_props, match_props);

    return true;
  }

  void _set_glyph_class (hb_codepoint_t glyph_index,
                         unsigned int class_guess = 0,
                         bool ligature = false,
                         bool component = false)
  {
    digest.add (glyph_index);

    if (new_syllables != (unsigned) -1)
      buffer->cur().syllable() = new_syllables;

    unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
    props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit. */
      props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef_accel.get_glyph_props (glyph_index));
    }
    else if (class_guess)
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
    }
    else
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
  }

  void replace_glyph (hb_codepoint_t glyph_index)
  {
    _set_glyph_class (glyph_index);
    (void) buffer->replace_glyph (glyph_index);
  }
  void replace_glyph_inplace (hb_codepoint_t glyph_index)
  {
    _set_glyph_class (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
                                    unsigned int class_guess)
  {
    _set_glyph_class (glyph_index, class_guess, true);
    (void) buffer->replace_glyph (glyph_index);
  }
  void output_glyph_for_component (hb_codepoint_t glyph_index,
                                   unsigned int class_guess)
  {
    _set_glyph_class (glyph_index, class_guess, false, true);
    (void) buffer->output_glyph (glyph_index);
  }
};


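/*
 * Subtable acceleration: flattens each lookup's subtables into an array of
 * (coverage digest, apply function pointer) entries so that per-glyph
 * application can cheaply reject subtables that cannot match.  At most one
 * subtable per lookup gets to use the cache slot; the one reporting the
 * highest cache_cost() wins (see the "Cache handling" note in dispatch()).
 */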
struct hb_accelerate_subtables_context_t :
       hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
  template <typename Type>
  static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return typed_obj->apply (c);
  }

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  template <typename T>
  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply_cached (c) )
  template <typename T>
  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
  template <typename Type>
  static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return apply_cached_ (typed_obj, c, hb_prioritize);
  }

  template <typename T>
  static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
  template <typename T>
  static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
  template <typename Type>
  static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
  {
    const Type *typed_obj = (const Type *) obj;
    return cache_func_ (typed_obj, c, enter, hb_prioritize);
  }
#endif

  typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
  typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);

  struct hb_applicable_t
  {
    friend struct hb_accelerate_subtables_context_t;
    friend struct hb_ot_layout_lookup_accelerator_t;

    template <typename T>
    void init (const T &obj_,
               hb_apply_func_t apply_func_
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
               , hb_apply_func_t apply_cached_func_
               , hb_cache_func_t cache_func_
#endif
               )
    {
      obj = &obj_;
      apply_func = apply_func_;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
      apply_cached_func = apply_cached_func_;
      cache_func = cache_func_;
#endif
      digest.init ();
      obj_.get_coverage ().collect_coverage (&digest);
    }

    bool apply (hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
    }
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    bool apply_cached (hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
    }
    bool cache_enter (hb_ot_apply_context_t *c) const
    {
      return cache_func (obj, c, true);
    }
    void cache_leave (hb_ot_apply_context_t *c) const
    {
      cache_func (obj, c, false);
    }
#endif

    private:
    const void *obj;
    hb_apply_func_t apply_func;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    hb_apply_func_t apply_cached_func;
    hb_cache_func_t cache_func;
#endif
    hb_set_digest_t digest;
  };

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  template <typename T>
  auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () )
  template <typename T>
  auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u )
#endif

  /* Dispatch interface. */
  template <typename T>
  return_t dispatch (const T &obj)
  {
    hb_applicable_t *entry = &array[i++];

    entry->init (obj,
                 apply_to<T>
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
                 , apply_cached_to<T>
                 , cache_func_to<T>
#endif
                 );

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    /* Cache handling
     *
     * We allow one subtable from each lookup to use a cache.  The assumption
     * being that multiple subtables of the same lookup cannot use a cache
     * because the resources they would use will collide.  As such, we ask
     * each subtable to tell us how much it costs (which a cache would avoid),
     * and we allocate the cache opportunity to the costliest subtable.
     */
    unsigned cost = cache_cost (obj, hb_prioritize);
    if (cost > cache_user_cost)
    {
      cache_user_idx = i - 1;
      cache_user_cost = cost;
    }
#endif

    return hb_empty_t ();
  }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_accelerate_subtables_context_t (hb_applicable_t *array_) :
                                     array (array_) {}

  hb_applicable_t *array;
  unsigned i = 0;

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  unsigned cache_user_idx = (unsigned) -1;
  unsigned cache_user_cost = 0;
#endif
};


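/*
 * The (Chain)Context subtables come in glyph, class, and coverage flavours.
 * Rather than templating every algorithm on the flavour, the code below
 * passes small function pointers (intersects / collect / match) plus an
 * opaque data pointer, so one implementation serves all three.
 */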
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
  intersected_glyphs_func_t intersected_glyphs;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};
struct ChainContextApplyFuncs
{
  match_func_t match[3];
};


static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
{
  return glyphs->has (value);
}
static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  hb_map_t *map = (hb_map_t *) cache;

  hb_codepoint_t *cached_v;
  if (map->has (value, &cached_v))
    return *cached_v;

  bool v = class_def.intersects_class (glyphs, value);
  map->set (value, v);

  return v;
}
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  return (data+coverage).intersects (glyphs);
}


static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
  unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
  intersected_glyphs->add (g);
}

using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;

static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);

  intersected_class_cache_t *map = (intersected_class_cache_t *) cache;

  hb_set_t *cached_v;
  if (map->has (value, &cached_v))
  {
    intersected_glyphs->union_ (*cached_v);
    return;
  }

  hb_set_t v;
  class_def.intersected_class_glyphs (glyphs, value, &v);

  intersected_glyphs->union_ (v);

  map->set (value, std::move (v));
}

static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  (data+coverage).intersect_set (*glyphs, *intersected_glyphs);
}


template <typename HBUINT>
static inline bool array_is_subset_of (const hb_set_t *glyphs,
                                       unsigned int count,
                                       const HBUINT values[],
                                       intersects_func_t intersects_func,
                                       const void *intersects_data,
                                       void *cache)
{
  for (const auto &_ : + hb_iter (values, count))
    if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
  return true;
}


static inline void collect_glyph (hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, unsigned value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.collect_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, unsigned value, const void *data)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  (data+coverage).collect_coverage (glyphs);
}
template <typename HBUINT>
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
                                  hb_set_t *glyphs,
                                  unsigned int count,
                                  const HBUINT values[],
                                  collect_glyphs_func_t collect_func,
                                  const void *collect_data)
{
  return
  + hb_iter (values, count)
  | hb_apply ([&] (const HBUINT &_) { collect_func (glyphs, _, collect_data); })
  ;
}


static inline bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
{
  return true;
}
static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
{
  return info.codepoint == value;
}
static inline bool match_class (hb_glyph_info_t &info, unsigned value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (info.codepoint) == value;
}
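/* The cached variants below repurpose the per-glyph syllable() byte to
 * memoize the glyph's class, so repeated context matching avoids ClassDef
 * lookups.  They are meant for use when per-syllable matching is not in
 * effect, since they clobber real syllable values.  The "1"/"2" variants
 * split the byte into two nibbles so chain-context can cache against two
 * different ClassDefs at once; class values that don't fit fall back to
 * the uncached path. */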
static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = info.syllable();
  if (klass < 255)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 255))
    info.syllable() = klass;
  return klass == value;
}
static inline bool match_class_cached1 (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = info.syllable() & 0x0F;
  if (klass < 15)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 15))
    info.syllable() = (info.syllable() & 0xF0) | klass;
  return klass == value;
}
static inline bool match_class_cached2 (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = (info.syllable() & 0xF0) >> 4;
  if (klass < 15)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 15))
    info.syllable() = (info.syllable() & 0x0F) | (klass << 4);
  return klass == value;
}
static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED;
}

template <typename HBUINT>
static inline bool would_match_input (hb_would_apply_context_t *c,
                                      unsigned int count, /* Including the first glyph (not matched) */
                                      const HBUINT input[], /* Array of input values--start with second glyph */
                                      match_func_t match_func,
                                      const void *match_data)
{
  if (count != c->len)
    return false;

  for (unsigned int i = 1; i < count; i++)
  {
    hb_glyph_info_t info;
    info.codepoint = c->glyphs[i];
    if (likely (!match_func (info, input[i - 1], match_data)))
      return false;
  }

  return true;
}
template <typename HBUINT>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
static bool match_input (hb_ot_apply_context_t *c,
                         unsigned int count, /* Including the first glyph (not matched) */
                         const HBUINT input[], /* Array of input values--start with second glyph */
                         match_func_t match_func,
                         const void *match_data,
                         unsigned int *end_position,
                         unsigned int *match_positions,
                         unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx);
  skippy_iter.set_match_func (match_func, match_data);
  skippy_iter.set_glyph_data (input);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct),
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/harfbuzz/harfbuzz/issues/545
   */

  unsigned int total_component_count = 0;

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  for (unsigned int i = 1; i < count; i++)
  {
    unsigned unsafe_to;
    if (!skippy_iter.next (&unsafe_to))
    {
      *end_position = unsafe_to;
      return_trace (false);
    }

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
        /* ...unless, we are attached to a base ligature and that base
         * ligature is ignorable. */
        if (ligbase == LIGBASE_NOT_CHECKED)
        {
          bool found = false;
          const auto *out = buffer->out_info;
          unsigned int j = buffer->out_len;
          while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
          {
            if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
            {
              j--;
              found = true;
              break;
            }
            j--;
          }

          if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
            ligbase = LIGBASE_MAY_SKIP;
          else
            ligbase = LIGBASE_MAY_NOT_SKIP;
        }

        if (ligbase == LIGBASE_MAY_NOT_SKIP)
          return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
        return_trace (false);
    }

    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_position = skippy_iter.idx + 1;

  if (p_total_component_count)
  {
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    *p_total_component_count = total_component_count;
  }

  match_positions[0] = buffer->idx;

  return_trace (true);
}
static inline bool ligate_input (hb_ot_apply_context_t *c,
                                 unsigned int count, /* Including the first glyph */
                                 const unsigned int *match_positions, /* Including the first glyph */
                                 unsigned int match_end,
                                 hb_codepoint_t lig_glyph,
                                 unsigned int total_component_count)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, match_end);

  /* - If a base and one or more marks ligate, consider that as a base, NOT
   *   ligature, such that all following marks can still attach to it.
   *   https://github.com/harfbuzz/harfbuzz/issues/1109
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *   If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
  for (unsigned int i = 1; i < count; i++)
    if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
    {
      is_base_ligature = false;
      is_mark_ligature = false;
      break;
    }
  bool is_ligature = !is_base_ligature && !is_mark_ligature;

  unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
  unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (is_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  for (unsigned int i = 1; i < count; i++)
  {
    while (buffer->idx < match_positions[i] && buffer->successful)
    {
      if (is_ligature)
      {
        unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
        if (this_comp == 0)
          this_comp = last_num_components;
        assert (components_so_far >= last_num_components);
        unsigned int new_lig_comp = components_so_far - last_num_components +
                                    hb_min (this_comp, last_num_components);
        _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      (void) buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id)
  {
    /* Re-adjust components for any marks following. */
    for (unsigned i = buffer->idx; i < buffer->len; ++i)
    {
      if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;

      unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
      if (!this_comp) break;

      assert (components_so_far >= last_num_components);
      unsigned new_lig_comp = components_so_far - last_num_components +
                              hb_min (this_comp, last_num_components);
      _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
    }
  }
  return_trace (true);
}

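/* Context matching directions: match_backtrack() walks backwards from the
 * current position through the already-output glyphs (out_info), while
 * match_lookahead() walks forward from just past the matched input.  Both
 * report the boundary for unsafe-to-break bookkeeping when matching fails. */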
1500 template <typename HBUINT>
1501 #ifndef HB_OPTIMIZE_SIZE
1502 HB_ALWAYS_INLINE
1503 #endif
match_backtrack(hb_ot_apply_context_t * c,unsigned int count,const HBUINT backtrack[],match_func_t match_func,const void * match_data,unsigned int * match_start)1504 static bool match_backtrack (hb_ot_apply_context_t *c,
1505 unsigned int count,
1506 const HBUINT backtrack[],
1507 match_func_t match_func,
1508 const void *match_data,
1509 unsigned int *match_start)
1510 {
1511 TRACE_APPLY (nullptr);
1512
1513 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1514 skippy_iter.reset (c->buffer->backtrack_len ());
1515 skippy_iter.set_match_func (match_func, match_data);
1516 skippy_iter.set_glyph_data (backtrack);
1517
1518 for (unsigned int i = 0; i < count; i++)
1519 {
1520 unsigned unsafe_from;
1521 if (!skippy_iter.prev (&unsafe_from))
1522 {
1523 *match_start = unsafe_from;
1524 return_trace (false);
1525 }
1526 }
1527
1528 *match_start = skippy_iter.idx;
1529 return_trace (true);
1530 }
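/* Note: per the OpenType spec the backtrack array is stored in reverse
 * logical order, backtrack[0] being the glyph closest to the input
 * sequence; that is why walking backwards with skippy_iter.prev ()
 * consumes it front to back. */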
1531
1532 template <typename HBUINT>
1533 #ifndef HB_OPTIMIZE_SIZE
1534 HB_ALWAYS_INLINE
1535 #endif
1536 static bool match_lookahead (hb_ot_apply_context_t *c,
1537 unsigned int count,
1538 const HBUINT lookahead[],
1539 match_func_t match_func,
1540 const void *match_data,
1541 unsigned int start_index,
1542 unsigned int *end_index)
1543 {
1544 TRACE_APPLY (nullptr);
1545
1546 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1547 assert (start_index >= 1);
1548 skippy_iter.reset (start_index - 1);
1549 skippy_iter.set_match_func (match_func, match_data);
1550 skippy_iter.set_glyph_data (lookahead);
1551
1552 for (unsigned int i = 0; i < count; i++)
1553 {
1554 unsigned unsafe_to;
1555 if (!skippy_iter.next (&unsafe_to))
1556 {
1557 *end_index = unsafe_to;
1558 return_trace (false);
1559 }
1560 }
1561
1562 *end_index = skippy_iter.idx + 1;
1563 return_trace (true);
1564 }
1565
1566
1567
1568 struct LookupRecord
1569 {
1570 bool serialize (hb_serialize_context_t *c,
1571 const hb_map_t *lookup_map) const
1572 {
1573 TRACE_SERIALIZE (this);
1574 auto *out = c->embed (*this);
1575 if (unlikely (!out)) return_trace (false);
1576
1577 return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
1578 }
1579
1580 bool sanitize (hb_sanitize_context_t *c) const
1581 {
1582 TRACE_SANITIZE (this);
1583 return_trace (c->check_struct (this));
1584 }
1585
1586 HBUINT16 sequenceIndex; /* Index into current glyph
1587 * sequence--first glyph = 0 */
1588 HBUINT16 lookupListIndex; /* Lookup to apply to that
1589 * position--zero-based */
1590 public:
1591 DEFINE_SIZE_STATIC (4);
1592 };
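/* Illustrative example (made-up indices): a rule matching three glyphs
 * might carry a LookupRecord { sequenceIndex = 1, lookupListIndex = 4 },
 * i.e. "once the context matches, apply lookup #4 from the lookup list
 * at the second matched position". apply_lookup () below implements that. */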
1593
1594 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
1595 const hb_array_t<const LookupRecord> lookupRecords,
1596 const hb_map_t *lookup_map)
1597 {
1598 unsigned count = 0;
1599 for (const LookupRecord& r : lookupRecords)
1600 {
1601 if (!lookup_map->has (r.lookupListIndex))
1602 continue;
1603
1604 if (!r.serialize (c, lookup_map))
1605 return 0;
1606
1607 count++;
1608 }
1609 return count;
1610 }
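/* Returns the number of records actually written; records whose lookup is
 * not retained in lookup_map (e.g. dropped by the subsetter) are skipped,
 * and 0 doubles as the error return. */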
1611
1612 enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
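/* These map to OpenType's SequenceContextFormat 1 (rules keyed on glyph
 * ids), 2 (rules keyed on ClassDef classes), and 3 (one inline rule made
 * of per-position Coverage tables). */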
1613
1614 template <typename HBUINT>
1615 static void context_closure_recurse_lookups (hb_closure_context_t *c,
1616 unsigned inputCount, const HBUINT input[],
1617 unsigned lookupCount,
1618 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
1619 unsigned value,
1620 ContextFormat context_format,
1621 const void *data,
1622 intersected_glyphs_func_t intersected_glyphs_func,
1623 void *cache)
1624 {
1625 hb_set_t covered_seq_indicies;
1626 hb_set_t pos_glyphs;
1627 for (unsigned int i = 0; i < lookupCount; i++)
1628 {
1629 unsigned seqIndex = lookupRecord[i].sequenceIndex;
1630 if (seqIndex >= inputCount) continue;
1631
1632 bool has_pos_glyphs = false;
1633
1634 if (!covered_seq_indicies.has (seqIndex))
1635 {
1636 has_pos_glyphs = true;
1637 pos_glyphs.clear ();
1638 if (seqIndex == 0)
1639 {
1640 switch (context_format) {
1641 case ContextFormat::SimpleContext:
1642 pos_glyphs.add (value);
1643 break;
1644 case ContextFormat::ClassBasedContext:
1645 intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
1646 break;
1647 case ContextFormat::CoverageBasedContext:
1648 pos_glyphs.set (c->parent_active_glyphs ());
1649 break;
1650 }
1651 }
1652 else
1653 {
1654 const void *input_data = input;
1655 unsigned input_value = seqIndex - 1;
1656 if (context_format != ContextFormat::SimpleContext)
1657 {
1658 input_data = data;
1659 input_value = input[seqIndex - 1];
1660 }
1661
1662 intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
1663 }
1664 }
1665
1666 covered_seq_indicies.add (seqIndex);
1667 hb_set_t *cur_active_glyphs = c->push_cur_active_glyphs ();
1668 if (unlikely (!cur_active_glyphs))
1669 return;
1670 if (has_pos_glyphs) {
1671 *cur_active_glyphs = std::move (pos_glyphs);
1672 } else {
1673 *cur_active_glyphs = *c->glyphs;
1674 }
1675
1676 unsigned endIndex = inputCount;
1677 if (context_format == ContextFormat::CoverageBasedContext)
1678 endIndex += 1;
1679
1680 c->recurse (lookupRecord[i].lookupListIndex, &covered_seq_indicies, seqIndex, endIndex);
1681
1682 c->pop_cur_done_glyphs ();
1683 }
1684 }
1685
1686 template <typename context_t>
1687 static inline void recurse_lookups (context_t *c,
1688 unsigned int lookupCount,
1689 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1690 {
1691 for (unsigned int i = 0; i < lookupCount; i++)
1692 c->recurse (lookupRecord[i].lookupListIndex);
1693 }
1694
1695 static inline void apply_lookup (hb_ot_apply_context_t *c,
1696 unsigned int count, /* Including the first glyph */
1697 unsigned int *match_positions, /* Including the first glyph */
1698 unsigned int lookupCount,
1699 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1700 unsigned int match_end)
1701 {
1702 hb_buffer_t *buffer = c->buffer;
1703 int end;
1704
1705 unsigned int *match_positions_input = match_positions;
1706 unsigned int match_positions_count = count;
1707
1708 /* All positions are distance from beginning of *output* buffer.
1709 * Adjust. */
1710 {
1711 unsigned int bl = buffer->backtrack_len ();
1712 end = bl + match_end - buffer->idx;
1713
1714 int delta = bl - buffer->idx;
1715 /* Convert positions to new indexing. */
1716 for (unsigned int j = 0; j < count; j++)
1717 match_positions[j] += delta;
1718 }
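/* A worked example of the adjustment above, with illustrative numbers:
 * for buffer->idx = 5, backtrack_len () = 3 and match_end = 9 we get
 * delta = -2, so match positions {5, 6, 8} become {3, 4, 6} and end
 * becomes 7 -- all now measured from the start of the output buffer. */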
1719
1720 for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1721 {
1722 unsigned int idx = lookupRecord[i].sequenceIndex;
1723 if (idx >= count)
1724 continue;
1725
1726 unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1727
1728 /* This can happen if earlier recursed lookups deleted many entries. */
1729 if (unlikely (match_positions[idx] >= orig_len))
1730 continue;
1731
1732 if (unlikely (!buffer->move_to (match_positions[idx])))
1733 break;
1734
1735 if (unlikely (buffer->max_ops <= 0))
1736 break;
1737
1738 if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
1739 {
1740 if (buffer->have_output)
1741 c->buffer->sync_so_far ();
1742 c->buffer->message (c->font,
1743 "recursing to lookup %u at %u",
1744 (unsigned) lookupRecord[i].lookupListIndex,
1745 buffer->idx);
1746 }
1747
1748 if (!c->recurse (lookupRecord[i].lookupListIndex))
1749 continue;
1750
1751 if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
1752 {
1753 if (buffer->have_output)
1754 c->buffer->sync_so_far ();
1755 c->buffer->message (c->font,
1756 "recursed to lookup %u",
1757 (unsigned) lookupRecord[i].lookupListIndex);
1758 }
1759
1760 unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1761 int delta = new_len - orig_len;
1762
1763 if (!delta)
1764 continue;
1765
1766 /* Recursed lookup changed buffer len. Adjust.
1767 *
1768 * TODO:
1769 *
1770 * Right now, if buffer length increased by n, we assume n new glyphs
1771 * were added right after the current position, and if buffer length
1772 * was decreased by n, we assume n match positions after the current
1773 * one were removed. The former (buffer length increased) case is
1774 * fine, but the decrease case can be improved in at least two ways,
1775 * both of which are significant:
1776 *
1777 * - If recursed-to lookup is MultipleSubst and buffer length
1778 * decreased, then it's the current match position that was deleted,
1779 * NOT the one after it.
1780 *
1781 * - If buffer length was decreased by n, it does not necessarily
1782 * mean that n match positions were removed, as the recursed-to
1783 * lookup might have had a different LookupFlag. Here's a constructed
1784 * case of that:
1785 * https://github.com/harfbuzz/harfbuzz/discussions/3538
1786 *
1787 * It should be possible to construct tests for both of these cases.
1788 */
1789
1790 end += delta;
1791 if (end < int (match_positions[idx]))
1792 {
1793 /* End might end up being smaller than match_positions[idx] if the recursed
1794 * lookup ended up removing many items.
1795 * Just never rewind end beyond the start of the current position, since
1796 * that is not possible in the recursed lookup. Adjust delta accordingly.
1797 *
1798 * https://bugs.chromium.org/p/chromium/issues/detail?id=659496
1799 * https://github.com/harfbuzz/harfbuzz/issues/1611
1800 */
1801 delta += match_positions[idx] - end;
1802 end = match_positions[idx];
1803 }
1804
1805 unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1806
1807 if (delta > 0)
1808 {
1809 if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1810 break;
1811 if (unlikely (delta + count > match_positions_count))
1812 {
1813 unsigned new_match_positions_count = hb_max (delta + count, hb_max (match_positions_count, 4u) * 1.5);
1814 if (match_positions == match_positions_input)
1815 {
1816 match_positions = (unsigned int *) hb_malloc (new_match_positions_count * sizeof (match_positions[0]));
1817 if (unlikely (!match_positions))
1818 break;
1819 memcpy (match_positions, match_positions_input, count * sizeof (match_positions[0]));
1820 match_positions_count = new_match_positions_count;
1821 }
1822 else
1823 {
1824 unsigned int *new_match_positions = (unsigned int *) hb_realloc (match_positions, new_match_positions_count * sizeof (match_positions[0]));
1825 if (unlikely (!new_match_positions))
1826 break;
1827 match_positions = new_match_positions;
1828 match_positions_count = new_match_positions_count;
1829 }
1830 }
1831
1832 }
1833 else
1834 {
1835 /* NOTE: delta is non-positive. */
1836 delta = hb_max (delta, (int) next - (int) count);
1837 next -= delta;
1838 }
1839
1840 /* Shift! */
1841 memmove (match_positions + next + delta, match_positions + next,
1842 (count - next) * sizeof (match_positions[0]));
1843 next += delta;
1844 count += delta;
1845
1846 /* Fill in new entries. */
1847 for (unsigned int j = idx + 1; j < next; j++)
1848 match_positions[j] = match_positions[j - 1] + 1;
1849
1850 /* And fixup the rest. */
1851 for (; next < count; next++)
1852 match_positions[next] += delta;
1853 }
1854
1855 if (match_positions != match_positions_input)
1856 hb_free (match_positions);
1857
1858 assert (end >= 0);
1859 (void) buffer->move_to (end);
1860 }
1861
1862
1863
1864 /* Contextual lookups */
1865
1866 struct ContextClosureLookupContext
1867 {
1868 ContextClosureFuncs funcs;
1869 ContextFormat context_format;
1870 const void *intersects_data;
1871 void *intersects_cache;
1872 void *intersected_glyphs_cache;
1873 };
1874
1875 struct ContextCollectGlyphsLookupContext
1876 {
1877 ContextCollectGlyphsFuncs funcs;
1878 const void *collect_data;
1879 };
1880
1881 struct ContextApplyLookupContext
1882 {
1883 ContextApplyFuncs funcs;
1884 const void *match_data;
1885 };
1886
1887 template <typename HBUINT>
1888 static inline bool context_intersects (const hb_set_t *glyphs,
1889 unsigned int inputCount, /* Including the first glyph (not matched) */
1890 const HBUINT input[], /* Array of input values--start with second glyph */
1891 ContextClosureLookupContext &lookup_context)
1892 {
1893 return array_is_subset_of (glyphs,
1894 inputCount ? inputCount - 1 : 0, input,
1895 lookup_context.funcs.intersects,
1896 lookup_context.intersects_data,
1897 lookup_context.intersects_cache);
1898 }
1899
1900 template <typename HBUINT>
1901 static inline void context_closure_lookup (hb_closure_context_t *c,
1902 unsigned int inputCount, /* Including the first glyph (not matched) */
1903 const HBUINT input[], /* Array of input values--start with second glyph */
1904 unsigned int lookupCount,
1905 const LookupRecord lookupRecord[],
1906 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1907 ContextClosureLookupContext &lookup_context)
1908 {
1909 if (context_intersects (c->glyphs,
1910 inputCount, input,
1911 lookup_context))
1912 context_closure_recurse_lookups (c,
1913 inputCount, input,
1914 lookupCount, lookupRecord,
1915 value,
1916 lookup_context.context_format,
1917 lookup_context.intersects_data,
1918 lookup_context.funcs.intersected_glyphs,
1919 lookup_context.intersected_glyphs_cache);
1920 }
1921
1922 template <typename HBUINT>
1923 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1924 unsigned int inputCount, /* Including the first glyph (not matched) */
1925 const HBUINT input[], /* Array of input values--start with second glyph */
1926 unsigned int lookupCount,
1927 const LookupRecord lookupRecord[],
1928 ContextCollectGlyphsLookupContext &lookup_context)
1929 {
1930 collect_array (c, c->input,
1931 inputCount ? inputCount - 1 : 0, input,
1932 lookup_context.funcs.collect, lookup_context.collect_data);
1933 recurse_lookups (c,
1934 lookupCount, lookupRecord);
1935 }
1936
1937 template <typename HBUINT>
1938 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1939 unsigned int inputCount, /* Including the first glyph (not matched) */
1940 const HBUINT input[], /* Array of input values--start with second glyph */
1941 unsigned int lookupCount HB_UNUSED,
1942 const LookupRecord lookupRecord[] HB_UNUSED,
1943 const ContextApplyLookupContext &lookup_context)
1944 {
1945 return would_match_input (c,
1946 inputCount, input,
1947 lookup_context.funcs.match, lookup_context.match_data);
1948 }
1949
1950 template <typename HBUINT>
1951 HB_ALWAYS_INLINE
1952 static bool context_apply_lookup (hb_ot_apply_context_t *c,
1953 unsigned int inputCount, /* Including the first glyph (not matched) */
1954 const HBUINT input[], /* Array of input values--start with second glyph */
1955 unsigned int lookupCount,
1956 const LookupRecord lookupRecord[],
1957 const ContextApplyLookupContext &lookup_context)
1958 {
1959 if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
1960 unsigned match_positions_stack[4];
1961 unsigned *match_positions = match_positions_stack;
1962 if (unlikely (inputCount > ARRAY_LENGTH (match_positions_stack)))
1963 {
1964 match_positions = (unsigned *) hb_malloc (hb_max (inputCount, 1u) * sizeof (match_positions[0]));
1965 if (unlikely (!match_positions))
1966 return false;
1967 }
1968
1969 unsigned match_end = 0;
1970 bool ret = false;
1971 if (match_input (c,
1972 inputCount, input,
1973 lookup_context.funcs.match, lookup_context.match_data,
1974 &match_end, match_positions))
1975 {
1976 c->buffer->unsafe_to_break (c->buffer->idx, match_end);
1977 apply_lookup (c,
1978 inputCount, match_positions,
1979 lookupCount, lookupRecord,
1980 match_end);
1981 ret = true;
1982 }
1983 else
1984 {
1985 c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
1986 ret = false;
1987 }
1988
1989 if (unlikely (match_positions != match_positions_stack))
1990 hb_free (match_positions);
1991
1992 return ret;
1993 }
1994
1995 template <typename Types>
1996 struct Rule
1997 {
1998 template <typename T>
1999 friend struct RuleSet;
2000
2001 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
2002 {
2003 return context_intersects (glyphs,
2004 inputCount, inputZ.arrayZ,
2005 lookup_context);
2006 }
2007
2008 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
2009 {
2010 if (unlikely (c->lookup_limit_exceeded ())) return;
2011
2012 const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2013 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
2014 context_closure_lookup (c,
2015 inputCount, inputZ.arrayZ,
2016 lookupCount, lookupRecord.arrayZ,
2017 value, lookup_context);
2018 }
2019
2020 void closure_lookups (hb_closure_lookups_context_t *c,
2021 ContextClosureLookupContext &lookup_context) const
2022 {
2023 if (unlikely (c->lookup_limit_exceeded ())) return;
2024 if (!intersects (c->glyphs, lookup_context)) return;
2025
2026 const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2027 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2028 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
2029 }
2030
2031 void collect_glyphs (hb_collect_glyphs_context_t *c,
2032 ContextCollectGlyphsLookupContext &lookup_context) const
2033 {
2034 const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2035 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2036 context_collect_glyphs_lookup (c,
2037 inputCount, inputZ.arrayZ,
2038 lookupCount, lookupRecord.arrayZ,
2039 lookup_context);
2040 }
2041
2042 bool would_apply (hb_would_apply_context_t *c,
2043 const ContextApplyLookupContext &lookup_context) const
2044 {
2045 const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2046 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2047 return context_would_apply_lookup (c,
2048 inputCount, inputZ.arrayZ,
2049 lookupCount, lookupRecord.arrayZ,
2050 lookup_context);
2051 }
2052
2053 bool apply (hb_ot_apply_context_t *c,
2054 const ContextApplyLookupContext &lookup_context) const
2055 {
2056 TRACE_APPLY (this);
2057 const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2058 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2059 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
2060 }
2061
2062 bool serialize (hb_serialize_context_t *c,
2063 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
2064 const hb_map_t *lookup_map) const
2065 {
2066 TRACE_SERIALIZE (this);
2067 auto *out = c->start_embed (this);
2068 if (unlikely (!c->extend_min (out))) return_trace (false);
2069
2070 out->inputCount = inputCount;
2071 const auto input = inputZ.as_array (inputCount - 1);
2072 for (const auto org : input)
2073 {
2074 HBUINT16 d;
2075 d = input_mapping->get (org);
2076 c->copy (d);
2077 }
2078
2079 const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2080 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
2081
2082 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
2083 return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2084 }
2085
2086 bool subset (hb_subset_context_t *c,
2087 const hb_map_t *lookup_map,
2088 const hb_map_t *klass_map = nullptr) const
2089 {
2090 TRACE_SUBSET (this);
2091 if (unlikely (!inputCount)) return_trace (false);
2092 const auto input = inputZ.as_array (inputCount - 1);
2093
2094 const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
2095 if (!hb_all (input, mapping)) return_trace (false);
2096 return_trace (serialize (c->serializer, mapping, lookup_map));
2097 }
2098
2099 public:
2100 bool sanitize (hb_sanitize_context_t *c) const
2101 {
2102 TRACE_SANITIZE (this);
2103 return_trace (c->check_struct (this) &&
2104 hb_barrier () &&
2105 c->check_range (inputZ.arrayZ,
2106 inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
2107 LookupRecord::static_size * lookupCount));
2108 }
2109
2110 protected:
2111 HBUINT16 inputCount; /* Total number of glyphs in input
2112 * glyph sequence--includes the first
2113 * glyph */
2114 HBUINT16 lookupCount; /* Number of LookupRecords */
2115 UnsizedArrayOf<typename Types::HBUINT>
2116 inputZ; /* Array of match inputs--start with
2117 * second glyph */
2118 /*UnsizedArrayOf<LookupRecord>
2119 lookupRecordX;*/ /* Array of LookupRecords--in
2120 * design order */
2121 public:
2122 DEFINE_SIZE_ARRAY (4, inputZ);
2123 };
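/* Size sketch (SmallTypes, made-up rule): inputCount = 3 and
 * lookupCount = 1 gives 4 bytes of header, two HBUINT16s in inputZ for
 * the second and third glyphs, then one 4-byte LookupRecord; 12 bytes
 * total. */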
2124
2125 template <typename Types>
2126 struct RuleSet
2127 {
2128 using Rule = OT::Rule<Types>;
2129
2130 bool intersects (const hb_set_t *glyphs,
2131 ContextClosureLookupContext &lookup_context) const
2132 {
2133 return
2134 + hb_iter (rule)
2135 | hb_map (hb_add (this))
2136 | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
2137 | hb_any
2138 ;
2139 }
2140
2141 void closure (hb_closure_context_t *c, unsigned value,
2142 ContextClosureLookupContext &lookup_context) const
2143 {
2144 if (unlikely (c->lookup_limit_exceeded ())) return;
2145
2146 return
2147 + hb_iter (rule)
2148 | hb_map (hb_add (this))
2149 | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
2150 ;
2151 }
2152
2153 void closure_lookups (hb_closure_lookups_context_t *c,
2154 ContextClosureLookupContext &lookup_context) const
2155 {
2156 if (unlikely (c->lookup_limit_exceeded ())) return;
2157 + hb_iter (rule)
2158 | hb_map (hb_add (this))
2159 | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
2160 ;
2161 }
2162
2163 void collect_glyphs (hb_collect_glyphs_context_t *c,
2164 ContextCollectGlyphsLookupContext &lookup_context) const
2165 {
2166 return
2167 + hb_iter (rule)
2168 | hb_map (hb_add (this))
2169 | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
2170 ;
2171 }
2172
2173 bool would_apply (hb_would_apply_context_t *c,
2174 const ContextApplyLookupContext &lookup_context) const
2175 {
2176 return
2177 + hb_iter (rule)
2178 | hb_map (hb_add (this))
2179 | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
2180 | hb_any
2181 ;
2182 }
2183
2184 bool apply (hb_ot_apply_context_t *c,
2185 const ContextApplyLookupContext &lookup_context) const
2186 {
2187 TRACE_APPLY (this);
2188
2189 unsigned num_rules = rule.len;
2190
2191 #ifndef HB_NO_OT_RULESETS_FAST_PATH
2192 if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
2193 #endif
2194 {
2195 slow:
2196 return_trace (
2197 + hb_iter (rule)
2198 | hb_map (hb_add (this))
2199 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
2200 | hb_any
2201 )
2202 ;
2203 }
2204
2205 /* This version is optimized for speed by matching the first & second
2206 * components of the rule here, instead of calling into the matching code.
2207 *
2208 * Replicated from LigatureSet::apply(). */
2209
2210 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2211 skippy_iter.reset (c->buffer->idx);
2212 skippy_iter.set_match_func (match_always, nullptr);
2213 skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
2214 unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
2215 hb_glyph_info_t *first = nullptr, *second = nullptr;
2216 bool matched = skippy_iter.next ();
2217 if (likely (matched))
2218 {
2219 first = &c->buffer->info[skippy_iter.idx];
2220 unsafe_to1 = skippy_iter.idx + 1;
2221
2222 if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
2223 {
2224 /* Can't use the fast path if e.g. the next char is a default-ignorable
2225 * or other skippable. */
2226 goto slow;
2227 }
2228 }
2229 else
2230 {
2231 /* Failed to match the next glyph. Only try applying rules that have
2232 * no further input. */
2233 return_trace (
2234 + hb_iter (rule)
2235 | hb_map (hb_add (this))
2236 | hb_filter ([&] (const Rule &_) { return _.inputCount <= 1; })
2237 | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
2238 | hb_any
2239 )
2240 ;
2241 }
2242 matched = skippy_iter.next ();
2243 if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
2244 {
2245 second = &c->buffer->info[skippy_iter.idx];
2246 unsafe_to2 = skippy_iter.idx + 1;
2247 }
2248
2249 auto match_input = lookup_context.funcs.match;
2250 auto *input_data = lookup_context.match_data;
2251 for (unsigned int i = 0; i < num_rules; i++)
2252 {
2253 const auto &r = this+rule.arrayZ[i];
2254
2255 const auto &input = r.inputZ;
2256
2257 if (r.inputCount <= 1 ||
2258 (!match_input ||
2259 match_input (*first, input.arrayZ[0], input_data)))
2260 {
2261 if (!second ||
2262 (r.inputCount <= 2 ||
2263 (!match_input ||
2264 match_input (*second, input.arrayZ[1], input_data)))
2265 )
2266 {
2267 if (r.apply (c, lookup_context))
2268 {
2269 if (unsafe_to != (unsigned) -1)
2270 c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
2271 return_trace (true);
2272 }
2273 }
2274 else
2275 unsafe_to = unsafe_to2;
2276 }
2277 else
2278 {
2279 if (unsafe_to == (unsigned) -1)
2280 unsafe_to = unsafe_to1;
2281 }
2282 }
2283 if (likely (unsafe_to != (unsigned) -1))
2284 c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
2285
2286 return_trace (false);
2287 }
2288
2289 bool subset (hb_subset_context_t *c,
2290 const hb_map_t *lookup_map,
2291 const hb_map_t *klass_map = nullptr) const
2292 {
2293 TRACE_SUBSET (this);
2294
2295 auto snap = c->serializer->snapshot ();
2296 auto *out = c->serializer->start_embed (*this);
2297 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2298
2299 for (const Offset16To<Rule>& _ : rule)
2300 {
2301 if (!_) continue;
2302 auto o_snap = c->serializer->snapshot ();
2303 auto *o = out->rule.serialize_append (c->serializer);
2304 if (unlikely (!o)) continue;
2305
2306 if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
2307 {
2308 out->rule.pop ();
2309 c->serializer->revert (o_snap);
2310 }
2311 }
2312
2313 bool ret = bool (out->rule);
2314 if (!ret) c->serializer->revert (snap);
2315
2316 return_trace (ret);
2317 }
2318
2319 bool sanitize (hb_sanitize_context_t *c) const
2320 {
2321 TRACE_SANITIZE (this);
2322 return_trace (rule.sanitize (c, this));
2323 }
2324
2325 protected:
2326 Array16OfOffset16To<Rule>
2327 rule; /* Array of Rule tables
2328 * ordered by preference */
2329 public:
2330 DEFINE_SIZE_ARRAY (2, rule);
2331 };
2332
2333
2334 template <typename Types>
2335 struct ContextFormat1_4
2336 {
2337 using RuleSet = OT::RuleSet<Types>;
2338
2339 bool intersects (const hb_set_t *glyphs) const
2340 {
2341 struct ContextClosureLookupContext lookup_context = {
2342 {intersects_glyph, intersected_glyph},
2343 ContextFormat::SimpleContext,
2344 nullptr
2345 };
2346
2347 return
2348 + hb_zip (this+coverage, ruleSet)
2349 | hb_filter (*glyphs, hb_first)
2350 | hb_map (hb_second)
2351 | hb_map (hb_add (this))
2352 | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
2353 | hb_any
2354 ;
2355 }
2356
2357 bool may_have_non_1to1 () const
2358 { return true; }
2359
2360 void closure (hb_closure_context_t *c) const
2361 {
2362 hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
2363 if (unlikely (!cur_active_glyphs)) return;
2364 get_coverage ().intersect_set (c->previous_parent_active_glyphs (), *cur_active_glyphs);
2365
2366 struct ContextClosureLookupContext lookup_context = {
2367 {intersects_glyph, intersected_glyph},
2368 ContextFormat::SimpleContext,
2369 nullptr
2370 };
2371
2372 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2373 | hb_filter ([&] (hb_codepoint_t _) {
2374 return c->previous_parent_active_glyphs ().has (_);
2375 }, hb_first)
2376 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
2377 | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2378 ;
2379
2380 c->pop_cur_done_glyphs ();
2381 }
2382
2383 void closure_lookups (hb_closure_lookups_context_t *c) const
2384 {
2385 struct ContextClosureLookupContext lookup_context = {
2386 {intersects_glyph, nullptr},
2387 ContextFormat::SimpleContext,
2388 nullptr
2389 };
2390
2391 + hb_zip (this+coverage, ruleSet)
2392 | hb_filter (*c->glyphs, hb_first)
2393 | hb_map (hb_second)
2394 | hb_map (hb_add (this))
2395 | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
2396 ;
2397 }
2398
2399 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2400
2401 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2402 {
2403 (this+coverage).collect_coverage (c->input);
2404
2405 struct ContextCollectGlyphsLookupContext lookup_context = {
2406 {collect_glyph},
2407 nullptr
2408 };
2409
2410 + hb_iter (ruleSet)
2411 | hb_map (hb_add (this))
2412 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2413 ;
2414 }
2415
2416 bool would_apply (hb_would_apply_context_t *c) const
2417 {
2418 const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2419 struct ContextApplyLookupContext lookup_context = {
2420 {match_glyph},
2421 nullptr
2422 };
2423 return rule_set.would_apply (c, lookup_context);
2424 }
2425
2426 const Coverage &get_coverage () const { return this+coverage; }
2427
2428 bool apply (hb_ot_apply_context_t *c) const
2429 {
2430 TRACE_APPLY (this);
2431 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2432 if (likely (index == NOT_COVERED))
2433 return_trace (false);
2434
2435 const RuleSet &rule_set = this+ruleSet[index];
2436 struct ContextApplyLookupContext lookup_context = {
2437 {match_glyph},
2438 nullptr
2439 };
2440 return_trace (rule_set.apply (c, lookup_context));
2441 }
2442
2443 bool subset (hb_subset_context_t *c) const
2444 {
2445 TRACE_SUBSET (this);
2446 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2447 const hb_map_t &glyph_map = *c->plan->glyph_map;
2448
2449 auto *out = c->serializer->start_embed (*this);
2450 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2451 out->format = format;
2452
2453 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
2454 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2455 + hb_zip (this+coverage, ruleSet)
2456 | hb_filter (glyphset, hb_first)
2457 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2458 | hb_map (hb_first)
2459 | hb_map (glyph_map)
2460 | hb_sink (new_coverage)
2461 ;
2462
2463 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
2464 return_trace (bool (new_coverage));
2465 }
2466
2467 bool sanitize (hb_sanitize_context_t *c) const
2468 {
2469 TRACE_SANITIZE (this);
2470 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2471 }
2472
2473 protected:
2474 HBUINT16 format; /* Format identifier--format = 1 */
2475 typename Types::template OffsetTo<Coverage>
2476 coverage; /* Offset to Coverage table--from
2477 * beginning of table */
2478 Array16Of<typename Types::template OffsetTo<RuleSet>>
2479 ruleSet; /* Array of RuleSet tables
2480 * ordered by Coverage Index */
2481 public:
2482 DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
2483 };
2484
2485
2486 template <typename Types>
2487 struct ContextFormat2_5
2488 {
2489 using RuleSet = OT::RuleSet<SmallTypes>;
2490
2491 bool intersects (const hb_set_t *glyphs) const
2492 {
2493 if (!(this+coverage).intersects (glyphs))
2494 return false;
2495
2496 const ClassDef &class_def = this+classDef;
2497
2498 hb_map_t cache;
2499 struct ContextClosureLookupContext lookup_context = {
2500 {intersects_class, nullptr},
2501 ContextFormat::ClassBasedContext,
2502 &class_def,
2503 &cache
2504 };
2505
2506 hb_set_t retained_coverage_glyphs;
2507 (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);
2508
2509 hb_set_t coverage_glyph_classes;
2510 class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2511
2512
2513 return
2514 + hb_iter (ruleSet)
2515 | hb_map (hb_add (this))
2516 | hb_enumerate
2517 | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2518 { return class_def.intersects_class (glyphs, p.first) &&
2519 coverage_glyph_classes.has (p.first) &&
2520 p.second.intersects (glyphs, lookup_context); })
2521 | hb_any
2522 ;
2523 }
2524
2525 bool may_have_non_1to1 () const
2526 { return true; }
2527
2528 void closure (hb_closure_context_t *c) const
2529 {
2530 if (!(this+coverage).intersects (c->glyphs))
2531 return;
2532
2533 hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
2534 if (unlikely (!cur_active_glyphs)) return;
2535 get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
2536 *cur_active_glyphs);
2537
2538 const ClassDef &class_def = this+classDef;
2539
2540 hb_map_t cache;
2541 intersected_class_cache_t intersected_cache;
2542 struct ContextClosureLookupContext lookup_context = {
2543 {intersects_class, intersected_class_glyphs},
2544 ContextFormat::ClassBasedContext,
2545 &class_def,
2546 &cache,
2547 &intersected_cache
2548 };
2549
2550 + hb_enumerate (ruleSet)
2551 | hb_filter ([&] (unsigned _)
2552 { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
2553 hb_first)
2554 | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<RuleSet>&> _)
2555 {
2556 const RuleSet& rule_set = this+_.second;
2557 rule_set.closure (c, _.first, lookup_context);
2558 })
2559 ;
2560
2561 c->pop_cur_done_glyphs ();
2562 }
2563
2564 void closure_lookups (hb_closure_lookups_context_t *c) const
2565 {
2566 if (!(this+coverage).intersects (c->glyphs))
2567 return;
2568
2569 const ClassDef &class_def = this+classDef;
2570
2571 hb_map_t cache;
2572 struct ContextClosureLookupContext lookup_context = {
2573 {intersects_class, nullptr},
2574 ContextFormat::ClassBasedContext,
2575 &class_def,
2576 &cache
2577 };
2578
2579 + hb_iter (ruleSet)
2580 | hb_map (hb_add (this))
2581 | hb_enumerate
2582 | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2583 { return class_def.intersects_class (c->glyphs, p.first); })
2584 | hb_map (hb_second)
2585 | hb_apply ([&] (const RuleSet & _)
2586 { _.closure_lookups (c, lookup_context); });
2587 }
2588
2589 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2590
2591 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2592 {
2593 (this+coverage).collect_coverage (c->input);
2594
2595 const ClassDef &class_def = this+classDef;
2596 struct ContextCollectGlyphsLookupContext lookup_context = {
2597 {collect_class},
2598 &class_def
2599 };
2600
2601 + hb_iter (ruleSet)
2602 | hb_map (hb_add (this))
2603 | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2604 ;
2605 }
2606
2607 bool would_apply (hb_would_apply_context_t *c) const
2608 {
2609 const ClassDef &class_def = this+classDef;
2610 unsigned int index = class_def.get_class (c->glyphs[0]);
2611 const RuleSet &rule_set = this+ruleSet[index];
2612 struct ContextApplyLookupContext lookup_context = {
2613 {match_class},
2614 &class_def
2615 };
2616 return rule_set.would_apply (c, lookup_context);
2617 }
2618
2619 const Coverage &get_coverage () const { return this+coverage; }
2620
2621 unsigned cache_cost () const
2622 {
2623 unsigned c = (this+classDef).cost () * ruleSet.len;
2624 return c >= 4 ? c : 0;
2625 }
2626 bool cache_func (hb_ot_apply_context_t *c, bool enter) const
2627 {
2628 if (enter)
2629 {
2630 if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
2631 return false;
2632 auto &info = c->buffer->info;
2633 unsigned count = c->buffer->len;
2634 for (unsigned i = 0; i < count; i++)
2635 info[i].syllable() = 255;
2636 c->new_syllables = 255;
2637 return true;
2638 }
2639 else
2640 {
2641 c->new_syllables = (unsigned) -1;
2642 HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
2643 return true;
2644 }
2645 }
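/* How the cache works: on enter, every glyph's `syllable' var is seeded
 * with 255 meaning "class not computed yet"; match_class_cached (used
 * below when `cached' is true) then memoizes each glyph's ClassDef class
 * in that var, so repeated rule matching skips redundant ClassDef
 * lookups. */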
2646
2647 bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
2648 bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
2649 bool _apply (hb_ot_apply_context_t *c, bool cached) const
2650 {
2651 TRACE_APPLY (this);
2652 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2653 if (likely (index == NOT_COVERED)) return_trace (false);
2654
2655 const ClassDef &class_def = this+classDef;
2656
2657 struct ContextApplyLookupContext lookup_context = {
2658 {cached ? match_class_cached : match_class},
2659 &class_def
2660 };
2661
2662 if (cached && c->buffer->cur().syllable() < 255)
2663 index = c->buffer->cur().syllable ();
2664 else
2665 index = class_def.get_class (c->buffer->cur().codepoint);
2666 const RuleSet &rule_set = this+ruleSet[index];
2667 return_trace (rule_set.apply (c, lookup_context));
2668 }
2669
2670 bool subset (hb_subset_context_t *c) const
2671 {
2672 TRACE_SUBSET (this);
2673 auto *out = c->serializer->start_embed (*this);
2674 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2675 out->format = format;
2676 if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
2677 return_trace (false);
2678
2679 hb_map_t klass_map;
2680 out->classDef.serialize_subset (c, classDef, this, &klass_map);
2681
2682 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
2683 hb_set_t retained_coverage_glyphs;
2684 (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);
2685
2686 hb_set_t coverage_glyph_classes;
2687 (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2688
2689 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
2690 bool ret = true;
2691 int non_zero_index = -1, index = 0;
2692 auto snapshot = c->serializer->snapshot();
2693 for (const auto& _ : + hb_enumerate (ruleSet)
2694 | hb_filter (klass_map, hb_first))
2695 {
2696 auto *o = out->ruleSet.serialize_append (c->serializer);
2697 if (unlikely (!o))
2698 {
2699 ret = false;
2700 break;
2701 }
2702
2703 if (coverage_glyph_classes.has (_.first) &&
2704 o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
2705 non_zero_index = index;
2706 snapshot = c->serializer->snapshot();
2707 }
2708
2709 index++;
2710 }
2711
2712 if (!ret || non_zero_index == -1) return_trace (false);
2713
2714 /* Prune empty trailing ruleSets. */
2715 --index;
2716 while (index > non_zero_index)
2717 {
2718 out->ruleSet.pop ();
2719 index--;
2720 }
2721 c->serializer->revert (snapshot);
2722
2723 return_trace (bool (out->ruleSet));
2724 }
2725
2726 bool sanitize (hb_sanitize_context_t *c) const
2727 {
2728 TRACE_SANITIZE (this);
2729 return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
2730 }
2731
2732 protected:
2733 HBUINT16 format; /* Format identifier--format = 2 */
2734 typename Types::template OffsetTo<Coverage>
2735 coverage; /* Offset to Coverage table--from
2736 * beginning of table */
2737 typename Types::template OffsetTo<ClassDef>
2738 classDef; /* Offset to glyph ClassDef table--from
2739 * beginning of table */
2740 Array16Of<typename Types::template OffsetTo<RuleSet>>
2741 ruleSet; /* Array of RuleSet tables
2742 * ordered by class */
2743 public:
2744 DEFINE_SIZE_ARRAY (4 + 2 * Types::size, ruleSet);
2745 };
2746
2747
2748 struct ContextFormat3
2749 {
2750 using RuleSet = OT::RuleSet<SmallTypes>;
2751
2752 bool intersects (const hb_set_t *glyphs) const
2753 {
2754 if (!(this+coverageZ[0]).intersects (glyphs))
2755 return false;
2756
2757 struct ContextClosureLookupContext lookup_context = {
2758 {intersects_coverage, nullptr},
2759 ContextFormat::CoverageBasedContext,
2760 this
2761 };
2762 return context_intersects (glyphs,
2763 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2764 lookup_context);
2765 }
2766
2767 bool may_have_non_1to1 () const
2768 { return true; }
2769
2770 void closure (hb_closure_context_t *c) const
2771 {
2772 if (!(this+coverageZ[0]).intersects (c->glyphs))
2773 return;
2774
2775 hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
2776 if (unlikely (!cur_active_glyphs)) return;
2777 get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
2778 *cur_active_glyphs);
2779
2780 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2781 struct ContextClosureLookupContext lookup_context = {
2782 {intersects_coverage, intersected_coverage_glyphs},
2783 ContextFormat::CoverageBasedContext,
2784 this
2785 };
2786 context_closure_lookup (c,
2787 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2788 lookupCount, lookupRecord,
2789 0, lookup_context);
2790
2791 c->pop_cur_done_glyphs ();
2792 }
2793
2794 void closure_lookups (hb_closure_lookups_context_t *c) const
2795 {
2796 if (!intersects (c->glyphs))
2797 return;
2798 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2799 recurse_lookups (c, lookupCount, lookupRecord);
2800 }
2801
2802 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2803
2804 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2805 {
2806 (this+coverageZ[0]).collect_coverage (c->input);
2807
2808 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2809 struct ContextCollectGlyphsLookupContext lookup_context = {
2810 {collect_coverage},
2811 this
2812 };
2813
2814 context_collect_glyphs_lookup (c,
2815 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2816 lookupCount, lookupRecord,
2817 lookup_context);
2818 }
2819
2820 bool would_apply (hb_would_apply_context_t *c) const
2821 {
2822 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2823 struct ContextApplyLookupContext lookup_context = {
2824 {match_coverage},
2825 this
2826 };
2827 return context_would_apply_lookup (c,
2828 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2829 lookupCount, lookupRecord,
2830 lookup_context);
2831 }
2832
2833 const Coverage &get_coverage () const { return this+coverageZ[0]; }
2834
2835 bool apply (hb_ot_apply_context_t *c) const
2836 {
2837 TRACE_APPLY (this);
2838 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2839 if (likely (index == NOT_COVERED)) return_trace (false);
2840
2841 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2842 struct ContextApplyLookupContext lookup_context = {
2843 {match_coverage},
2844 this
2845 };
2846 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
2847 }
2848
2849 bool subset (hb_subset_context_t *c) const
2850 {
2851 TRACE_SUBSET (this);
2852 auto *out = c->serializer->start_embed (this);
2853 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2854
2855 out->format = format;
2856 out->glyphCount = glyphCount;
2857
2858 auto coverages = coverageZ.as_array (glyphCount);
2859
2860 for (const Offset16To<Coverage>& offset : coverages)
2861 {
2862 /* TODO(subset) It looks like it should not be necessary to write this this way. */
2863 auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
2864 if (unlikely (!o)) return_trace (false);
2865 if (!o->serialize_subset (c, offset, this)) return_trace (false);
2866 }
2867
2868 const auto& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
2869 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
2870
2871
2872 unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
2873 return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2874 }
2875
2876 bool sanitize (hb_sanitize_context_t *c) const
2877 {
2878 TRACE_SANITIZE (this);
2879 if (unlikely (!c->check_struct (this))) return_trace (false);
2880 hb_barrier ();
2881 unsigned int count = glyphCount;
2882 if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */
2883 if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false);
2884 for (unsigned int i = 0; i < count; i++)
2885 if (unlikely (!coverageZ[i].sanitize (c, this))) return_trace (false);
2886 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2887 return_trace (likely (c->check_array (lookupRecord, lookupCount)));
2888 }
2889
2890 protected:
2891 HBUINT16 format; /* Format identifier--format = 3 */
2892 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2893 * sequence */
2894 HBUINT16 lookupCount; /* Number of LookupRecords */
2895 UnsizedArrayOf<Offset16To<Coverage>>
2896 coverageZ; /* Array of offsets to Coverage
2897 * table in glyph sequence order */
2898 /*UnsizedArrayOf<LookupRecord>
2899 lookupRecordX;*/ /* Array of LookupRecords--in
2900 * design order */
2901 public:
2902 DEFINE_SIZE_ARRAY (6, coverageZ);
2903 };
2904
2905 struct Context
2906 {
2907 template <typename context_t, typename ...Ts>
2908 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2909 {
2910 if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
2911 TRACE_DISPATCH (this, u.format);
2912 switch (u.format) {
2913 case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2914 case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
2915 case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
2916 #ifndef HB_NO_BEYOND_64K
2917 case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
2918 case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
2919 #endif
2920 default:return_trace (c->default_return_value ());
2921 }
2922 }
2923
2924 protected:
2925 union {
2926 HBUINT16 format; /* Format identifier */
2927 ContextFormat1_4<SmallTypes> format1;
2928 ContextFormat2_5<SmallTypes> format2;
2929 ContextFormat3 format3;
2930 #ifndef HB_NO_BEYOND_64K
2931 ContextFormat1_4<MediumTypes> format4;
2932 ContextFormat2_5<MediumTypes> format5;
2933 #endif
2934 } u;
2935 };
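/* None of the above is visible through the public API; contextual lookups
 * simply run as part of shaping. A minimal sketch of a caller (standalone
 * C++ program, not part of this header; "font.ttf" is a placeholder path):
 *
 *   #include <hb.h>
 *   #include <stdio.h>
 *
 *   int main ()
 *   {
 *     hb_blob_t *blob = hb_blob_create_from_file ("font.ttf");
 *     hb_face_t *face = hb_face_create (blob, 0);
 *     hb_font_t *font = hb_font_create (face);
 *
 *     hb_buffer_t *buf = hb_buffer_create ();
 *     hb_buffer_add_utf8 (buf, "office", -1, 0, -1);
 *     hb_buffer_guess_segment_properties (buf);
 *
 *     // GSUB/GPOS lookups, including any Context lookups, run in here.
 *     hb_shape (font, buf, nullptr, 0);
 *
 *     unsigned count = hb_buffer_get_length (buf);
 *     hb_glyph_info_t *info = hb_buffer_get_glyph_infos (buf, nullptr);
 *     for (unsigned i = 0; i < count; i++)
 *       printf ("glyph index %u\n", info[i].codepoint);
 *
 *     hb_buffer_destroy (buf);
 *     hb_font_destroy (font);
 *     hb_face_destroy (face);
 *     hb_blob_destroy (blob);
 *     return 0;
 *   }
 */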
2936
2937
2938 /* Chaining Contextual lookups */
2939
2940 struct ChainContextClosureLookupContext
2941 {
2942 ContextClosureFuncs funcs;
2943 ContextFormat context_format;
2944 const void *intersects_data[3];
2945 void *intersects_cache[3];
2946 void *intersected_glyphs_cache;
2947 };
2948
2949 struct ChainContextCollectGlyphsLookupContext
2950 {
2951 ContextCollectGlyphsFuncs funcs;
2952 const void *collect_data[3];
2953 };
2954
2955 struct ChainContextApplyLookupContext
2956 {
2957 ChainContextApplyFuncs funcs;
2958 const void *match_data[3];
2959 };
2960
2961 template <typename HBUINT>
2962 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2963 unsigned int backtrackCount,
2964 const HBUINT backtrack[],
2965 unsigned int inputCount, /* Including the first glyph (not matched) */
2966 const HBUINT input[], /* Array of input values--start with second glyph */
2967 unsigned int lookaheadCount,
2968 const HBUINT lookahead[],
2969 ChainContextClosureLookupContext &lookup_context)
2970 {
2971 return array_is_subset_of (glyphs,
2972 backtrackCount, backtrack,
2973 lookup_context.funcs.intersects,
2974 lookup_context.intersects_data[0],
2975 lookup_context.intersects_cache[0])
2976 && array_is_subset_of (glyphs,
2977 inputCount ? inputCount - 1 : 0, input,
2978 lookup_context.funcs.intersects,
2979 lookup_context.intersects_data[1],
2980 lookup_context.intersects_cache[1])
2981 && array_is_subset_of (glyphs,
2982 lookaheadCount, lookahead,
2983 lookup_context.funcs.intersects,
2984 lookup_context.intersects_data[2],
2985 lookup_context.intersects_cache[2]);
2986 }
2987
2988 template <typename HBUINT>
2989 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2990 unsigned int backtrackCount,
2991 const HBUINT backtrack[],
2992 unsigned int inputCount, /* Including the first glyph (not matched) */
2993 const HBUINT input[], /* Array of input values--start with second glyph */
2994 unsigned int lookaheadCount,
2995 const HBUINT lookahead[],
2996 unsigned int lookupCount,
2997 const LookupRecord lookupRecord[],
2998 unsigned value,
2999 ChainContextClosureLookupContext &lookup_context)
3000 {
3001 if (chain_context_intersects (c->glyphs,
3002 backtrackCount, backtrack,
3003 inputCount, input,
3004 lookaheadCount, lookahead,
3005 lookup_context))
3006 context_closure_recurse_lookups (c,
3007 inputCount, input,
3008 lookupCount, lookupRecord,
3009 value,
3010 lookup_context.context_format,
3011 lookup_context.intersects_data[1],
3012 lookup_context.funcs.intersected_glyphs,
3013 lookup_context.intersected_glyphs_cache);
3014 }
3015
3016 template <typename HBUINT>
3017 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
3018 unsigned int backtrackCount,
3019 const HBUINT backtrack[],
3020 unsigned int inputCount, /* Including the first glyph (not matched) */
3021 const HBUINT input[], /* Array of input values--start with second glyph */
3022 unsigned int lookaheadCount,
3023 const HBUINT lookahead[],
3024 unsigned int lookupCount,
3025 const LookupRecord lookupRecord[],
3026 ChainContextCollectGlyphsLookupContext &lookup_context)
3027 {
3028 collect_array (c, c->before,
3029 backtrackCount, backtrack,
3030 lookup_context.funcs.collect, lookup_context.collect_data[0]);
3031 collect_array (c, c->input,
3032 inputCount ? inputCount - 1 : 0, input,
3033 lookup_context.funcs.collect, lookup_context.collect_data[1]);
3034 collect_array (c, c->after,
3035 lookaheadCount, lookahead,
3036 lookup_context.funcs.collect, lookup_context.collect_data[2]);
3037 recurse_lookups (c,
3038 lookupCount, lookupRecord);
3039 }
3040
3041 template <typename HBUINT>
3042 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
3043 unsigned int backtrackCount,
3044 const HBUINT backtrack[] HB_UNUSED,
3045 unsigned int inputCount, /* Including the first glyph (not matched) */
3046 const HBUINT input[], /* Array of input values--start with second glyph */
3047 unsigned int lookaheadCount,
3048 const HBUINT lookahead[] HB_UNUSED,
3049 unsigned int lookupCount HB_UNUSED,
3050 const LookupRecord lookupRecord[] HB_UNUSED,
3051 const ChainContextApplyLookupContext &lookup_context)
3052 {
3053 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
3054 && would_match_input (c,
3055 inputCount, input,
3056 lookup_context.funcs.match[1], lookup_context.match_data[1]);
3057 }
3058
3059 template <typename HBUINT>
3060 HB_ALWAYS_INLINE
3061 static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
3062 unsigned int backtrackCount,
3063 const HBUINT backtrack[],
3064 unsigned int inputCount, /* Including the first glyph (not matched) */
3065 const HBUINT input[], /* Array of input values--start with second glyph */
3066 unsigned int lookaheadCount,
3067 const HBUINT lookahead[],
3068 unsigned int lookupCount,
3069 const LookupRecord lookupRecord[],
3070 const ChainContextApplyLookupContext &lookup_context)
3071 {
3072 if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
3073 unsigned match_positions_stack[4];
3074 unsigned *match_positions = match_positions_stack;
3075 if (unlikely (inputCount > ARRAY_LENGTH (match_positions_stack)))
3076 {
3077 match_positions = (unsigned *) hb_malloc (hb_max (inputCount, 1u) * sizeof (match_positions[0]));
3078 if (unlikely (!match_positions))
3079 return false;
3080 }
3081
3082 unsigned start_index = c->buffer->out_len;
3083 unsigned end_index = c->buffer->idx;
3084 unsigned match_end = 0;
3085 bool ret = true;
3086 if (!(match_input (c,
3087 inputCount, input,
3088 lookup_context.funcs.match[1], lookup_context.match_data[1],
3089 &match_end, match_positions) && (end_index = match_end)
3090 && match_lookahead (c,
3091 lookaheadCount, lookahead,
3092 lookup_context.funcs.match[2], lookup_context.match_data[2],
3093 match_end, &end_index)))
3094 {
3095 c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
3096 ret = false;
3097 goto done;
3098 }
3099
3100 if (!match_backtrack (c,
3101 backtrackCount, backtrack,
3102 lookup_context.funcs.match[0], lookup_context.match_data[0],
3103 &start_index))
3104 {
3105 c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
3106 ret = false;
3107 goto done;
3108 }
3109
3110 c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
3111 apply_lookup (c,
3112 inputCount, match_positions,
3113 lookupCount, lookupRecord,
3114 match_end);
3115 done:
3116
3117 if (unlikely (match_positions != match_positions_stack))
3118 hb_free (match_positions);
3119
3120 return ret;
3121 }
3122
3123 template <typename Types>
3124 struct ChainRule
3125 {
3126 template <typename T>
3127 friend struct ChainRuleSet;
3128
3129 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
3130 {
3131 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3132 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3133 return chain_context_intersects (glyphs,
3134 backtrack.len, backtrack.arrayZ,
3135 input.lenP1, input.arrayZ,
3136 lookahead.len, lookahead.arrayZ,
3137 lookup_context);
3138 }
3139
3140 void closure (hb_closure_context_t *c, unsigned value,
3141 ChainContextClosureLookupContext &lookup_context) const
3142 {
3143 if (unlikely (c->lookup_limit_exceeded ())) return;
3144
3145 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3146 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3147 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3148 chain_context_closure_lookup (c,
3149 backtrack.len, backtrack.arrayZ,
3150 input.lenP1, input.arrayZ,
3151 lookahead.len, lookahead.arrayZ,
3152 lookup.len, lookup.arrayZ,
3153 value,
3154 lookup_context);
3155 }
3156
3157 void closure_lookups (hb_closure_lookups_context_t *c,
3158 ChainContextClosureLookupContext &lookup_context) const
3159 {
3160 if (unlikely (c->lookup_limit_exceeded ())) return;
3161 if (!intersects (c->glyphs, lookup_context)) return;
3162
3163 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3164 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3165 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3166 recurse_lookups (c, lookup.len, lookup.arrayZ);
3167 }
3168
3169 void collect_glyphs (hb_collect_glyphs_context_t *c,
3170 ChainContextCollectGlyphsLookupContext &lookup_context) const
3171 {
3172 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3173 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3174 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3175 chain_context_collect_glyphs_lookup (c,
3176 backtrack.len, backtrack.arrayZ,
3177 input.lenP1, input.arrayZ,
3178 lookahead.len, lookahead.arrayZ,
3179 lookup.len, lookup.arrayZ,
3180 lookup_context);
3181 }
3182
3183 bool would_apply (hb_would_apply_context_t *c,
3184 const ChainContextApplyLookupContext &lookup_context) const
3185 {
3186 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3187 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3188 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3189 return chain_context_would_apply_lookup (c,
3190 backtrack.len, backtrack.arrayZ,
3191 input.lenP1, input.arrayZ,
3192 lookahead.len, lookahead.arrayZ, lookup.len,
3193 lookup.arrayZ, lookup_context);
3194 }
3195
3196 bool apply (hb_ot_apply_context_t *c,
3197 const ChainContextApplyLookupContext &lookup_context) const
3198 {
3199 TRACE_APPLY (this);
3200 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3201 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3202 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3203 return_trace (chain_context_apply_lookup (c,
3204 backtrack.len, backtrack.arrayZ,
3205 input.lenP1, input.arrayZ,
3206 lookahead.len, lookahead.arrayZ, lookup.len,
3207 lookup.arrayZ, lookup_context));
3208 }
3209
3210 template<typename Iterator,
3211 hb_requires (hb_is_iterator (Iterator))>
3212 void serialize_array (hb_serialize_context_t *c,
3213 HBUINT16 len,
3214 Iterator it) const
3215 {
3216 c->copy (len);
3217 for (const auto g : it)
3218 c->copy ((HBUINT16) g);
3219 }
3220
3221 bool serialize (hb_serialize_context_t *c,
3222 const hb_map_t *lookup_map,
3223 const hb_map_t *backtrack_map,
3224 const hb_map_t *input_map = nullptr,
3225 const hb_map_t *lookahead_map = nullptr) const
3226 {
3227 TRACE_SERIALIZE (this);
3228
3229 const hb_map_t *mapping = backtrack_map;
3230 serialize_array (c, backtrack.len, + backtrack.iter ()
3231 | hb_map (mapping));
3232
3233 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3234 if (input_map) mapping = input_map;
3235 serialize_array (c, input.lenP1, + input.iter ()
3236 | hb_map (mapping));
3237
3238 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3239 if (lookahead_map) mapping = lookahead_map;
3240 serialize_array (c, lookahead.len, + lookahead.iter ()
3241 | hb_map (mapping));
3242
3243 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3244
3245 HBUINT16* lookupCount = c->embed (&(lookup.len));
3246 if (!lookupCount) return_trace (false);
3247
3248 unsigned count = serialize_lookuprecord_array (c, lookup.as_array (), lookup_map);
3249 return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
3250 }
3251
3252 bool subset (hb_subset_context_t *c,
3253 const hb_map_t *lookup_map,
3254 const hb_map_t *backtrack_map = nullptr,
3255 const hb_map_t *input_map = nullptr,
3256 const hb_map_t *lookahead_map = nullptr) const
3257 {
3258 TRACE_SUBSET (this);
3259
3260 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3261 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3262
3263 if (!backtrack_map)
3264 {
3265 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
3266 if (!hb_all (backtrack, glyphset) ||
3267 !hb_all (input, glyphset) ||
3268 !hb_all (lookahead, glyphset))
3269 return_trace (false);
3270
3271 serialize (c->serializer, lookup_map, c->plan->glyph_map);
3272 }
3273 else
3274 {
3275 if (!hb_all (backtrack, backtrack_map) ||
3276 !hb_all (input, input_map) ||
3277 !hb_all (lookahead, lookahead_map))
3278 return_trace (false);
3279
3280 serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
3281 }
3282
3283 return_trace (true);
3284 }
3285
3286 bool sanitize (hb_sanitize_context_t *c) const
3287 {
3288 TRACE_SANITIZE (this);
3289 /* Hyper-optimized sanitize because this is really hot. */
3290 if (unlikely (!backtrack.len.sanitize (c))) return_trace (false);
3291 hb_barrier ();
3292 const auto &input = StructAfter<decltype (inputX)> (backtrack);
3293 if (unlikely (!input.lenP1.sanitize (c))) return_trace (false);
3294 hb_barrier ();
3295 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3296 if (unlikely (!lookahead.len.sanitize (c))) return_trace (false);
3297 hb_barrier ();
3298 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3299 return_trace (likely (lookup.sanitize (c)));
3300 }
3301
3302 protected:
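/* Only backtrack is directly addressable; the X-suffixed members are
 * placeholders for variable-length arrays that follow it in the data
 * and are located at run time via StructAfter<> (see accessors above). */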
3303 Array16Of<typename Types::HBUINT>
3304 backtrack; /* Array of backtracking values
3305 * (to be matched before the input
3306 * sequence) */
3307 HeadlessArray16Of<typename Types::HBUINT>
3308 inputX; /* Array of input values (start with
3309 * second glyph) */
3310 Array16Of<typename Types::HBUINT>
3311 lookaheadX; /* Array of lookahead values (to be
3312 * matched after the input sequence) */
3313 Array16Of<LookupRecord>
3314 lookupX; /* Array of LookupRecords--in
3315 * design order */
3316 public:
3317 DEFINE_SIZE_MIN (8);
3318 };
3319
3320 template <typename Types>
3321 struct ChainRuleSet
3322 {
3323 using ChainRule = OT::ChainRule<Types>;
3324
3325 bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
3326 {
3327 return
3328 + hb_iter (rule)
3329 | hb_map (hb_add (this))
3330 | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
3331 | hb_any
3332 ;
3333 }
3334 void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
3335 {
3336 if (unlikely (c->lookup_limit_exceeded ())) return;
3337
3338 return
3339 + hb_iter (rule)
3340 | hb_map (hb_add (this))
3341 | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
3342 ;
3343 }
3344
3345 void closure_lookups (hb_closure_lookups_context_t *c,
3346 ChainContextClosureLookupContext &lookup_context) const
3347 {
3348 if (unlikely (c->lookup_limit_exceeded ())) return;
3349
3350 + hb_iter (rule)
3351 | hb_map (hb_add (this))
3352 | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
3353 ;
3354 }
3355
3356 void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
3357 {
3358 return
3359 + hb_iter (rule)
3360 | hb_map (hb_add (this))
3361 | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
3362 ;
3363 }
3364
3365 bool would_apply (hb_would_apply_context_t *c,
3366 const ChainContextApplyLookupContext &lookup_context) const
3367 {
3368 return
3369 + hb_iter (rule)
3370 | hb_map (hb_add (this))
3371 | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
3372 | hb_any
3373 ;
3374 }
3375
3376 bool apply (hb_ot_apply_context_t *c,
3377 const ChainContextApplyLookupContext &lookup_context) const
3378 {
3379 TRACE_APPLY (this);
3380
3381 unsigned num_rules = rule.len;
3382
3383 #ifndef HB_NO_OT_RULESETS_FAST_PATH
3384 if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
3385 #endif
3386 {
3387 slow:
3388 return_trace (
3389 + hb_iter (rule)
3390 | hb_map (hb_add (this))
3391 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
3392 | hb_any
3393 )
3394 ;
3395 }
3396
3397 /* This version is optimized for speed by matching the first & second
3398 * components of the rule here, instead of calling into the matching code.
3399 *
3400 * Replicated from LigatureSet::apply(). */
3401
3402 /* If the input skippy has non-auto joiner behavior (as in Indic shapers),
3403 * skip this fast path, as we don't distinguish between input & lookahead
3404 * matching in the fast path.
3405 *
3406 * https://github.com/harfbuzz/harfbuzz/issues/4813
3407 */
3408 if (!c->auto_zwnj || !c->auto_zwj)
3409 goto slow;
3410
3411 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
3412 skippy_iter.reset (c->buffer->idx);
3413 skippy_iter.set_match_func (match_always, nullptr);
3414 skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
3415 unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
3416 hb_glyph_info_t *first = nullptr, *second = nullptr;
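/* unsafe_to1/unsafe_to2 record the positions just past the first/second
 * candidate glyphs; whichever of the two causes a rule to be rejected
 * determines the unsafe-to-concat range flushed below. */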
3417 bool matched = skippy_iter.next ();
3418 if (likely (matched))
3419 {
3420 first = &c->buffer->info[skippy_iter.idx];
3421 unsafe_to1 = skippy_iter.idx + 1;
3422
3423 if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
3424 {
3425 /* Can't use the fast path if e.g. the next char is a default-ignorable
3426 * or other skippable. */
3427 goto slow;
3428 }
3429 }
3430 else
3431 {
3432 /* Failed to match the next glyph. Only try applying rules that have
3433 * no further input and no lookahead. */
3434 return_trace (
3435 + hb_iter (rule)
3436 | hb_map (hb_add (this))
3437 | hb_filter ([&] (const ChainRule &_)
3438 {
3439 const auto &input = StructAfter<decltype (_.inputX)> (_.backtrack);
3440 const auto &lookahead = StructAfter<decltype (_.lookaheadX)> (input);
3441 return input.lenP1 <= 1 && lookahead.len == 0;
3442 })
3443 | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
3444 | hb_any
3445 )
3446 ;
3447 }
3448 matched = skippy_iter.next ();
3449 if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
3450 {
3451 second = &c->buffer->info[skippy_iter.idx];
3452 unsafe_to2 = skippy_iter.idx + 1;
3453 }
3454
3455 auto match_input = lookup_context.funcs.match[1];
3456 auto match_lookahead = lookup_context.funcs.match[2];
3457 auto *input_data = lookup_context.match_data[1];
3458 auto *lookahead_data = lookup_context.match_data[2];
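/* Prefilter each rule by testing the first (and, if matched, second)
 * glyph against its input sequence, spilling into its lookahead when
 * the input is exhausted, before paying for a full apply (). */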
3459 for (unsigned int i = 0; i < num_rules; i++)
3460 {
3461 const auto &r = this+rule.arrayZ[i];
3462
3463 const auto &input = StructAfter<decltype (r.inputX)> (r.backtrack);
3464 const auto &lookahead = StructAfter<decltype (r.lookaheadX)> (input);
3465
3466 unsigned lenP1 = hb_max ((unsigned) input.lenP1, 1u);
3467 if (lenP1 > 1 ?
3468 (!match_input ||
3469 match_input (*first, input.arrayZ[0], input_data))
3470 :
3471 (!lookahead.len || !match_lookahead ||
3472 match_lookahead (*first, lookahead.arrayZ[0], lookahead_data)))
3473 {
3474 if (!second ||
3475 (lenP1 > 2 ?
3476 (!match_input ||
3477 match_input (*second, input.arrayZ[1], input_data))
3478 :
3479 (lookahead.len <= 2 - lenP1 || !match_lookahead ||
3480 match_lookahead (*second, lookahead.arrayZ[2 - lenP1], lookahead_data))))
3481 {
3482 if (r.apply (c, lookup_context))
3483 {
3484 if (unsafe_to != (unsigned) -1)
3485 c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
3486 return_trace (true);
3487 }
3488 }
3489 else
3490 unsafe_to = unsafe_to2;
3491 }
3492 else
3493 {
3494 if (unsafe_to == (unsigned) -1)
3495 unsafe_to = unsafe_to1;
3496 }
3497 }
3498 if (likely (unsafe_to != (unsigned) -1))
3499 c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
3500
3501 return_trace (false);
3502 }
3503
3504 bool subset (hb_subset_context_t *c,
3505 const hb_map_t *lookup_map,
3506 const hb_map_t *backtrack_klass_map = nullptr,
3507 const hb_map_t *input_klass_map = nullptr,
3508 const hb_map_t *lookahead_klass_map = nullptr) const
3509 {
3510 TRACE_SUBSET (this);
3511
3512 auto snap = c->serializer->snapshot ();
3513 auto *out = c->serializer->start_embed (*this);
3514 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3515
3516 for (const Offset16To<ChainRule>& _ : rule)
3517 {
3518 if (!_) continue;
3519 auto o_snap = c->serializer->snapshot ();
3520 auto *o = out->rule.serialize_append (c->serializer);
3521 if (unlikely (!o)) continue;
3522
3523 if (!o->serialize_subset (c, _, this,
3524 lookup_map,
3525 backtrack_klass_map,
3526 input_klass_map,
3527 lookahead_klass_map))
3528 {
3529 out->rule.pop ();
3530 c->serializer->revert (o_snap);
3531 }
3532 }
3533
3534 bool ret = bool (out->rule);
3535 if (!ret) c->serializer->revert (snap);
3536
3537 return_trace (ret);
3538 }
3539
3540 bool sanitize (hb_sanitize_context_t *c) const
3541 {
3542 TRACE_SANITIZE (this);
3543 return_trace (rule.sanitize (c, this));
3544 }
3545
3546 protected:
3547 Array16OfOffset16To<ChainRule>
3548 rule; /* Array of ChainRule tables
3549 * ordered by preference */
3550 public:
3551 DEFINE_SIZE_ARRAY (2, rule);
3552 };
3553
3554 template <typename Types>
3555 struct ChainContextFormat1_4
3556 {
3557 using ChainRuleSet = OT::ChainRuleSet<Types>;
3558
3559 bool intersects (const hb_set_t *glyphs) const
3560 {
3561 struct ChainContextClosureLookupContext lookup_context = {
3562 {intersects_glyph, intersected_glyph},
3563 ContextFormat::SimpleContext,
3564 {nullptr, nullptr, nullptr}
3565 };
3566
3567 return
3568 + hb_zip (this+coverage, ruleSet)
3569 | hb_filter (*glyphs, hb_first)
3570 | hb_map (hb_second)
3571 | hb_map (hb_add (this))
3572 | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
3573 | hb_any
3574 ;
3575 }
3576
3577 bool may_have_non_1to1 () const
3578 { return true; }
3579
3580 void closure (hb_closure_context_t *c) const
3581 {
3582 hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
3583 if (unlikely (!cur_active_glyphs)) return;
3584 get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
3585 *cur_active_glyphs);
3586
3587 struct ChainContextClosureLookupContext lookup_context = {
3588 {intersects_glyph, intersected_glyph},
3589 ContextFormat::SimpleContext,
3590 {nullptr, nullptr, nullptr}
3591 };
3592
3593 + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
3594 | hb_filter ([&] (hb_codepoint_t _) {
3595 return c->previous_parent_active_glyphs ().has (_);
3596 }, hb_first)
3597 | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
3598 | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
3599 ;
3600
3601 c->pop_cur_done_glyphs ();
3602 }
3603
3604 void closure_lookups (hb_closure_lookups_context_t *c) const
3605 {
3606 struct ChainContextClosureLookupContext lookup_context = {
3607 {intersects_glyph, nullptr},
3608 ContextFormat::SimpleContext,
3609 {nullptr, nullptr, nullptr}
3610 };
3611
3612 + hb_zip (this+coverage, ruleSet)
3613 | hb_filter (*c->glyphs, hb_first)
3614 | hb_map (hb_second)
3615 | hb_map (hb_add (this))
3616 | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
3617 ;
3618 }
3619
3620 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3621
3622 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3623 {
3624 (this+coverage).collect_coverage (c->input);
3625
3626 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3627 {collect_glyph},
3628 {nullptr, nullptr, nullptr}
3629 };
3630
3631 + hb_iter (ruleSet)
3632 | hb_map (hb_add (this))
3633 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3634 ;
3635 }
3636
3637 bool would_apply (hb_would_apply_context_t *c) const
3638 {
3639 const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
3640 struct ChainContextApplyLookupContext lookup_context = {
3641 {{match_glyph, match_glyph, match_glyph}},
3642 {nullptr, nullptr, nullptr}
3643 };
3644 return rule_set.would_apply (c, lookup_context);
3645 }
3646
3647 const Coverage &get_coverage () const { return this+coverage; }
3648
3649 bool apply (hb_ot_apply_context_t *c) const
3650 {
3651 TRACE_APPLY (this);
3652 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3653 if (likely (index == NOT_COVERED)) return_trace (false);
3654
3655 const ChainRuleSet &rule_set = this+ruleSet[index];
3656 struct ChainContextApplyLookupContext lookup_context = {
3657 {{match_glyph, match_glyph, match_glyph}},
3658 {nullptr, nullptr, nullptr}
3659 };
3660 return_trace (rule_set.apply (c, lookup_context));
3661 }
3662
3663 bool subset (hb_subset_context_t *c) const
3664 {
3665 TRACE_SUBSET (this);
3666 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
3667 const hb_map_t &glyph_map = *c->plan->glyph_map;
3668
3669 auto *out = c->serializer->start_embed (*this);
3670 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3671 out->format = format;
3672
3673 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
3674 hb_sorted_vector_t<hb_codepoint_t> new_coverage;
3675 + hb_zip (this+coverage, ruleSet)
3676 | hb_filter (glyphset, hb_first)
3677 | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
3678 | hb_map (hb_first)
3679 | hb_map (glyph_map)
3680 | hb_sink (new_coverage)
3681 ;
3682
3683 out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
3684 return_trace (bool (new_coverage));
3685 }
3686
3687 bool sanitize (hb_sanitize_context_t *c) const
3688 {
3689 TRACE_SANITIZE (this);
3690 return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
3691 }
3692
3693 protected:
3694 HBUINT16 format; /* Format identifier--format = 1 */
3695 typename Types::template OffsetTo<Coverage>
3696 coverage; /* Offset to Coverage table--from
3697 * beginning of table */
3698 Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
3699 ruleSet; /* Array of ChainRuleSet tables
3700 * ordered by Coverage Index */
3701 public:
3702 DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
3703 };
3704
3705 template <typename Types>
3706 struct ChainContextFormat2_5
3707 {
3708 using ChainRuleSet = OT::ChainRuleSet<SmallTypes>;
3709
3710 bool intersects (const hb_set_t *glyphs) const
3711 {
3712 if (!(this+coverage).intersects (glyphs))
3713 return false;
3714
3715 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3716 const ClassDef &input_class_def = this+inputClassDef;
3717 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3718
3719 hb_map_t caches[3] = {};
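/* Per-ClassDef caches memoizing intersects_class () queries. */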
3720 struct ChainContextClosureLookupContext lookup_context = {
3721 {intersects_class, nullptr},
3722 ContextFormat::ClassBasedContext,
3723 {&backtrack_class_def,
3724 &input_class_def,
3725 &lookahead_class_def},
3726 {&caches[0], &caches[1], &caches[2]}
3727 };
3728
3729 hb_set_t retained_coverage_glyphs;
3730 (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);
3731
3732 hb_set_t coverage_glyph_classes;
3733 input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3734
3735 return
3736 + hb_iter (ruleSet)
3737 | hb_map (hb_add (this))
3738 | hb_enumerate
3739 | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
3740 { return input_class_def.intersects_class (glyphs, p.first) &&
3741 coverage_glyph_classes.has (p.first) &&
3742 p.second.intersects (glyphs, lookup_context); })
3743 | hb_any
3744 ;
3745 }
3746
3747 bool may_have_non_1to1 () const
3748 { return true; }
3749
3750 void closure (hb_closure_context_t *c) const
3751 {
3752 if (!(this+coverage).intersects (c->glyphs))
3753 return;
3754
3755 hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
3756 if (unlikely (!cur_active_glyphs)) return;
3757 get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
3758 *cur_active_glyphs);
3759
3760 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3761 const ClassDef &input_class_def = this+inputClassDef;
3762 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3763
3764 hb_map_t caches[3] = {};
3765 intersected_class_cache_t intersected_cache;
3766 struct ChainContextClosureLookupContext lookup_context = {
3767 {intersects_class, intersected_class_glyphs},
3768 ContextFormat::ClassBasedContext,
3769 {&backtrack_class_def,
3770 &input_class_def,
3771 &lookahead_class_def},
3772 {&caches[0], &caches[1], &caches[2]},
3773 &intersected_cache
3774 };
3775
3776 + hb_enumerate (ruleSet)
3777 | hb_filter ([&] (unsigned _)
3778 { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
3779 hb_first)
3780 | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<ChainRuleSet>&> _)
3781 {
3782 const ChainRuleSet& chainrule_set = this+_.second;
3783 chainrule_set.closure (c, _.first, lookup_context);
3784 })
3785 ;
3786
3787 c->pop_cur_done_glyphs ();
3788 }
3789
3790 void closure_lookups (hb_closure_lookups_context_t *c) const
3791 {
3792 if (!(this+coverage).intersects (c->glyphs))
3793 return;
3794
3795 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3796 const ClassDef &input_class_def = this+inputClassDef;
3797 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3798
3799 hb_map_t caches[3] = {};
3800 struct ChainContextClosureLookupContext lookup_context = {
3801 {intersects_class, nullptr},
3802 ContextFormat::ClassBasedContext,
3803 {&backtrack_class_def,
3804 &input_class_def,
3805 &lookahead_class_def},
3806 {&caches[0], &caches[1], &caches[2]}
3807 };
3808
3809 + hb_iter (ruleSet)
3810 | hb_map (hb_add (this))
3811 | hb_enumerate
3812 | hb_filter([&] (unsigned klass)
3813 { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
3814 | hb_map (hb_second)
3815 | hb_apply ([&] (const ChainRuleSet &_)
3816 { _.closure_lookups (c, lookup_context); })
3817 ;
3818 }
3819
3820 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3821
3822 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3823 {
3824 (this+coverage).collect_coverage (c->input);
3825
3826 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3827 const ClassDef &input_class_def = this+inputClassDef;
3828 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3829
3830 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3831 {collect_class},
3832 {&backtrack_class_def,
3833 &input_class_def,
3834 &lookahead_class_def}
3835 };
3836
3837 + hb_iter (ruleSet)
3838 | hb_map (hb_add (this))
3839 | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3840 ;
3841 }
3842
3843 bool would_apply (hb_would_apply_context_t *c) const
3844 {
3845 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3846 const ClassDef &input_class_def = this+inputClassDef;
3847 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3848
3849 unsigned int index = input_class_def.get_class (c->glyphs[0]);
3850 const ChainRuleSet &rule_set = this+ruleSet[index];
3851 struct ChainContextApplyLookupContext lookup_context = {
3852 {{match_class, match_class, match_class}},
3853 {&backtrack_class_def,
3854 &input_class_def,
3855 &lookahead_class_def}
3856 };
3857 return rule_set.would_apply (c, lookup_context);
3858 }
3859
3860 const Coverage &get_coverage () const { return this+coverage; }
3861
3862 unsigned cache_cost () const
3863 {
3864 unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len;
3865 return c >= 4 ? c : 0;
3866 }
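/* Note: cache_func () below repurposes the per-glyph syllable() byte as
 * a glyph-class cache for _apply (): the high nibble corresponds to
 * match_class_cached2 (input class; see the note in _apply ()) and the
 * low nibble to match_class_cached1; 255 marks both as not-yet-cached. */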
3867 bool cache_func (hb_ot_apply_context_t *c, bool enter) const
3868 {
3869 if (enter)
3870 {
3871 if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
3872 return false;
3873 auto &info = c->buffer->info;
3874 unsigned count = c->buffer->len;
3875 for (unsigned i = 0; i < count; i++)
3876 info[i].syllable() = 255;
3877 c->new_syllables = 255;
3878 return true;
3879 }
3880 else
3881 {
3882 c->new_syllables = (unsigned) -1;
3883 HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
3884 return true;
3885 }
3886 }
3887
3888 bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
3889 bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
3890 bool _apply (hb_ot_apply_context_t *c, bool cached) const
3891 {
3892 TRACE_APPLY (this);
3893 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3894 if (likely (index == NOT_COVERED)) return_trace (false);
3895
3896 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3897 const ClassDef &input_class_def = this+inputClassDef;
3898 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3899
3900 /* match_class_cached1 is slightly faster. Use it for lookahead,
3901 * which is typically longer. */
3902 struct ChainContextApplyLookupContext lookup_context = {
3903 {{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached1 : match_class,
3904 cached ? match_class_cached2 : match_class,
3905 cached ? match_class_cached1 : match_class}},
3906 {&backtrack_class_def,
3907 &input_class_def,
3908 &lookahead_class_def}
3909 };
3910
3911 // Note: Corresponds to match_class_cached2
3912 if (cached && ((c->buffer->cur().syllable() & 0xF0) >> 4) < 15)
3913 index = (c->buffer->cur().syllable () & 0xF0) >> 4;
3914 else
3915 index = input_class_def.get_class (c->buffer->cur().codepoint);
3916 const ChainRuleSet &rule_set = this+ruleSet[index];
3917 return_trace (rule_set.apply (c, lookup_context));
3918 }
3919
3920 bool subset (hb_subset_context_t *c) const
3921 {
3922 TRACE_SUBSET (this);
3923 auto *out = c->serializer->start_embed (*this);
3924 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3925 out->format = format;
3926 out->coverage.serialize_subset (c, coverage, this);
3927
3928 hb_map_t backtrack_klass_map;
3929 hb_map_t input_klass_map;
3930 hb_map_t lookahead_klass_map;
3931
3932 out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
3933 // TODO: subset inputClassDef based on glyphs that survived Coverage subsetting
3934 out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
3935 out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
3936
3937 if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
3938 input_klass_map,
3939 lookahead_klass_map)))
3940 return_trace (false);
3941
3942 const hb_set_t* glyphset = c->plan->glyphset_gsub ();
3943 hb_set_t retained_coverage_glyphs;
3944 (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);
3945
3946 hb_set_t coverage_glyph_classes;
3947 (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3948
3949 int non_zero_index = -1, index = 0;
3950 bool ret = true;
3951 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
3952 auto last_non_zero = c->serializer->snapshot ();
3953 for (const auto& _ : + hb_enumerate (ruleSet)
3954 | hb_filter (input_klass_map, hb_first))
3955 {
3956 auto *o = out->ruleSet.serialize_append (c->serializer);
3957 if (unlikely (!o))
3958 {
3959 ret = false;
3960 break;
3961 }
3962 if (coverage_glyph_classes.has (_.first) &&
3963 o->serialize_subset (c, _.second, this,
3964 lookup_map,
3965 &backtrack_klass_map,
3966 &input_klass_map,
3967 &lookahead_klass_map))
3968 {
3969 last_non_zero = c->serializer->snapshot ();
3970 non_zero_index = index;
3971 }
3972
3973 index++;
3974 }
3975
3976 if (!ret || non_zero_index == -1) return_trace (false);
3977
3978 // prune empty trailing ruleSets
3979 if (index > non_zero_index) {
3980 c->serializer->revert (last_non_zero);
3981 out->ruleSet.len = non_zero_index + 1;
3982 }
3983
3984 return_trace (bool (out->ruleSet));
3985 }
3986
3987 bool sanitize (hb_sanitize_context_t *c) const
3988 {
3989 TRACE_SANITIZE (this);
3990 return_trace (coverage.sanitize (c, this) &&
3991 backtrackClassDef.sanitize (c, this) &&
3992 inputClassDef.sanitize (c, this) &&
3993 lookaheadClassDef.sanitize (c, this) &&
3994 ruleSet.sanitize (c, this));
3995 }
3996
3997 protected:
3998 HBUINT16 format; /* Format identifier--format = 2 */
3999 typename Types::template OffsetTo<Coverage>
4000 coverage; /* Offset to Coverage table--from
4001 * beginning of table */
4002 typename Types::template OffsetTo<ClassDef>
4003 backtrackClassDef; /* Offset to glyph ClassDef table
4004 * containing backtrack sequence
4005 * data--from beginning of table */
4006 typename Types::template OffsetTo<ClassDef>
4007 inputClassDef; /* Offset to glyph ClassDef
4008 * table containing input sequence
4009 * data--from beginning of table */
4010 typename Types::template OffsetTo<ClassDef>
4011 lookaheadClassDef; /* Offset to glyph ClassDef table
4012 * containing lookahead sequence
4013 * data--from beginning of table */
4014 Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
4015 ruleSet; /* Array of ChainRuleSet tables
4016 * ordered by class */
4017 public:
4018 DEFINE_SIZE_ARRAY (4 + 4 * Types::size, ruleSet);
4019 };
4020
4021 struct ChainContextFormat3
4022 {
4023 using RuleSet = OT::RuleSet<SmallTypes>;
4024
4025 bool intersects (const hb_set_t *glyphs) const
4026 {
4027 const auto &input = StructAfter<decltype (inputX)> (backtrack);
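/* Format 3 has no standalone coverage table; the coverage of the first
 * input position doubles as this subtable's coverage (cf. get_coverage). */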
4028
4029 if (!(this+input[0]).intersects (glyphs))
4030 return false;
4031
4032 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4033 struct ChainContextClosureLookupContext lookup_context = {
4034 {intersects_coverage, nullptr},
4035 ContextFormat::CoverageBasedContext,
4036 {this, this, this}
4037 };
4038 return chain_context_intersects (glyphs,
4039 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4040 input.len, (const HBUINT16 *) input.arrayZ + 1,
4041 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4042 lookup_context);
4043 }
4044
4045 bool may_have_non_1to1 () const
4046 { return true; }
4047
4048 void closure (hb_closure_context_t *c) const
4049 {
4050 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4051
4052 if (!(this+input[0]).intersects (c->glyphs))
4053 return;
4054
4055 hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
4056 if (unlikely (!cur_active_glyphs))
4057 return;
4058 get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
4059 *cur_active_glyphs);
4060
4061 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4062 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4063 struct ChainContextClosureLookupContext lookup_context = {
4064 {intersects_coverage, intersected_coverage_glyphs},
4065 ContextFormat::CoverageBasedContext,
4066 {this, this, this}
4067 };
4068 chain_context_closure_lookup (c,
4069 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4070 input.len, (const HBUINT16 *) input.arrayZ + 1,
4071 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4072 lookup.len, lookup.arrayZ,
4073 0, lookup_context);
4074
4075 c->pop_cur_done_glyphs ();
4076 }
4077
4078 void closure_lookups (hb_closure_lookups_context_t *c) const
4079 {
4080 if (!intersects (c->glyphs))
4081 return;
4082
4083 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4084 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4085 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4086 recurse_lookups (c, lookup.len, lookup.arrayZ);
4087 }
4088
4089 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
4090
4091 void collect_glyphs (hb_collect_glyphs_context_t *c) const
4092 {
4093 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4094
4095 (this+input[0]).collect_coverage (c->input);
4096
4097 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4098 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4099
4100 struct ChainContextCollectGlyphsLookupContext lookup_context = {
4101 {collect_coverage},
4102 {this, this, this}
4103 };
4104 chain_context_collect_glyphs_lookup (c,
4105 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4106 input.len, (const HBUINT16 *) input.arrayZ + 1,
4107 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4108 lookup.len, lookup.arrayZ,
4109 lookup_context);
4110 }
4111
4112 bool would_apply (hb_would_apply_context_t *c) const
4113 {
4114 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4115 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4116 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4117 struct ChainContextApplyLookupContext lookup_context = {
4118 {{match_coverage, match_coverage, match_coverage}},
4119 {this, this, this}
4120 };
4121 return chain_context_would_apply_lookup (c,
4122 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4123 input.len, (const HBUINT16 *) input.arrayZ + 1,
4124 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4125 lookup.len, lookup.arrayZ, lookup_context);
4126 }
4127
4128 const Coverage &get_coverage () const
4129 {
4130 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4131 return this+input[0];
4132 }
4133
4134 bool apply (hb_ot_apply_context_t *c) const
4135 {
4136 TRACE_APPLY (this);
4137 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4138
4139 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
4140 if (likely (index == NOT_COVERED)) return_trace (false);
4141
4142 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4143 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4144 struct ChainContextApplyLookupContext lookup_context = {
4145 {{match_coverage, match_coverage, match_coverage}},
4146 {this, this, this}
4147 };
4148 return_trace (chain_context_apply_lookup (c,
4149 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4150 input.len, (const HBUINT16 *) input.arrayZ + 1,
4151 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4152 lookup.len, lookup.arrayZ, lookup_context));
4153 }
4154
4155 template<typename Iterator,
4156 hb_requires (hb_is_iterator (Iterator))>
4157 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
4158 {
4159 TRACE_SERIALIZE (this);
4160 auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
4161
4162 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
4163 return_trace (false);
4164
4165 for (auto& offset : it) {
4166 auto *o = out->serialize_append (c->serializer);
4167 if (unlikely (!o) || !o->serialize_subset (c, offset, base))
4168 return_trace (false);
4169 }
4170
4171 return_trace (true);
4172 }
4173
4174 bool subset (hb_subset_context_t *c) const
4175 {
4176 TRACE_SUBSET (this);
4177
4178 if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
4179
4180 if (!serialize_coverage_offsets (c, backtrack.iter (), this))
4181 return_trace (false);
4182
4183 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4184 if (!serialize_coverage_offsets (c, input.iter (), this))
4185 return_trace (false);
4186
4187 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4188 if (!serialize_coverage_offsets (c, lookahead.iter (), this))
4189 return_trace (false);
4190
4191 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4192 const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
4193
4194 HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookup.len);
4195 if (!lookupCount) return_trace (false);
4196
4197 unsigned count = serialize_lookuprecord_array (c->serializer, lookup.as_array (), lookup_map);
4198 return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
4199 }
4200
4201 bool sanitize (hb_sanitize_context_t *c) const
4202 {
4203 TRACE_SANITIZE (this);
4204 if (unlikely (!backtrack.sanitize (c, this))) return_trace (false);
4205 hb_barrier ();
4206 const auto &input = StructAfter<decltype (inputX)> (backtrack);
4207 if (unlikely (!input.sanitize (c, this))) return_trace (false);
4208 hb_barrier ();
4209 if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */
4210 const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4211 if (unlikely (!lookahead.sanitize (c, this))) return_trace (false);
4212 hb_barrier ();
4213 const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4214 return_trace (likely (lookup.sanitize (c)));
4215 }
4216
4217 protected:
4218 HBUINT16 format; /* Format identifier--format = 3 */
4219 Array16OfOffset16To<Coverage>
4220 backtrack; /* Array of coverage tables
4221 * in backtracking sequence, in glyph
4222 * sequence order */
4223 Array16OfOffset16To<Coverage>
4224 inputX; /* Array of coverage
4225 * tables in input sequence, in glyph
4226 * sequence order */
4227 Array16OfOffset16To<Coverage>
4228 lookaheadX; /* Array of coverage tables
4229 * in lookahead sequence, in glyph
4230 * sequence order */
4231 Array16Of<LookupRecord>
4232 lookupX; /* Array of LookupRecords--in
4233 * design order */
4234 public:
4235 DEFINE_SIZE_MIN (10);
4236 };
4237
4238 struct ChainContext
4239 {
4240 template <typename context_t, typename ...Ts>
4241 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
4242 {
4243 if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
4244 TRACE_DISPATCH (this, u.format);
4245 switch (u.format) {
4246 case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
4247 case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
4248 case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
4249 #ifndef HB_NO_BEYOND_64K
4250 case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
4251 case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
4252 #endif
4253 default:return_trace (c->default_return_value ());
4254 }
4255 }
4256
4257 protected:
4258 union {
4259 HBUINT16 format; /* Format identifier */
4260 ChainContextFormat1_4<SmallTypes> format1;
4261 ChainContextFormat2_5<SmallTypes> format2;
4262 ChainContextFormat3 format3;
4263 #ifndef HB_NO_BEYOND_64K
4264 ChainContextFormat1_4<MediumTypes> format4;
4265 ChainContextFormat2_5<MediumTypes> format5;
4266 #endif
4267 } u;
4268 };
4269
4270
4271 template <typename T>
4272 struct ExtensionFormat1
4273 {
4274 unsigned int get_type () const { return extensionLookupType; }
4275
4276 template <typename X>
4277 const X& get_subtable () const
4278 { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }
4279
4280 template <typename context_t, typename ...Ts>
4281 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
4282 {
4283 if (unlikely (!c->may_dispatch (this, this))) return c->no_dispatch_return_value ();
4284 TRACE_DISPATCH (this, format);
4285 return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
4286 }
4287
4288 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
4289 { dispatch (c); }
4290
4291 /* This is called from may_dispatch() above with hb_sanitize_context_t. */
4292 bool sanitize (hb_sanitize_context_t *c) const
4293 {
4294 TRACE_SANITIZE (this);
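/* Reject extension subtables that themselves claim the Extension lookup
 * type, as following such a subtable could recurse without bound. */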
4295 return_trace (c->check_struct (this) &&
4296 hb_barrier () &&
4297 extensionLookupType != T::SubTable::Extension);
4298 }
4299
4300 bool subset (hb_subset_context_t *c) const
4301 {
4302 TRACE_SUBSET (this);
4303
4304 auto *out = c->serializer->start_embed (this);
4305 if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
4306
4307 out->format = format;
4308 out->extensionLookupType = extensionLookupType;
4309
4310 const auto& src_offset =
4311 reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
4312 auto& dest_offset =
4313 reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);
4314
4315 return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
4316 }
4317
4318 protected:
4319 HBUINT16 format; /* Format identifier. Set to 1. */
4320 HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
4321 * by ExtensionOffset (i.e. the
4322 * extension subtable). */
4323 Offset32 extensionOffset; /* Offset to the extension subtable,
4324 * of lookup type subtable. */
4325 public:
4326 DEFINE_SIZE_STATIC (8);
4327 };
4328
4329 template <typename T>
4330 struct Extension
4331 {
4332 unsigned int get_type () const
4333 {
4334 switch (u.format) {
4335 case 1: hb_barrier (); return u.format1.get_type ();
4336 default:return 0;
4337 }
4338 }
4339 template <typename X>
4340 const X& get_subtable () const
4341 {
4342 switch (u.format) {
4343 case 1: hb_barrier (); return u.format1.template get_subtable<typename T::SubTable> ();
4344 default:return Null (typename T::SubTable);
4345 }
4346 }
4347
4348 // Specialization of dispatch for subset. dispatch() normally just
4349 // dispatches to the subtable this points to, but for subset
4350 // we need to run subset on this subtable too.
4351 template <typename ...Ts>
4352 typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
4353 {
4354 switch (u.format) {
4355 case 1: hb_barrier (); return u.format1.subset (c);
4356 default: return c->default_return_value ();
4357 }
4358 }
4359
4360 template <typename context_t, typename ...Ts>
4361 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
4362 {
4363 if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
4364 TRACE_DISPATCH (this, u.format);
4365 switch (u.format) {
4366 case 1: hb_barrier (); return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
4367 default:return_trace (c->default_return_value ());
4368 }
4369 }
4370
4371 protected:
4372 union {
4373 HBUINT16 format; /* Format identifier */
4374 ExtensionFormat1<T> format1;
4375 } u;
4376 };
4377
4378
4379 /*
4380 * GSUB/GPOS Common
4381 */
4382
4383 struct hb_ot_layout_lookup_accelerator_t
4384 {
4385 template <typename TLookup>
4386 static hb_ot_layout_lookup_accelerator_t *create (const TLookup &lookup)
4387 {
4388 unsigned count = lookup.get_subtable_count ();
4389
4390 unsigned size = sizeof (hb_ot_layout_lookup_accelerator_t) -
4391 HB_VAR_ARRAY * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t) +
4392 count * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t);
4393
4394 /* The following is a calloc because when we are collecting subtables,
4395 * some of them might be invalid and hence not collected; as a result,
4396 * we might not fill in all the count entries of the subtables array.
4397 * Zeroing it allows the set digest to gatekeep it without having to
4398 * initialize it further. */
4399 auto *thiz = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (1, size);
4400 if (unlikely (!thiz))
4401 return nullptr;
4402
4403 hb_accelerate_subtables_context_t c_accelerate_subtables (thiz->subtables);
4404 lookup.dispatch (&c_accelerate_subtables);
4405
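/* The accelerator's digest is the union of its subtables' digests: a
 * conservative may_have () filter for rejecting glyphs cheaply. */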
4406 thiz->digest.init ();
4407 for (auto& subtable : hb_iter (thiz->subtables, count))
4408 thiz->digest.union_ (subtable.digest);
4409
4410 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
4411 thiz->cache_user_idx = c_accelerate_subtables.cache_user_idx;
4412 for (unsigned i = 0; i < count; i++)
4413 if (i != thiz->cache_user_idx)
4414 thiz->subtables[i].apply_cached_func = thiz->subtables[i].apply_func;
4415 #endif
4416
4417 return thiz;
4418 }
4419
4420 bool may_have (hb_codepoint_t g) const
4421 { return digest.may_have (g); }
4422
4423 #ifndef HB_OPTIMIZE_SIZE
4424 HB_ALWAYS_INLINE
4425 #endif
4426 bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const
4427 {
4428 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
4429 if (use_cache)
4430 {
4431 return
4432 + hb_iter (hb_iter (subtables, subtables_count))
4433 | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
4434 | hb_any
4435 ;
4436 }
4437 else
4438 #endif
4439 {
4440 return
4441 + hb_iter (hb_iter (subtables, subtables_count))
4442 | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
4443 | hb_any
4444 ;
4445 }
4446 return false;
4447 }
4448
4449 bool cache_enter (hb_ot_apply_context_t *c) const
4450 {
4451 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
4452 return cache_user_idx != (unsigned) -1 &&
4453 subtables[cache_user_idx].cache_enter (c);
4454 #else
4455 return false;
4456 #endif
4457 }
4458 void cache_leave (hb_ot_apply_context_t *c) const
4459 {
4460 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
4461 subtables[cache_user_idx].cache_leave (c);
4462 #endif
4463 }
4464
4465
4466 hb_set_digest_t digest;
4467 private:
4468 #ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
4469 unsigned cache_user_idx = (unsigned) -1;
4470 #endif
4471 hb_accelerate_subtables_context_t::hb_applicable_t subtables[HB_VAR_ARRAY];
4472 };
4473
4474 template <typename Types>
4475 struct GSUBGPOSVersion1_2
4476 {
4477 friend struct GSUBGPOS;
4478
4479 protected:
4480 FixedVersion<> version; /* Version of the GSUB/GPOS table--initially set
4481 * to 0x00010000u */
4482 typename Types:: template OffsetTo<ScriptList>
4483 scriptList; /* ScriptList table */
4484 typename Types::template OffsetTo<FeatureList>
4485 featureList; /* FeatureList table */
4486 typename Types::template OffsetTo<LookupList<Types>>
4487 lookupList; /* LookupList table */
4488 Offset32To<FeatureVariations>
4489 featureVars; /* Offset to Feature Variations
4490 * table--from beginning of table
4491 * (may be NULL). Introduced
4492 * in version 0x00010001. */
4493 public:
4494 DEFINE_SIZE_MIN (4 + 3 * Types::size);
4495
4496 unsigned int get_size () const
4497 {
4498 return min_size +
4499 (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
4500 }
4501
4502 const typename Types::template OffsetTo<LookupList<Types>>* get_lookup_list_offset () const
4503 {
4504 return &lookupList;
4505 }
4506
4507 template <typename TLookup>
4508 bool sanitize (hb_sanitize_context_t *c) const
4509 {
4510 TRACE_SANITIZE (this);
4511 typedef List16OfOffsetTo<TLookup, typename Types::HBUINT> TLookupList;
4512 if (unlikely (!(scriptList.sanitize (c, this) &&
4513 featureList.sanitize (c, this) &&
4514 reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
4515 return_trace (false);
4516
4517 #ifndef HB_NO_VAR
4518 if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
4519 return_trace (false);
4520 #endif
4521
4522 return_trace (true);
4523 }
4524
4525 template <typename TLookup>
4526 bool subset (hb_subset_layout_context_t *c) const
4527 {
4528 TRACE_SUBSET (this);
4529
4530 auto *out = c->subset_context->serializer->start_embed (this);
4531 if (unlikely (!c->subset_context->serializer->extend_min (out))) return_trace (false);
4532
4533 out->version = version;
4534
4535 typedef LookupOffsetList<TLookup, typename Types::HBUINT> TLookupList;
4536 reinterpret_cast<typename Types::template OffsetTo<TLookupList> &> (out->lookupList)
4537 .serialize_subset (c->subset_context,
4538 reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList),
4539 this,
4540 c);
4541
4542 reinterpret_cast<typename Types::template OffsetTo<RecordListOfFeature> &> (out->featureList)
4543 .serialize_subset (c->subset_context,
4544 reinterpret_cast<const typename Types::template OffsetTo<RecordListOfFeature> &> (featureList),
4545 this,
4546 c);
4547
4548 out->scriptList.serialize_subset (c->subset_context,
4549 scriptList,
4550 this,
4551 c);
4552
4553 #ifndef HB_NO_VAR
4554 if (version.to_int () >= 0x00010001u)
4555 {
4556 auto snapshot = c->subset_context->serializer->snapshot ();
4557 if (!c->subset_context->serializer->extend_min (&out->featureVars))
4558 return_trace (false);
4559
4560 // If all axes are pinned, all feature variations are dropped.
4561 bool ret = !c->subset_context->plan->all_axes_pinned
4562 && out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
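/* If the variations table was dropped (or failed to serialize), roll
 * back and downgrade to version 1.0, which has no featureVars member. */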
4563 if (!ret && version.major == 1)
4564 {
4565 c->subset_context->serializer->revert (snapshot);
4566 out->version.major = 1;
4567 out->version.minor = 0;
4568 }
4569 }
4570 #endif
4571
4572 return_trace (true);
4573 }
4574 };
4575
4576 struct GSUBGPOS
4577 {
4578 unsigned int get_size () const
4579 {
4580 switch (u.version.major) {
4581 case 1: hb_barrier (); return u.version1.get_size ();
4582 #ifndef HB_NO_BEYOND_64K
4583 case 2: hb_barrier (); return u.version2.get_size ();
4584 #endif
4585 default: return u.version.static_size;
4586 }
4587 }
4588
4589 template <typename TLookup>
4590 bool sanitize (hb_sanitize_context_t *c) const
4591 {
4592 TRACE_SANITIZE (this);
4593 if (unlikely (!u.version.sanitize (c))) return_trace (false);
4594 hb_barrier ();
4595 switch (u.version.major) {
4596 case 1: hb_barrier (); return_trace (u.version1.sanitize<TLookup> (c));
4597 #ifndef HB_NO_BEYOND_64K
4598 case 2: hb_barrier (); return_trace (u.version2.sanitize<TLookup> (c));
4599 #endif
4600 default: return_trace (true);
4601 }
4602 }
4603
4604 template <typename TLookup>
4605 bool subset (hb_subset_layout_context_t *c) const
4606 {
4607 switch (u.version.major) {
4608 case 1: hb_barrier (); return u.version1.subset<TLookup> (c);
4609 #ifndef HB_NO_BEYOND_64K
4610 case 2: hb_barrier (); return u.version2.subset<TLookup> (c);
4611 #endif
4612 default: return false;
4613 }
4614 }
4615
4616 const ScriptList &get_script_list () const
4617 {
4618 switch (u.version.major) {
4619 case 1: hb_barrier (); return this+u.version1.scriptList;
4620 #ifndef HB_NO_BEYOND_64K
4621 case 2: hb_barrier (); return this+u.version2.scriptList;
4622 #endif
4623 default: return Null (ScriptList);
4624 }
4625 }
4626 const FeatureList &get_feature_list () const
4627 {
4628 switch (u.version.major) {
4629 case 1: hb_barrier (); return this+u.version1.featureList;
4630 #ifndef HB_NO_BEYOND_64K
4631 case 2: hb_barrier (); return this+u.version2.featureList;
4632 #endif
4633 default: return Null (FeatureList);
4634 }
4635 }
4636 unsigned int get_lookup_count () const
4637 {
4638 switch (u.version.major) {
4639 case 1: hb_barrier (); return (this+u.version1.lookupList).len;
4640 #ifndef HB_NO_BEYOND_64K
4641 case 2: hb_barrier (); return (this+u.version2.lookupList).len;
4642 #endif
4643 default: return 0;
4644 }
4645 }
4646 const Lookup& get_lookup (unsigned int i) const
4647 {
4648 switch (u.version.major) {
4649 case 1: hb_barrier (); return (this+u.version1.lookupList)[i];
4650 #ifndef HB_NO_BEYOND_64K
4651 case 2: hb_barrier (); return (this+u.version2.lookupList)[i];
4652 #endif
4653 default: return Null (Lookup);
4654 }
4655 }
4656 const FeatureVariations &get_feature_variations () const
4657 {
4658 switch (u.version.major) {
4659 case 1: hb_barrier (); return (u.version.to_int () >= 0x00010001u && hb_barrier () ? this+u.version1.featureVars : Null (FeatureVariations));
4660 #ifndef HB_NO_BEYOND_64K
4661 case 2: hb_barrier (); return this+u.version2.featureVars;
4662 #endif
4663 default: return Null (FeatureVariations);
4664 }
4665 }
4666
4667 bool has_data () const { return u.version.to_int (); }
4668 unsigned int get_script_count () const
4669 { return get_script_list ().len; }
4670 const Tag& get_script_tag (unsigned int i) const
4671 { return get_script_list ().get_tag (i); }
4672 unsigned int get_script_tags (unsigned int start_offset,
4673 unsigned int *script_count /* IN/OUT */,
4674 hb_tag_t *script_tags /* OUT */) const
4675 { return get_script_list ().get_tags (start_offset, script_count, script_tags); }
4676 const Script& get_script (unsigned int i) const
4677 { return get_script_list ()[i]; }
4678 bool find_script_index (hb_tag_t tag, unsigned int *index) const
4679 { return get_script_list ().find_index (tag, index); }
4680
4681 unsigned int get_feature_count () const
4682 { return get_feature_list ().len; }
4683 hb_tag_t get_feature_tag (unsigned int i) const
4684 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : get_feature_list ().get_tag (i); }
get_feature_tagsOT::GSUBGPOS4685 unsigned int get_feature_tags (unsigned int start_offset,
4686 unsigned int *feature_count /* IN/OUT */,
4687 hb_tag_t *feature_tags /* OUT */) const
4688 { return get_feature_list ().get_tags (start_offset, feature_count, feature_tags); }
get_featureOT::GSUBGPOS4689 const Feature& get_feature (unsigned int i) const
4690 { return get_feature_list ()[i]; }
find_feature_indexOT::GSUBGPOS4691 bool find_feature_index (hb_tag_t tag, unsigned int *index) const
4692 { return get_feature_list ().find_index (tag, index); }
4693
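  /* Variations glue.  With HB_NO_VAR the index is short-circuited to
   * NOT_FOUND_INDEX; otherwise FeatureVariations is searched for the first
   * record whose condition set matches the given normalized coordinates. */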
  bool find_variations_index (const int *coords, unsigned int num_coords,
                              unsigned int *index,
                              ItemVarStoreInstancer *instancer) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return get_feature_variations ().find_index (coords, num_coords, index, instancer);
  }
  const Feature& get_feature_variation (unsigned int feature_index,
                                        unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
        u.version.to_int () >= 0x00010001u)
    {
      const Feature *feature = get_feature_variations ().find_substitute (variations_index,
                                                                          feature_index);
      if (feature)
        return *feature;
    }
#endif
    return get_feature (feature_index);
  }

  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
                                          const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
                                          hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    get_feature_variations ().collect_lookups (feature_indexes, feature_record_cond_idx_map, lookup_indexes);
#endif
  }

#ifndef HB_NO_VAR
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  { get_feature_variations ().collect_feature_substitutes_with_variations (c); }
#endif

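  /* Computes the transitive closure of the requested lookups: lookups that
   * are reachable through recursion are added to the set, and lookups found
   * to be inactive over the glyph set are removed from it. */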
  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
                        const hb_set_t *glyphs,
                        hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);

    for (unsigned lookup_index : *lookup_indexes)
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }

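  /* Subsetting helper: visits every script retained in layout_scripts and
   * lets it prune its LangSys tables, accumulating surviving feature indexes
   * into new_feature_indexes and the per-script langsys survivors into
   * script_langsys_map. */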
  void prune_langsys (const hb_map_t *duplicate_feature_map,
                      const hb_set_t *layout_scripts,
                      hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
                      hb_set_t       *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Tag& tag = get_script_tag (script_index);
      if (!layout_scripts->has (tag)) continue;
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }

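  /* Drops features that no longer intersect any retained lookup.  Two special
   * cases below: 'pref' is always kept (its mere presence selects the Khmer
   * shaper), and 'size' is kept whenever its feature params are present. */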
  void prune_features (const hb_map_t *lookup_indices, /* IN */
                       const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
                       const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
                       hb_set_t       *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // The set of feature indices that have alternate versions defined in the
    // FeatureVariations table, where the alternate version(s) intersect the
    // set of lookup indices.
    hb_set_t alternate_feature_indices;
    get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error ()))
    {
      feature_indices->err ();
      return;
    }
#endif

    for (unsigned i : hb_iter (feature_indices))
    {
      hb_tag_t tag = get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
        // Note: Never ever drop feature 'pref', even if it's empty.
        // HarfBuzz chooses the shaper for Khmer based on the presence of this
        // feature.  See thread at:
        // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
        continue;

      const Feature *f = &(get_feature (i));
      const Feature **p = nullptr;
      if (feature_substitutes_map->has (i, &p))
        f = *p;

      if (!f->featureParams.is_null () &&
          tag == HB_TAG ('s', 'i', 'z', 'e'))
        continue;

      if (!f->intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
          && !alternate_feature_indices.has (i)
#endif
         )
        feature_indices->del (i);
    }
  }

  void collect_name_ids (const hb_map_t *feature_index_map,
                         hb_set_t       *nameids_to_retain /* OUT */) const
  {
    unsigned count = get_feature_count ();
    for (unsigned i = 0; i < count; i++)
    {
      if (!feature_index_map->has (i)) continue;
      hb_tag_t tag = get_feature_tag (i);
      get_feature (i).collect_name_ids (tag, nameids_to_retain);
    }
  }

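  /* Cached wrapper around a sanitized table blob.  Holds one atomic slot per
   * lookup so that per-lookup accelerators can be created lazily and shared
   * across threads without a lock. */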
  template <typename T>
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    {
      hb_sanitize_context_t sc;
      sc.lazy_some_gpos = true;
      this->table = sc.reference_table<T> (face);

      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
        hb_blob_destroy (this->table.get_blob ());
        this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *) hb_calloc (this->lookup_count, sizeof (*accels));
      if (unlikely (!this->accels))
      {
        this->lookup_count = 0;
        this->table.destroy ();
        this->table = hb_blob_get_empty ();
      }
    }
    ~accelerator_t ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
        hb_free (this->accels[i]);
      hb_free (this->accels);
      this->table.destroy ();
    }

    hb_blob_t *get_blob () const { return table.get_blob (); }

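    /* Lock-free lazy initialization.  If another thread wins the cmpexch
     * race, our freshly created accelerator is freed and the load retried,
     * so exactly one accelerator survives per lookup index. */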
    hb_ot_layout_lookup_accelerator_t *get_accel (unsigned lookup_index) const
    {
      if (unlikely (lookup_index >= lookup_count)) return nullptr;

    retry:
      auto *accel = accels[lookup_index].get_acquire ();
      if (unlikely (!accel))
      {
        accel = hb_ot_layout_lookup_accelerator_t::create (table->get_lookup (lookup_index));
        if (unlikely (!accel))
          return nullptr;

        if (unlikely (!accels[lookup_index].cmpexch (nullptr, accel)))
        {
          hb_free (accel);
          goto retry;
        }
      }

      return accel;
    }

    hb_blob_ptr_t<T> table;
    unsigned int lookup_count;
    hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *accels;
  };
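  /* Usage sketch (illustrative only; the GSUB type and the exact call sites
   * are assumptions about the surrounding hb-ot-layout code, not part of
   * this header):
   *
   *   GSUBGPOS::accelerator_t<GSUB> accel (face); // sanitize + cache table
   *   unsigned count = accel.table->get_lookup_count ();
   *   for (unsigned i = 0; i < count; i++)
   *     if (auto *la = accel.get_accel (i))       // created on first use
   *       ; // ... apply lookup i through its accelerator ...
   */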

  protected:
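  /* All members of the union below overlay the same bytes; `version` is read
   * first to decide which concrete layout is actually present. */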
  union {
  FixedVersion<>                  version;    /* Version identifier */
  GSUBGPOSVersion1_2<SmallTypes>  version1;
#ifndef HB_NO_BEYOND_64K
  GSUBGPOSVersion1_2<MediumTypes> version2;
#endif
  } u;
  public:
  DEFINE_SIZE_MIN (4);
};


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */