1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
31
32 #include "hb.hh"
33 #include "hb-buffer.hh"
34 #include "hb-map.hh"
35 #include "hb-set.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
39
40
41 namespace OT {
42
43
44 struct hb_intersects_context_t :
45 hb_dispatch_context_t<hb_intersects_context_t, bool>
46 {
47 template <typename T>
dispatchOT::hb_intersects_context_t48 return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
default_return_valueOT::hb_intersects_context_t49 static return_t default_return_value () { return false; }
stop_sublookup_iterationOT::hb_intersects_context_t50 bool stop_sublookup_iteration (return_t r) const { return r; }
51
52 const hb_set_t *glyphs;
53
hb_intersects_context_tOT::hb_intersects_context_t54 hb_intersects_context_t (const hb_set_t *glyphs_) :
55 glyphs (glyphs_) {}
56 };
57
58 struct hb_have_non_1to1_context_t :
59 hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
60 {
61 template <typename T>
dispatchOT::hb_have_non_1to1_context_t62 return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
default_return_valueOT::hb_have_non_1to1_context_t63 static return_t default_return_value () { return false; }
stop_sublookup_iterationOT::hb_have_non_1to1_context_t64 bool stop_sublookup_iteration (return_t r) const { return r; }
65 };
66
/* Context for computing glyph closure: the transitive set of glyphs
 * reachable from an initial glyph set through GSUB lookups.  Used by
 * the subsetter.  Memoizes per-lookup visited glyph sets so that a
 * lookup is not re-walked when nothing relevant has changed. */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a nested lookup, guarding against runaway nesting and
   * restoring the nesting budget afterwards. */
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
    nesting_level_left++;
  }

  /* Restart the global visit budget (used between closure iterations). */
  void reset_lookup_visit_count ()
  { lookup_count = 0; }

  /* True once more lookups have been visited than the safety limit allows. */
  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }

  /* Gate for visiting a lookup: charges one unit of the visit budget and
   * skips lookups already done for the current glyph set. */
  bool should_visit_lookup (unsigned int lookup_index)
  {
    if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
      return false;

    if (is_lookup_done (lookup_index))
      return false;

    return true;
  }

  /* Returns true if this lookup need not be processed again.
   * Side-effecting memoization: on a "not done" answer it records the
   * current parent-active glyphs as covered for this lookup.
   * Any allocation/error state conservatively reports "done" so the
   * closure terminates. */
  bool is_lookup_done (unsigned int lookup_index)
  {
    if (unlikely (done_lookups_glyph_count->in_error () ||
		  done_lookups_glyph_set->in_error ()))
      return true;

    /* Have we visited this lookup with the current set of glyphs? */
    if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
    {
      /* Glyph population changed since last visit: invalidate the cached
       * covered-set for this lookup and start it afresh. */
      done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());

      if (!done_lookups_glyph_set->has (lookup_index))
      {
	if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
	  return true;
      }

      hb_set_clear (done_lookups_glyph_set->get (lookup_index));
    }

    hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
    if (unlikely (covered_glyph_set->in_error ()))
      return true;
    /* Already processed this lookup with a superset of the active glyphs. */
    if (parent_active_glyphs ().is_subset (*covered_glyph_set))
      return true;

    /* Remember the glyphs we are about to process this lookup with. */
    covered_glyph_set->union_ (parent_active_glyphs ());
    return false;
  }

  /* Active glyphs of the grandparent frame; falls back to the full glyph
   * set when the stack has fewer than two frames. */
  const hb_set_t& previous_parent_active_glyphs () {
    if (active_glyphs_stack.length <= 1)
      return *glyphs;

    return active_glyphs_stack[active_glyphs_stack.length - 2];
  }

  /* Active glyphs of the current context frame; falls back to the full
   * glyph set when the stack is empty. */
  const hb_set_t& parent_active_glyphs ()
  {
    if (!active_glyphs_stack)
      return *glyphs;

    return active_glyphs_stack.tail ();
  }

  /* Push a fresh (empty) active-glyphs frame and hand it to the caller
   * to fill in. */
  hb_set_t& push_cur_active_glyphs ()
  {
    return *active_glyphs_stack.push ();
  }

  /* Pop the current active-glyphs frame; returns false if the stack was
   * already empty. */
  bool pop_cur_done_glyphs ()
  {
    if (!active_glyphs_stack)
      return false;

    active_glyphs_stack.pop ();
    return true;
  }

  hb_face_t *face;
  hb_set_t *glyphs;             /* The closure being accumulated (in/out). */
  hb_set_t output[1];           /* Staging set, merged into glyphs by flush(). */
  hb_vector_t<hb_set_t> active_glyphs_stack;
  recurse_func_t recurse_func = nullptr;
  unsigned int nesting_level_left;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			hb_map_t *done_lookups_glyph_count_,
			hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  nesting_level_left (nesting_level_left_),
			  done_lookups_glyph_count (done_lookups_glyph_count_),
			  done_lookups_glyph_set (done_lookups_glyph_set_)
  {}

  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  /* Merge staged output into the closure, dropping glyph ids beyond the
   * face's glyph count, and tear down the active-glyphs stack. */
  void flush ()
  {
    output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID);	/* Remove invalid glyphs. */
    glyphs->union_ (*output);
    output->clear ();
    active_glyphs_stack.pop ();
    active_glyphs_stack.reset ();
  }

  private:
  /* Memoization state: per-lookup glyph population at last visit, and
   * per-lookup set of glyphs already processed. */
  hb_map_t *done_lookups_glyph_count;
  hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set;
  unsigned int lookup_count = 0;
};
197
198
199
/* Context for computing lookup closure: which lookups are reachable
 * (directly or through contextual recursion) and which are inactive for
 * a given glyph set.  Used by the subsetter to drop unused lookups. */
struct hb_closure_lookups_context_t :
       hb_dispatch_context_t<hb_closure_lookups_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a referenced lookup, bounded by nesting depth, the
   * global visit budget, and the visited-set (each lookup is recursed to
   * at most once). */
  void recurse (unsigned lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Return if new lookup was recursed to before. */
    if (lookup_limit_exceeded ()
	|| visited_lookups->in_error ()
	|| visited_lookups->has (lookup_index))
      // Don't increment lookup count here, that will be done in the call to closure_lookups()
      // made by recurse_func.
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
  }

  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  /* True once the visit budget is spent; logs when tripped. */
  bool lookup_limit_exceeded ()
  {
    bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
    if (ret)
      DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
    return ret; }

  /* Checks (and charges one unit of) the visit budget, then reports
   * whether the lookup was already visited.  Errors and budget overruns
   * conservatively answer "visited" so processing stops. */
  bool is_lookup_visited (unsigned lookup_index)
  {
    if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
    {
      DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
		 lookup_count, lookup_index);
      return true;
    }

    if (unlikely (visited_lookups->in_error ()))
      return true;

    return visited_lookups->has (lookup_index);
  }

  hb_face_t *face;
  const hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;

  hb_closure_lookups_context_t (hb_face_t *face_,
				const hb_set_t *glyphs_,
				hb_set_t *visited_lookups_,
				hb_set_t *inactive_lookups_,
				unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
				face (face_),
				glyphs (glyphs_),
				recurse_func (nullptr),
				nesting_level_left (nesting_level_left_),
				visited_lookups (visited_lookups_),
				inactive_lookups (inactive_lookups_),
				lookup_count (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  private:
  hb_set_t *visited_lookups;    /* Lookups already closed over (out). */
  hb_set_t *inactive_lookups;   /* Lookups found to do nothing (out). */
  unsigned int lookup_count;
};
278
279 struct hb_would_apply_context_t :
280 hb_dispatch_context_t<hb_would_apply_context_t, bool>
281 {
282 template <typename T>
dispatchOT::hb_would_apply_context_t283 return_t dispatch (const T &obj) { return obj.would_apply (this); }
default_return_valueOT::hb_would_apply_context_t284 static return_t default_return_value () { return false; }
stop_sublookup_iterationOT::hb_would_apply_context_t285 bool stop_sublookup_iteration (return_t r) const { return r; }
286
287 hb_face_t *face;
288 const hb_codepoint_t *glyphs;
289 unsigned int len;
290 bool zero_context;
291
hb_would_apply_context_tOT::hb_would_apply_context_t292 hb_would_apply_context_t (hb_face_t *face_,
293 const hb_codepoint_t *glyphs_,
294 unsigned int len_,
295 bool zero_context_) :
296 face (face_),
297 glyphs (glyphs_),
298 len (len_),
299 zero_context (zero_context_) {}
300 };
301
/* Context for collecting the glyph sets a lookup touches: backtrack
 * ("before"), input, lookahead ("after"), and output glyphs.  Any of the
 * four out-sets may be the shared empty set when the caller doesn't care. */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a nested lookup, collecting ONLY its output glyphs:
   * before/input/after are temporarily redirected to the empty set and
   * restored afterwards.  Each lookup is recursed into at most once. */
  void recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return;

    /* Redirect the context sets to the empty sink for the duration of the
     * recursion; only `output` keeps collecting. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups->add (lookup_index);
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;   /* Owned; lookups already recursed into. */
  unsigned int nesting_level_left;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t  *glyphs_before, /* OUT.  May be NULL */
			       hb_set_t  *glyphs_input,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_after,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_output, /* OUT.  May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
			      after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (nullptr),
			      recursed_lookups (hb_set_create ()),
			      nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
374
375
376
377 template <typename set_t>
378 struct hb_collect_coverage_context_t :
379 hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
380 {
381 typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
382 template <typename T>
dispatchOT::hb_collect_coverage_context_t383 return_t dispatch (const T &obj) { return obj.get_coverage (); }
default_return_valueOT::hb_collect_coverage_context_t384 static return_t default_return_value () { return Null (Coverage); }
stop_sublookup_iterationOT::hb_collect_coverage_context_t385 bool stop_sublookup_iteration (return_t r) const
386 {
387 r.collect_coverage (set);
388 return false;
389 }
390
hb_collect_coverage_context_tOT::hb_collect_coverage_context_t391 hb_collect_coverage_context_t (set_t *set_) :
392 set (set_) {}
393
394 set_t *set;
395 };
396
/* The central context for applying GSUB/GPOS lookups to a buffer during
 * shaping.  Carries the font/face/buffer, current lookup state (mask,
 * props, index), and two skipping iterators for walking the buffer while
 * honoring lookup flags. */
struct hb_ot_apply_context_t :
       hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
  /* Predicate bundle deciding, for each glyph, whether it may MATCH the
   * (sub)table's expected glyph data and whether it may be SKIPPED per
   * the lookup flags (ignorables, mark filtering, mask, syllable). */
  struct matcher_t
  {
    matcher_t () :
	     lookup_props (0),
	     mask (-1),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     per_syllable (false),
	     syllable {0},
	     match_func (nullptr),
	     match_data (nullptr) {}

    typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

    void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    void set_mask (hb_mask_t mask_) { mask = mask_; }
    void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; }
    /* Syllable restriction only takes effect when per_syllable is on. */
    void set_syllable (uint8_t syllable_)  { syllable = per_syllable ? syllable_ : 0; }
    void set_match_func (match_func_t match_func_,
			 const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

    /* Can this glyph match the expected glyph_data?  MATCH_MAYBE means
     * "no match function installed; can't say no". */
    may_match_t may_match (hb_glyph_info_t &info,
			   hb_codepoint_t glyph_data) const
    {
      /* Masked out for this lookup, or wrong syllable: definite no. */
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info, glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

    /* Should this glyph be skipped while matching?  SKIP_YES when the
     * lookup flags exclude it; SKIP_MAYBE for default-ignorables that we
     * are allowed to pass over (ZWNJ/ZWJ subject to the ignore flags). */
    may_skip_t may_skip (const hb_ot_apply_context_t *c,
			 const hb_glyph_info_t       &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    hb_mask_t mask;
    bool ignore_zwnj;
    bool ignore_zwj;
    bool per_syllable;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  /* Iterator over buffer glyphs that transparently skips glyphs the
   * lookup flags say to ignore.  next() walks forward over buffer->info;
   * prev() walks backward over buffer->out_info.  Matches glyphs against
   * an optional 16- or 24-bit glyph-id array advanced in lockstep. */
  struct skipping_iterator_t
  {
    /* Bind to an apply context.  context_match selects the looser
     * matching rules used for context (backtrack/lookahead) matching. */
    void init (hb_ot_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data16 = nullptr;
#ifndef HB_NO_BEYOND_64K
      match_glyph_data24 = nullptr;
#endif
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching context, or asked to. */
      matcher.set_ignore_zwj (context_match || c->auto_zwj);
      /* Context matching ignores the lookup mask. */
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
      matcher.set_per_syllable (c->per_syllable);
    }
    void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    void set_match_func (matcher_t::match_func_t match_func_,
			 const void *match_data_)
    {
      matcher.set_match_func (match_func_, match_data_);
    }
    /* Install the array of expected glyph values (16-bit form). */
    void set_glyph_data (const HBUINT16 glyph_data[])
    {
      match_glyph_data16 = glyph_data;
#ifndef HB_NO_BEYOND_64K
      match_glyph_data24 = nullptr;
#endif
    }
#ifndef HB_NO_BEYOND_64K
    /* Install the array of expected glyph values (24-bit form). */
    void set_glyph_data (const HBUINT24 glyph_data[])
    {
      match_glyph_data16 = nullptr;
      match_glyph_data24 = glyph_data;
    }
#endif

    /* Position at start_index_ with num_items_ glyphs left to match.
     * Syllable restriction is taken from the current buffer glyph only
     * when starting at the buffer cursor. */
    void reset (unsigned int start_index_,
		unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    /* Undo the last successful match: restore the item count and back up
     * the glyph-data pointer. */
    void reject ()
    {
      num_items++;
      backup_glyph_data ();
    }

    matcher_t::may_skip_t
    may_skip (const hb_glyph_info_t &info) const
    { return matcher.may_skip (c, info); }

    /* Advance to the next matching glyph.  Returns false on failure; if
     * unsafe_to is given, it receives the position up to which the
     * buffer is unsafe to concatenate at. */
    bool next (unsigned *unsafe_to = nullptr)
    {
      assert (num_items > 0);
      /* The alternate condition below is faster at string boundaries,
       * but produces subpar "unsafe-to-concat" values. */
      signed stop = (signed) end - (signed) num_items;
      if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
	stop = (signed) end - 1;
      while ((signed) idx < stop)
      {
	idx++;
	hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ());
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  advance_glyph_data ();
	  return true;
	}

	/* Neither matched nor skippable: matching fails here. */
	if (skip == matcher_t::SKIP_NO)
	{
	  if (unsafe_to)
	    *unsafe_to = idx + 1;
	  return false;
	}
      }
      if (unsafe_to)
	*unsafe_to = end;
      return false;
    }
    /* Mirror of next(): move backward over out_info to the previous
     * matching glyph.  unsafe_from receives the unsafe-to-concat-from
     * position on failure. */
    bool prev (unsigned *unsafe_from = nullptr)
    {
      assert (num_items > 0);
      /* The alternate condition below is faster at string boundaries,
       * but produces subpar "unsafe-to-concat" values. */
      unsigned stop = num_items - 1;
      if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
	stop = 1 - 1;
      while (idx > stop)
      {
	idx--;
	hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ());
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  advance_glyph_data ();
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	{
	  if (unsafe_from)
	    *unsafe_from = hb_max (1u, idx) - 1u;
	  return false;
	}
      }
      if (unsafe_from)
	*unsafe_from = 0;
      return false;
    }

    /* Current expected glyph value from whichever glyph array is
     * installed; 0 when none. */
    hb_codepoint_t
    get_glyph_data ()
    {
      if (match_glyph_data16) return *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) return *match_glyph_data24;
#endif
      return 0;
    }
    void
    advance_glyph_data ()
    {
      if (match_glyph_data16) match_glyph_data16++;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) match_glyph_data24++;
#endif
    }
    void
    backup_glyph_data ()
    {
      if (match_glyph_data16) match_glyph_data16--;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) match_glyph_data24--;
#endif
    }

    unsigned int idx;           /* Current buffer position. */
    protected:
    hb_ot_apply_context_t *c;
    matcher_t matcher;
    const HBUINT16 *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
    const HBUINT24 *match_glyph_data24;
#endif

    unsigned int num_items;     /* Glyphs still expected to match. */
    unsigned int end;           /* One past the last buffer index. */
  };


  const char *get_name () { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  /* First sublookup that applies wins. */
  bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Recurse into a nested lookup (contextual substitution/positioning).
   * Bounded by nesting depth and the buffer's operation budget; on
   * exhaustion marks shaping as failed and applies nothing. */
  return_t recurse (unsigned int sub_lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
    {
      buffer->shaping_failed = true;
      return default_return_value ();
    }

    nesting_level_left--;
    bool ret = recurse_func (this, sub_lookup_index);
    nesting_level_left++;
    return ret;
  }

  skipping_iterator_t iter_input, iter_context;

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  recurse_func_t recurse_func = nullptr;
  const GDEF &gdef;
  const VariationStore &var_store;
  VariationStore::cache_t *var_store_cache;
  hb_set_digest_t digest;     /* Bloom-filter-style digest of buffer glyphs. */

  hb_direction_t direction;
  hb_mask_t lookup_mask = 1;
  unsigned int lookup_index = (unsigned) -1;
  unsigned int lookup_props = 0;
  unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL;

  bool has_glyph_classes;
  bool auto_zwnj = true;
  bool auto_zwj = true;
  bool per_syllable = false;
  bool random = false;        /* 'rand' feature enabled. */
  uint32_t random_state = 1;
  unsigned new_syllables = (unsigned) -1;

  hb_ot_apply_context_t (unsigned int table_index_,
			 hb_font_t *font_,
			 hb_buffer_t *buffer_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			gdef (
#ifndef HB_NO_OT_LAYOUT
			      *face->table.GDEF->table
#else
			      Null (GDEF)
#endif
			     ),
			var_store (gdef.get_var_store ()),
			var_store_cache (
#ifndef HB_NO_VAR
					 /* Cache only pays off for GPOS on a variable-font instance. */
					 table_index == 1 && font->num_coords ? var_store.create_cache () : nullptr
#else
					 nullptr
#endif
					),
			digest (buffer_->digest ()),
			direction (buffer_->props.direction),
			has_glyph_classes (gdef.has_glyph_classes ())
  { init_iters (); }

  ~hb_ot_apply_context_t ()
  {
#ifndef HB_NO_VAR
    VariationStore::destroy_cache (var_store_cache);
#endif
  }

  /* (Re)derive both iterators from current context state; must be called
   * after any setter below that changes matching behavior. */
  void init_iters ()
  {
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
  void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
  void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
  void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; init_iters (); }
  void set_random (bool random_) { random = random_; }
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }

  /* Deterministic PRNG for the 'rand' feature.
   * NOTE(review): random_state is uint32_t, so the multiply wraps mod
   * 2^32 before the % 2147483647 — not a faithful minstd_rand.  Output
   * is stable across runs, which is what shaping needs; do not "fix"
   * without accounting for changed shaping results. */
  uint32_t random_number ()
  {
    /* http://www.cplusplus.com/reference/random/minstd_rand/ */
    random_state = random_state * 48271 % 2147483647;
    return random_state;
  }

  /* Does a mark glyph survive the lookup's mark-filtering rules? */
  bool match_properties_mark (hb_codepoint_t glyph,
			      unsigned int glyph_props,
			      unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  /* Is this glyph eligible under the lookup flags (match_props)? */
  bool check_glyph_property (const hb_glyph_info_t *info,
			     unsigned int  match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }

  /* Update glyph-props (and the digest, and optionally the syllable) of
   * the glyph about to be written at the buffer cursor, reflecting that
   * a substitution produced glyph_index.  class_guess is used only when
   * the font has no GDEF glyph classes. */
  void _set_glyph_class (hb_codepoint_t glyph_index,
			 unsigned int class_guess = 0,
			 bool ligature = false,
			 bool component = false)
  {
    digest.add (glyph_index);

    if (new_syllables != (unsigned) -1)
      buffer->cur().syllable() = new_syllables;

    unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
    props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit.
       */
      props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef.get_glyph_props (glyph_index));
    }
    else if (class_guess)
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
    }
    else
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
  }

  /* Substitute the current glyph, advancing the buffer cursor. */
  void replace_glyph (hb_codepoint_t glyph_index)
  {
    _set_glyph_class (glyph_index);
    (void) buffer->replace_glyph (glyph_index);
  }
  /* Substitute the current glyph WITHOUT advancing the cursor. */
  void replace_glyph_inplace (hb_codepoint_t glyph_index)
  {
    _set_glyph_class (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  /* Substitute the current glyph with a ligature glyph. */
  void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
				    unsigned int class_guess)
  {
    _set_glyph_class (glyph_index, class_guess, true);
    (void) buffer->replace_glyph (glyph_index);
  }
  /* Emit one component glyph of a Multiple substitution. */
  void output_glyph_for_component (hb_codepoint_t glyph_index,
				   unsigned int class_guess)
  {
    _set_glyph_class (glyph_index, class_guess, false, true);
    (void) buffer->output_glyph (glyph_index);
  }
};
855
856
/* Dispatch context that flattens a lookup's subtables into an array of
 * hb_applicable_t entries, each pairing a type-erased apply function with a
 * coverage digest, so that at apply time subtables whose coverage cannot
 * contain the current glyph are skipped cheaply.  Also elects at most one
 * subtable per lookup (the costliest) to use the subtable-level cache. */
struct hb_accelerate_subtables_context_t :
       hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
  /* Type-erased trampoline: recover the concrete subtable type and apply. */
  template <typename Type>
  static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return typed_obj->apply (c);
  }

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  /* Overload pair selected via hb_priority tag dispatch: the <1> overload
   * is preferred, and HB_RETURN presumably SFINAEs it away when the subtable
   * has no apply(c, cached) overload (confirm HB_RETURN in hb.hh), falling
   * back to the plain apply() at priority <0>. */
  template <typename T>
  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
  template <typename T>
  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
  template <typename Type>
  static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return apply_cached_ (typed_obj, c, hb_prioritize);
  }

  /* Same tag-dispatch trick for entering/leaving the subtable cache:
   * subtables without a cache_func simply report false. */
  template <typename T>
  static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
  template <typename T>
  static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
  template <typename Type>
  static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
  {
    const Type *typed_obj = (const Type *) obj;
    return cache_func_ (typed_obj, c, enter, hb_prioritize);
  }
#endif

  typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
  typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);

  /* One accelerated subtable: type-erased object pointer, its apply
   * trampolines, and a digest of its coverage for fast rejection. */
  struct hb_applicable_t
  {
    friend struct hb_accelerate_subtables_context_t;
    friend struct hb_ot_layout_lookup_accelerator_t;

    template <typename T>
    void init (const T &obj_,
	       hb_apply_func_t apply_func_
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
	       , hb_apply_func_t apply_cached_func_
	       , hb_cache_func_t cache_func_
#endif
	       )
    {
      obj = &obj_;
      apply_func = apply_func_;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
      apply_cached_func = apply_cached_func_;
      cache_func = cache_func_;
#endif
      /* Fold the subtable's coverage into the digest so apply() can
       * reject non-covered glyphs without calling into the subtable. */
      digest.init ();
      obj_.get_coverage ().collect_coverage (&digest);
    }

    /* Digest check first; only call the subtable if the glyph may match. */
    bool apply (hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
    }
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    bool apply_cached (hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
    }
    bool cache_enter (hb_ot_apply_context_t *c) const
    {
      return cache_func (obj, c, true);
    }
    void cache_leave (hb_ot_apply_context_t *c) const
    {
      cache_func (obj, c, false);
    }
#endif

    private:
    const void *obj;
    hb_apply_func_t apply_func;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    hb_apply_func_t apply_cached_func;
    hb_cache_func_t cache_func;
#endif
    hb_set_digest_t digest;
  };

  typedef hb_vector_t<hb_applicable_t> array_t;

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  /* Cost of a subtable that a cache would avoid; 0 if it has no
   * cache_cost() (same hb_priority tag-dispatch as above). */
  template <typename T>
  auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () )
  template <typename T>
  auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u )
#endif

  /* Dispatch interface. */
  template <typename T>
  return_t dispatch (const T &obj)
  {
    hb_applicable_t entry;

    entry.init (obj,
		apply_to<T>
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
		, apply_cached_to<T>
		, cache_func_to<T>
#endif
		);

    array.push (entry);

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    /* Cache handling
     *
     * We allow one subtable from each lookup to use a cache. The assumption
     * being that multiple subtables of the same lookup cannot use a cache
     * because the resources they would use will collide.  As such, we ask
     * each subtable to tell us how much it costs (which a cache would avoid),
     * and we allocate the cache opportunity to the costliest subtable.
     */
    unsigned cost = cache_cost (obj, hb_prioritize);
    if (cost > cache_user_cost && !array.in_error ())
    {
      cache_user_idx = array.length - 1;
      cache_user_cost = cost;
    }
#endif

    return hb_empty_t ();
  }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_accelerate_subtables_context_t (array_t &array_) :
				     array (array_) {}

  array_t &array;

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  /* Index (into `array`) of the subtable elected to use the cache;
   * (unsigned) -1 means none. */
  unsigned cache_user_idx = (unsigned) -1;
  unsigned cache_user_cost = 0;
#endif
};
1003
1004
/* Callback signatures used by the (Chain)Context closure / collect-glyphs /
 * apply machinery below.  `value` is a glyph id, class value, or Coverage
 * offset depending on the subtable format; `data` points at the matching
 * glyph array / ClassDef / Coverage base; `cache` is an optional
 * caller-owned memoization structure. */
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

/* Function-table bundles threaded through the generic lookup helpers. */
struct ContextClosureFuncs
{
  intersects_func_t intersects;
  intersected_glyphs_func_t intersected_glyphs;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};
struct ChainContextApplyFuncs
{
  match_func_t match[3]; /* presumably backtrack/input/lookahead — confirm at use sites */
};
1027
1028
/* intersects_func_t for plain glyph-id matching: `value` is itself the glyph. */
static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
{
  return glyphs->has (value);
}
intersects_class(const hb_set_t * glyphs,unsigned value,const void * data,void * cache)1033 static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
1034 {
1035 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
1036 hb_map_t *map = (hb_map_t *) cache;
1037
1038 hb_codepoint_t *cached_v;
1039 if (map->has (value, &cached_v))
1040 return *cached_v;
1041
1042 bool v = class_def.intersects_class (glyphs, value);
1043 map->set (value, v);
1044
1045 return v;
1046 }
/* intersects_func_t for coverage-based matching: `value` is a byte offset
 * from `data` to a Coverage table (materialized via a temporary
 * Offset16To so the data+coverage offset arithmetic applies). */
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  return (data+coverage).intersects (glyphs);
}
1053
1054
intersected_glyph(const hb_set_t * glyphs HB_UNUSED,const void * data,unsigned value,hb_set_t * intersected_glyphs,HB_UNUSED void * cache)1055 static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
1056 {
1057 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
1058 intersected_glyphs->add (g);
1059 }
1060
/* Memoization map for intersected_class_glyphs: class value -> glyph set. */
using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;

/* intersected_glyphs_func_t for class-based matching: union into
 * `intersected_glyphs` the glyphs of class `value` (per the ClassDef at
 * `data`) that intersect `glyphs`, memoizing per class value in `cache`. */
static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);

  intersected_class_cache_t *map = (intersected_class_cache_t *) cache;

  hb_set_t *cached_v;
  if (map->has (value, &cached_v))
  {
    intersected_glyphs->union_ (*cached_v);
    return;
  }

  hb_set_t v;
  class_def.intersected_class_glyphs (glyphs, value, &v);

  /* Union first: the std::move below leaves `v` consumed by the cache. */
  intersected_glyphs->union_ (v);

  map->set (value, std::move (v));
}
1083
intersected_coverage_glyphs(const hb_set_t * glyphs,const void * data,unsigned value,hb_set_t * intersected_glyphs,HB_UNUSED void * cache)1084 static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
1085 {
1086 Offset16To<Coverage> coverage;
1087 coverage = value;
1088 (data+coverage).intersect_set (*glyphs, *intersected_glyphs);
1089 }
1090
1091
1092 template <typename HBUINT>
array_is_subset_of(const hb_set_t * glyphs,unsigned int count,const HBUINT values[],intersects_func_t intersects_func,const void * intersects_data,void * cache)1093 static inline bool array_is_subset_of (const hb_set_t *glyphs,
1094 unsigned int count,
1095 const HBUINT values[],
1096 intersects_func_t intersects_func,
1097 const void *intersects_data,
1098 void *cache)
1099 {
1100 for (const auto &_ : + hb_iter (values, count))
1101 if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
1102 return true;
1103 }
1104
1105
/* collect_glyphs_func_t for plain glyph-id rules: the value is the glyph. */
static inline void collect_glyph (hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
/* collect_glyphs_func_t for class-based rules: add every glyph of class
 * `value` per the ClassDef at `data`. */
static inline void collect_class (hb_set_t *glyphs, unsigned value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.collect_class (glyphs, value);
}
/* collect_glyphs_func_t for coverage-based rules: add every glyph covered
 * by the Coverage table at byte offset `value` from `data`. */
static inline void collect_coverage (hb_set_t *glyphs, unsigned value, const void *data)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  (data+coverage).collect_coverage (glyphs);
}
1121 template <typename HBUINT>
collect_array(hb_collect_glyphs_context_t * c HB_UNUSED,hb_set_t * glyphs,unsigned int count,const HBUINT values[],collect_glyphs_func_t collect_func,const void * collect_data)1122 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
1123 hb_set_t *glyphs,
1124 unsigned int count,
1125 const HBUINT values[],
1126 collect_glyphs_func_t collect_func,
1127 const void *collect_data)
1128 {
1129 return
1130 + hb_iter (values, count)
1131 | hb_apply ([&] (const HBUINT &_) { collect_func (glyphs, _, collect_data); })
1132 ;
1133 }
1134
1135
/* match_func_t for glyph-id matching: exact codepoint equality. */
static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
{
  return info.codepoint == value;
}
/* match_func_t for class-based matching: the glyph's class (per the
 * ClassDef at `data`) must equal `value`. */
static inline bool match_class (hb_glyph_info_t &info, unsigned value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (info.codepoint) == value;
}
/* Like match_class, but memoizes the glyph's class in the glyph-info
 * syllable() field: values 0..254 are cached classes, 255 means "not
 * cached" (classes >= 255 are never cached).  NOTE(review): assumes the
 * caller pre-initialized syllable() for this use — presumably done by the
 * subtable's cache_enter machinery; confirm at call sites. */
static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = info.syllable();
  if (klass < 255)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 255))
    info.syllable() = klass;
  return klass == value;
}
/* match_func_t for coverage-based matching: the glyph must be covered by
 * the Coverage table at byte offset `value` from `data`. */
static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED;
}
1162
1163 template <typename HBUINT>
would_match_input(hb_would_apply_context_t * c,unsigned int count,const HBUINT input[],match_func_t match_func,const void * match_data)1164 static inline bool would_match_input (hb_would_apply_context_t *c,
1165 unsigned int count, /* Including the first glyph (not matched) */
1166 const HBUINT input[], /* Array of input values--start with second glyph */
1167 match_func_t match_func,
1168 const void *match_data)
1169 {
1170 if (count != c->len)
1171 return false;
1172
1173 for (unsigned int i = 1; i < count; i++)
1174 {
1175 hb_glyph_info_t info;
1176 info.codepoint = c->glyphs[i];
1177 if (likely (!match_func (info, input[i - 1], match_data)))
1178 return false;
1179 }
1180
1181 return true;
1182 }
/* Match the `count - 1` glyphs after the buffer cursor against `input`
 * using `match_func`, skipping ignorable glyphs per the lookup flags.
 *
 * On success returns true with match_positions[0..count) holding the
 * buffer indices of the matched glyphs, *end_position one past the last
 * matched glyph, and *p_total_component_count (if non-null) the summed
 * ligature-component counts of the matched glyphs.  On failure,
 * *end_position receives the position up to which matching is known
 * unsafe (for unsafe-to-break bookkeeping). */
template <typename HBUINT>
static inline bool match_input (hb_ot_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const HBUINT input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_position,
				unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
				unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data);
  skippy_iter.set_glyph_data (input);

  /*
   * This is perhaps the trickiest part of OpenType... Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct,
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/harfbuzz/harfbuzz/issues/545
   */

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  /* Lazily-computed verdict on whether the base ligature the first glyph
   * attaches to is skippable under the current matching rules. */
  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    unsigned unsafe_to;
    if (!skippy_iter.next (&unsafe_to))
    {
      *end_position = unsafe_to;
      return_trace (false);
    }

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
	/* ...unless, we are attached to a base ligature and that base
	 * ligature is ignorable. */
	if (ligbase == LIGBASE_NOT_CHECKED)
	{
	  /* Scan the out-buffer backwards for the component-0 glyph (the
	   * ligature base) carrying our lig id, then ask the iterator
	   * whether it may be skipped. */
	  bool found = false;
	  const auto *out = buffer->out_info;
	  unsigned int j = buffer->out_len;
	  while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
	  {
	    if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
	    {
	      j--;
	      found = true;
	      break;
	    }
	    j--;
	  }

	  if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
	    ligbase = LIGBASE_MAY_SKIP;
	  else
	    ligbase = LIGBASE_MAY_NOT_SKIP;
	}

	if (ligbase == LIGBASE_MAY_NOT_SKIP)
	  return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_position = skippy_iter.idx + 1;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
/* Replace the matched glyph sequence (match_positions[0..count), as
 * produced by match_input) with the single ligature glyph `lig_glyph`,
 * merging clusters and re-assigning ligature ids / component numbers so
 * that marks in and after the sequence keep attaching correctly. */
static inline bool ligate_input (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int match_end,
				 hb_codepoint_t lig_glyph,
				 unsigned int total_component_count)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, match_end);

  /* - If a base and one or more marks ligate, consider that as a base, NOT
   *   ligature, such that all following marks can still attach to it.
   *   https://github.com/harfbuzz/harfbuzz/issues/1109
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *   If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
  for (unsigned int i = 1; i < count; i++)
    if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
    {
      is_base_ligature = false;
      is_mark_ligature = false;
      break;
    }
  bool is_ligature = !is_base_ligature && !is_mark_ligature;

  unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
  unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (is_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  /* Walk the remaining matched components; marks between them are copied
   * through with their lig props re-targeted at the new ligature. */
  for (unsigned int i = 1; i < count; i++)
  {
    while (buffer->idx < match_positions[i] && buffer->successful)
    {
      if (is_ligature)
      {
	unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
	if (this_comp == 0)
	  this_comp = last_num_components;
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    hb_min (this_comp, last_num_components);
	  _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      (void) buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id)
  {
    /* Re-adjust components for any marks following. */
    for (unsigned i = buffer->idx; i < buffer->len; ++i)
    {
      if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;

      unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
      if (!this_comp) break;

      unsigned new_lig_comp = components_so_far - last_num_components +
			      hb_min (this_comp, last_num_components);
      _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
    }
  }
  return_trace (true);
}
1422
1423 template <typename HBUINT>
match_backtrack(hb_ot_apply_context_t * c,unsigned int count,const HBUINT backtrack[],match_func_t match_func,const void * match_data,unsigned int * match_start)1424 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1425 unsigned int count,
1426 const HBUINT backtrack[],
1427 match_func_t match_func,
1428 const void *match_data,
1429 unsigned int *match_start)
1430 {
1431 TRACE_APPLY (nullptr);
1432
1433 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1434 skippy_iter.reset (c->buffer->backtrack_len (), count);
1435 skippy_iter.set_match_func (match_func, match_data);
1436 skippy_iter.set_glyph_data (backtrack);
1437
1438 for (unsigned int i = 0; i < count; i++)
1439 {
1440 unsigned unsafe_from;
1441 if (!skippy_iter.prev (&unsafe_from))
1442 {
1443 *match_start = unsafe_from;
1444 return_trace (false);
1445 }
1446 }
1447
1448 *match_start = skippy_iter.idx;
1449 return_trace (true);
1450 }
1451
1452 template <typename HBUINT>
match_lookahead(hb_ot_apply_context_t * c,unsigned int count,const HBUINT lookahead[],match_func_t match_func,const void * match_data,unsigned int start_index,unsigned int * end_index)1453 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1454 unsigned int count,
1455 const HBUINT lookahead[],
1456 match_func_t match_func,
1457 const void *match_data,
1458 unsigned int start_index,
1459 unsigned int *end_index)
1460 {
1461 TRACE_APPLY (nullptr);
1462
1463 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1464 skippy_iter.reset (start_index - 1, count);
1465 skippy_iter.set_match_func (match_func, match_data);
1466 skippy_iter.set_glyph_data (lookahead);
1467
1468 for (unsigned int i = 0; i < count; i++)
1469 {
1470 unsigned unsafe_to;
1471 if (!skippy_iter.next (&unsafe_to))
1472 {
1473 *end_index = unsafe_to;
1474 return_trace (false);
1475 }
1476 }
1477
1478 *end_index = skippy_iter.idx + 1;
1479 return_trace (true);
1480 }
1481
1482
1483
/* A (sequence index, lookup index) pair: "apply lookup lookupListIndex at
 * position sequenceIndex of the matched glyph sequence". */
struct LookupRecord
{
  /* Serialize a copy for subsetting, remapping lookupListIndex through
   * lookup_map; fails with INT_OVERFLOW if the remapped index does not
   * fit the field. */
  bool serialize (hb_serialize_context_t *c,
		  const hb_map_t *lookup_map) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->embed (*this);
    if (unlikely (!out)) return_trace (false);

    return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  HBUINT16	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  HBUINT16	lookupListIndex;	/* Lookup to apply to that
					 * position--zero-based */
  public:
  DEFINE_SIZE_STATIC (4);
};
1509
serialize_lookuprecord_array(hb_serialize_context_t * c,const hb_array_t<const LookupRecord> lookupRecords,const hb_map_t * lookup_map)1510 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
1511 const hb_array_t<const LookupRecord> lookupRecords,
1512 const hb_map_t *lookup_map)
1513 {
1514 unsigned count = 0;
1515 for (const LookupRecord& r : lookupRecords)
1516 {
1517 if (!lookup_map->has (r.lookupListIndex))
1518 continue;
1519
1520 if (!r.serialize (c, lookup_map))
1521 return 0;
1522
1523 count++;
1524 }
1525 return count;
1526 }
1527
/* How a (Chain)Context subtable encodes its match values: glyph ids
 * (format 1), ClassDef class values (format 2), or Coverage-table
 * offsets (format 3). */
enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
1529
/* Closure helper for (Chain)Context rules: for each LookupRecord, compute
 * the glyph set "active" at the record's sequence position, push it as
 * the current active-glyph set, recurse into the referenced lookup, then
 * pop.  `value` describes the first (already-matched) position per the
 * subtable format; positions 1.. come from `input`. */
template <typename HBUINT>
static void context_closure_recurse_lookups (hb_closure_context_t *c,
					     unsigned inputCount, const HBUINT input[],
					     unsigned lookupCount,
					     const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
					     unsigned value,
					     ContextFormat context_format,
					     const void *data,
					     intersected_glyphs_func_t intersected_glyphs_func,
					     void *cache)
{
  hb_set_t *covered_seq_indicies = hb_set_create ();
  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned seqIndex = lookupRecord[i].sequenceIndex;
    if (seqIndex >= inputCount) continue;

    bool has_pos_glyphs = false;
    hb_set_t pos_glyphs;

    /* Only compute position glyphs the first time a sequence index is
     * seen; later records at the same index reuse c->glyphs instead. */
    if (!hb_set_has (covered_seq_indicies, seqIndex))
    {
      has_pos_glyphs = true;
      if (seqIndex == 0)
      {
        /* First position is described by `value`, per subtable format. */
        switch (context_format) {
        case ContextFormat::SimpleContext:
          pos_glyphs.add (value);
          break;
        case ContextFormat::ClassBasedContext:
          intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
          break;
        case ContextFormat::CoverageBasedContext:
          pos_glyphs.set (c->parent_active_glyphs ());
          break;
        }
      }
      else
      {
        /* Subsequent positions come from the input[] array. */
        const void *input_data = input;
        unsigned input_value = seqIndex - 1;
        if (context_format != ContextFormat::SimpleContext)
        {
          input_data = data;
          input_value = input[seqIndex - 1];
        }

        intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
      }
    }

    covered_seq_indicies->add (seqIndex);
    if (has_pos_glyphs) {
      c->push_cur_active_glyphs () = std::move (pos_glyphs);
    } else {
      c->push_cur_active_glyphs ().set (*c->glyphs);
    }

    unsigned endIndex = inputCount;
    if (context_format == ContextFormat::CoverageBasedContext)
      endIndex += 1;

    c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);

    c->pop_cur_done_glyphs ();
  }

  hb_set_destroy (covered_seq_indicies);
}
1599
/* Position-independent traversal: recurse into every lookup referenced by
 * the records, ignoring sequence indices. */
template <typename context_t>
static inline void recurse_lookups (context_t *c,
				    unsigned int lookupCount,
				    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
  for (unsigned int i = 0; i < lookupCount; i++)
    c->recurse (lookupRecord[i].lookupListIndex);
}
1608
/* Apply each LookupRecord's nested lookup at its sequence position inside
 * the matched glyph run, re-adjusting match_positions as the recursed
 * lookups insert or delete glyphs, and finally moving the cursor to the
 * (possibly shifted) end of the match. */
static inline void apply_lookup (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_end)
{
  hb_buffer_t *buffer = c->buffer;
  int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_end - buffer->idx;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();

    /* This can happen if earlier recursed lookups deleted many entries. */
    if (unlikely (match_positions[idx] >= orig_len))
      continue;

    if (unlikely (!buffer->move_to (match_positions[idx])))
      break;

    if (unlikely (buffer->max_ops <= 0))
      break;

    if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
    {
      if (buffer->have_output)
	c->buffer->sync_so_far ();
      c->buffer->message (c->font,
			  "recursing to lookup %u at %d",
			  (unsigned) lookupRecord[i].lookupListIndex,
			  buffer->idx);
    }

    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
    {
      if (buffer->have_output)
	c->buffer->sync_so_far ();
      c->buffer->message (c->font,
			  "recursed to lookup %u",
			  (unsigned) lookupRecord[i].lookupListIndex);
    }

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust.
     *
     * TODO:
     *
     * Right now, if buffer length increased by n, we assume n new glyphs
     * were added right after the current position, and if buffer length
     * was decreased by n, we assume n match positions after the current
     * one where removed.  The former (buffer length increased) case is
     * fine, but the decrease case can be improved in at least two ways,
     * both of which are significant:
     *
     *   - If recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's current match position that was deleted,
     *     NOT the one after it.
     *
     *   - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions where removed, as there recursed-to
     *     lookup might had a different LookupFlag.  Here's a constructed
     *     case of that:
     *     https://github.com/harfbuzz/harfbuzz/discussions/3538
     *
     * It should be possible to construct tests for both of these cases.
     */

    end += delta;
    if (end < int (match_positions[idx]))
    {
      /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items.
       * Just never rewind end beyond start of current position, since that is
       * not possible in the recursed lookup.  Also adjust delta as such.
       *
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496
       * https://github.com/harfbuzz/harfbuzz/issues/1611
       */
      delta += match_positions[idx] - end;
      end = match_positions[idx];
    }

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
	break;
    }
    else
    {
      /* NOTE: delta is non-positive. */
      delta = hb_max (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
	     (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  (void) buffer->move_to (end);
}
1747
1748
1749
1750 /* Contextual lookups */
1751
/* Callback bundle used while computing glyph closure over contextual
 * lookups.  The funcs members abstract over the three context formats
 * (glyph / class / coverage based). */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;            /* intersects + intersected-glyphs callbacks */
  ContextFormat context_format;         /* Simple / ClassBased / CoverageBased */
  const void *intersects_data;          /* Opaque data handed to funcs.intersects
                                         * (e.g. a ClassDef or the table itself). */
  void *intersects_cache;               /* Optional memoization for funcs.intersects;
                                         * may be nullptr. */
  void *intersected_glyphs_cache;       /* Optional memoization for
                                         * funcs.intersected_glyphs; may be nullptr. */
};
1760
/* Callback bundle used while collecting all glyphs a contextual lookup
 * can touch (for e.g. subsetting / glyph-set analysis). */
struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;      /* collect callback for one input position */
  const void *collect_data;             /* Opaque data handed to funcs.collect. */
};
1766
/* Callback bundle used while matching/applying a contextual lookup
 * against the buffer. */
struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;              /* match callback for one input position */
  const void *match_data;               /* Opaque data handed to funcs.match. */
};
1772
1773 template <typename HBUINT>
context_intersects(const hb_set_t * glyphs,unsigned int inputCount,const HBUINT input[],ContextClosureLookupContext & lookup_context)1774 static inline bool context_intersects (const hb_set_t *glyphs,
1775 unsigned int inputCount, /* Including the first glyph (not matched) */
1776 const HBUINT input[], /* Array of input values--start with second glyph */
1777 ContextClosureLookupContext &lookup_context)
1778 {
1779 return array_is_subset_of (glyphs,
1780 inputCount ? inputCount - 1 : 0, input,
1781 lookup_context.funcs.intersects,
1782 lookup_context.intersects_data,
1783 lookup_context.intersects_cache);
1784 }
1785
1786 template <typename HBUINT>
context_closure_lookup(hb_closure_context_t * c,unsigned int inputCount,const HBUINT input[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ContextClosureLookupContext & lookup_context)1787 static inline void context_closure_lookup (hb_closure_context_t *c,
1788 unsigned int inputCount, /* Including the first glyph (not matched) */
1789 const HBUINT input[], /* Array of input values--start with second glyph */
1790 unsigned int lookupCount,
1791 const LookupRecord lookupRecord[],
1792 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1793 ContextClosureLookupContext &lookup_context)
1794 {
1795 if (context_intersects (c->glyphs,
1796 inputCount, input,
1797 lookup_context))
1798 context_closure_recurse_lookups (c,
1799 inputCount, input,
1800 lookupCount, lookupRecord,
1801 value,
1802 lookup_context.context_format,
1803 lookup_context.intersects_data,
1804 lookup_context.funcs.intersected_glyphs,
1805 lookup_context.intersected_glyphs_cache);
1806 }
1807
1808 template <typename HBUINT>
context_collect_glyphs_lookup(hb_collect_glyphs_context_t * c,unsigned int inputCount,const HBUINT input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextCollectGlyphsLookupContext & lookup_context)1809 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1810 unsigned int inputCount, /* Including the first glyph (not matched) */
1811 const HBUINT input[], /* Array of input values--start with second glyph */
1812 unsigned int lookupCount,
1813 const LookupRecord lookupRecord[],
1814 ContextCollectGlyphsLookupContext &lookup_context)
1815 {
1816 collect_array (c, c->input,
1817 inputCount ? inputCount - 1 : 0, input,
1818 lookup_context.funcs.collect, lookup_context.collect_data);
1819 recurse_lookups (c,
1820 lookupCount, lookupRecord);
1821 }
1822
1823 template <typename HBUINT>
context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int inputCount,const HBUINT input[],unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,const ContextApplyLookupContext & lookup_context)1824 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1825 unsigned int inputCount, /* Including the first glyph (not matched) */
1826 const HBUINT input[], /* Array of input values--start with second glyph */
1827 unsigned int lookupCount HB_UNUSED,
1828 const LookupRecord lookupRecord[] HB_UNUSED,
1829 const ContextApplyLookupContext &lookup_context)
1830 {
1831 return would_match_input (c,
1832 inputCount, input,
1833 lookup_context.funcs.match, lookup_context.match_data);
1834 }
1835
1836 template <typename HBUINT>
context_apply_lookup(hb_ot_apply_context_t * c,unsigned int inputCount,const HBUINT input[],unsigned int lookupCount,const LookupRecord lookupRecord[],const ContextApplyLookupContext & lookup_context)1837 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1838 unsigned int inputCount, /* Including the first glyph (not matched) */
1839 const HBUINT input[], /* Array of input values--start with second glyph */
1840 unsigned int lookupCount,
1841 const LookupRecord lookupRecord[],
1842 const ContextApplyLookupContext &lookup_context)
1843 {
1844 unsigned match_end = 0;
1845 unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
1846 if (match_input (c,
1847 inputCount, input,
1848 lookup_context.funcs.match, lookup_context.match_data,
1849 &match_end, match_positions))
1850 {
1851 c->buffer->unsafe_to_break (c->buffer->idx, match_end);
1852 apply_lookup (c,
1853 inputCount, match_positions,
1854 lookupCount, lookupRecord,
1855 match_end);
1856 return true;
1857 }
1858 else
1859 {
1860 c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
1861 return false;
1862 }
1863 }
1864
/* A single contextual rule (SequenceRule / ClassSequenceRule).
 *
 * Layout: inputCount, lookupCount, then inputCount-1 input values
 * (glyph ids for format 1, class values for format 2; the first glyph
 * is implied by the enclosing Coverage/ClassDef), then lookupCount
 * LookupRecords immediately after the input array (accessed via
 * StructAfter since both arrays are unsized). */
template <typename Types>
struct Rule
{
  /* True if every input position can match some glyph of `glyphs`. */
  bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
  {
    return context_intersects (glyphs,
			       inputCount, inputZ.arrayZ,
			       lookup_context);
  }

  /* Glyph closure: recurse into this rule's lookup records if reachable.
   * `value` is the first glyph's Coverage index or class value. */
  void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    /* LookupRecords live right after the inputCount-1 input values. */
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
			       (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
    context_closure_lookup (c,
			    inputCount, inputZ.arrayZ,
			    lookupCount, lookupRecord.arrayZ,
			    value, lookup_context);
  }

  /* Lookup-index closure: record which lookups this rule can trigger,
   * but only if the rule is reachable from c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c,
			ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;
    if (!intersects (c->glyphs, lookup_context)) return;

    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
			       (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
  }

  /* Collect all glyphs this rule's input sequence can match, then
   * recurse into its lookup records. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
		       ContextCollectGlyphsLookupContext &lookup_context) const
  {
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
			       (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
				   inputCount, inputZ.arrayZ,
				   lookupCount, lookupRecord.arrayZ,
				   lookup_context);
  }

  /* Probe-only match against c->glyphs; does not touch the buffer. */
  bool would_apply (hb_would_apply_context_t *c,
		    const ContextApplyLookupContext &lookup_context) const
  {
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
			       (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    return context_would_apply_lookup (c,
				       inputCount, inputZ.arrayZ,
				       lookupCount, lookupRecord.arrayZ,
				       lookup_context);
  }

  /* Match at the current buffer position and, on success, apply the
   * nested lookup records.  Returns whether the rule applied. */
  bool apply (hb_ot_apply_context_t *c,
	      const ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
			       (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
  }

  /* Serialize a remapped copy of this rule for subsetting.
   * input_mapping remaps glyph ids (format 1) or classes (format 2);
   * lookup_map remaps lookup indices.  Caller guarantees inputCount > 0. */
  bool serialize (hb_serialize_context_t *c,
		  const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
		  const hb_map_t *lookup_map) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!c->extend_min (out))) return_trace (false);

    out->inputCount = inputCount;
    const auto input = inputZ.as_array (inputCount - 1);
    for (const auto org : input)
    {
      /* Remap each input value through the subset plan's mapping. */
      HBUINT16 d;
      d = input_mapping->get (org);
      c->copy (d);
    }

    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
			       (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));

    /* Copy only lookup records whose lookup survives subsetting. */
    unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
    return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  /* Subset this rule: drop it entirely if any input value is not retained
   * (unmapped glyph/class), otherwise serialize the remapped copy. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);
    if (unlikely (!inputCount)) return_trace (false);
    const auto input = inputZ.as_array (inputCount - 1);

    /* klass_map selects class-based (format 2) remapping; otherwise
     * remap by glyph id. */
    const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
    if (!hb_all (input, mapping)) return_trace (false);
    return_trace (serialize (c->serializer, mapping, lookup_map));
  }

  public:
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Validate the two counts, then range-check the combined storage of
     * the input array plus the trailing LookupRecord array. */
    return_trace (inputCount.sanitize (c) &&
		  lookupCount.sanitize (c) &&
		  c->check_range (inputZ.arrayZ,
				  inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
				  LookupRecord::static_size * lookupCount));
  }

  protected:
  HBUINT16	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  HBUINT16	lookupCount;		/* Number of LookupRecords */
  UnsizedArrayOf<typename Types::HBUINT>
		inputZ;			/* Array of match inputs--start with
					 * second glyph */
/*UnsizedArrayOf<LookupRecord>
		lookupRecordX;*/	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY (4, inputZ);
};
1991
/* A set of Rules sharing the same first glyph (format 1) or class
 * (format 2), stored as offsets in preference order: the first rule
 * that matches wins at apply time. */
template <typename Types>
struct RuleSet
{
  using Rule = OT::Rule<Types>;

  /* True if any rule in the set may match against `glyphs`. */
  bool intersects (const hb_set_t *glyphs,
		   ContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Glyph closure over every rule in the set. */
  void closure (hb_closure_context_t *c, unsigned value,
		ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  /* Lookup-index closure over every rule in the set. */
  void closure_lookups (hb_closure_lookups_context_t *c,
			ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Collect matchable glyphs from every rule in the set. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
		       ContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* True if any rule would match c->glyphs (probe only; no buffer edits). */
  bool would_apply (hb_would_apply_context_t *c,
		    const ContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Try rules in order; hb_any short-circuits on the first that applies. */
  bool apply (hb_ot_apply_context_t *c,
	      const ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    return_trace (
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
    | hb_any
    )
    ;
  }

  /* Subset: copy each surviving rule; rules that fail to subset are
   * popped and their serializer output reverted.  The whole set is
   * reverted (and false returned) if no rule survives. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<Rule>& _ : rule)
    {
      if (!_) continue;
      /* Per-rule snapshot so a failed rule can be rolled back alone. */
      auto o_snap = c->serializer->snapshot ();
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
      {
	out->rule.pop ();
	c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
2107
2108
/* Contextual lookup, format 1 (and its large/24-bit variant, format 4):
 * glyph-based contexts.  The first glyph indexes a Coverage table, whose
 * coverage index selects a RuleSet matching literal glyph ids. */
template <typename Types>
struct ContextFormat1_4
{
  using RuleSet = OT::RuleSet<Types>;

  /* True if any covered glyph in `glyphs` leads to a rule set that
   * may match. */
  bool intersects (const hb_set_t *glyphs) const
  {
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    return
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups can add/remove glyphs via recursed lookups. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure: walk rule sets reachable from the parent's active
   * glyphs, pushing/popping the active-glyph frame around the walk. */
  void closure (hb_closure_context_t *c) const
  {
    hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (), cur_active_glyphs);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
    | hb_filter ([&] (hb_codepoint_t _) {
      /* Only glyphs active in the parent frame can start a match here. */
      return c->previous_parent_active_glyphs ().has (_);
    }, hb_first)
    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
    | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
    ;

    c->pop_cur_done_glyphs ();
  }

  /* Lookup-index closure over rule sets reachable from c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, nullptr},
      ContextFormat::SimpleContext,
      nullptr
    };

    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Contextual lookups carry no device/variation data of their own. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  /* Collect all glyphs any rule of any rule set may match. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      nullptr
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* Probe whether any rule would match c->glyphs starting at glyph 0. */
  bool would_apply (hb_would_apply_context_t *c) const
  {
    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  /* Apply at the current buffer position: coverage lookup on the current
   * glyph selects the rule set; rules match by literal glyph id. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* likely(): most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subset: keep only (covered glyph, rule set) pairs whose glyph is
   * retained and whose rule set subsets non-empty; rebuild Coverage
   * from the survivors. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  typename Types::template OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  Array16Of<typename Types::template OffsetTo<RuleSet>>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
};
2258
2259
/* Contextual lookup, format 2 (and its large variant, format 5):
 * class-based contexts.  The first glyph must be covered; its ClassDef
 * class selects a RuleSet whose rules match by class value. */
template <typename Types>
struct ContextFormat2_5
{
  using RuleSet = OT::RuleSet<SmallTypes>;

  /* True if some covered glyph's class leads to a rule set that may
   * match against `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &class_def = this+classDef;

    /* Cache class-intersection queries; rule sets re-test classes often. */
    hb_map_t cache;
    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, nullptr},
      ContextFormat::ClassBasedContext,
      &class_def,
      &cache
    };

    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);

    /* Classes actually represented among the covered-and-retained glyphs;
     * a rule set for any other class can never start a match. */
    hb_set_t coverage_glyph_classes;
    class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);


    return
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
	      { return class_def.intersects_class (glyphs, p.first) &&
		       coverage_glyph_classes.has (p.first) &&
		       p.second.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups can add/remove glyphs via recursed lookups. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure over rule sets whose class intersects the parent's
   * active glyphs; active-glyph frame pushed/popped around the walk. */
  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
				   cur_active_glyphs);

    const ClassDef &class_def = this+classDef;

    hb_map_t cache;
    intersected_class_cache_t intersected_cache;
    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def,
      &cache,
      &intersected_cache
    };

    + hb_enumerate (ruleSet)
    | hb_filter ([&] (unsigned _)
    { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
		 hb_first)
    | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<RuleSet>&> _)
    {
      const RuleSet& rule_set = this+_.second;
      /* The enumeration index is the class value of the first glyph. */
      rule_set.closure (c, _.first, lookup_context);
    })
    ;

    c->pop_cur_done_glyphs ();
  }

  /* Lookup-index closure over rule sets whose class intersects c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    hb_map_t cache;
    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, nullptr},
      ContextFormat::ClassBasedContext,
      &class_def,
      &cache
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
    { return class_def.intersects_class (c->glyphs, p.first); })
    | hb_map (hb_second)
    | hb_apply ([&] (const RuleSet & _)
    { _.closure_lookups (c, lookup_context); });
  }

  /* Contextual lookups carry no device/variation data of their own. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  /* Collect all glyphs any rule of any rule set may match (by class). */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* Probe whether the rule set for glyph 0's class would match. */
  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  /* Cost heuristic used by the apply machinery to decide whether to
   * enable the per-glyph class cache (see cache_func).  Zero disables. */
  unsigned cache_cost () const
  {
    unsigned c = (this+classDef).cost () * ruleSet.len;
    return c >= 4 ? c : 0;
  }
  /* Enter/exit the cached-apply mode: repurposes the buffer's `syllable`
   * var to memoize each glyph's class (255 = not yet computed). */
  bool cache_func (hb_ot_apply_context_t *c, bool enter) const
  {
    if (enter)
    {
      if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
	return false;
      auto &info = c->buffer->info;
      unsigned count = c->buffer->len;
      for (unsigned i = 0; i < count; i++)
	info[i].syllable() = 255;
      c->new_syllables = 255;
      return true;
    }
    else
    {
      c->new_syllables = (unsigned) -1;
      HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
      return true;
    }
  }

  /* Apply at the current buffer position.  With `cached`, the current
   * glyph's class is read from / stored into the syllable var to avoid
   * repeated ClassDef lookups. */
  bool apply (hb_ot_apply_context_t *c, bool cached = false) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ClassDef &class_def = this+classDef;

    struct ContextApplyLookupContext lookup_context = {
      {cached ? match_class_cached : match_class},
      &class_def
    };

    /* `index` is reused: from here on it holds the class, not the
     * coverage index. */
    if (cached && c->buffer->cur().syllable() < 255)
      index = c->buffer->cur().syllable ();
    else
    {
      index = class_def.get_class (c->buffer->cur().codepoint);
      if (cached && index < 255)
	c->buffer->cur().syllable() = index;
    }
    const RuleSet &rule_set = this+ruleSet[index];
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subset: remap the ClassDef, keep rule sets for surviving classes
   * that can actually start a match, then prune empty trailing rule
   * sets and revert the serializer to the last non-empty one. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
      return_trace (false);

    hb_map_t klass_map;
    out->classDef.serialize_subset (c, classDef, this, &klass_map);

    const hb_set_t* glyphset = c->plan->glyphset_gsub ();
    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);

    hb_set_t coverage_glyph_classes;
    (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    bool ret = true;
    int non_zero_index = -1, index = 0;
    /* `snapshot` tracks the serializer state after the last rule set
     * that serialized non-empty. */
    auto snapshot = c->serializer->snapshot();
    for (const auto& _ : + hb_enumerate (ruleSet)
			 | hb_filter (klass_map, hb_first))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
	ret = false;
	break;
      }

      if (coverage_glyph_classes.has (_.first) &&
	  o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
	non_zero_index = index;
	snapshot = c->serializer->snapshot();
      }

      index++;
    }

    if (!ret || non_zero_index == -1) return_trace (false);

    //prune empty trailing ruleSets
    --index;
    while (index > non_zero_index)
    {
      out->ruleSet.pop ();
      index--;
    }
    c->serializer->revert (snapshot);

    return_trace (bool (out->ruleSet));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 2 */
  typename Types::template OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  typename Types::template OffsetTo<ClassDef>
		classDef;		/* Offset to glyph ClassDef table--from
					 * beginning of table */
  Array16Of<typename Types::template OffsetTo<RuleSet>>
		ruleSet;		/* Array of RuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (4 + 2 * Types::size, ruleSet);
};
2521
2522
2523 struct ContextFormat3
2524 {
2525 using RuleSet = OT::RuleSet<SmallTypes>;
2526
intersectsOT::ContextFormat32527 bool intersects (const hb_set_t *glyphs) const
2528 {
2529 if (!(this+coverageZ[0]).intersects (glyphs))
2530 return false;
2531
2532 struct ContextClosureLookupContext lookup_context = {
2533 {intersects_coverage, nullptr},
2534 ContextFormat::CoverageBasedContext,
2535 this
2536 };
2537 return context_intersects (glyphs,
2538 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2539 lookup_context);
2540 }
2541
may_have_non_1to1OT::ContextFormat32542 bool may_have_non_1to1 () const
2543 { return true; }
2544
closureOT::ContextFormat32545 void closure (hb_closure_context_t *c) const
2546 {
2547 if (!(this+coverageZ[0]).intersects (c->glyphs))
2548 return;
2549
2550 hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
2551 get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
2552 cur_active_glyphs);
2553
2554
2555 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2556 struct ContextClosureLookupContext lookup_context = {
2557 {intersects_coverage, intersected_coverage_glyphs},
2558 ContextFormat::CoverageBasedContext,
2559 this
2560 };
2561 context_closure_lookup (c,
2562 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2563 lookupCount, lookupRecord,
2564 0, lookup_context);
2565
2566 c->pop_cur_done_glyphs ();
2567 }
2568
closure_lookupsOT::ContextFormat32569 void closure_lookups (hb_closure_lookups_context_t *c) const
2570 {
2571 if (!intersects (c->glyphs))
2572 return;
2573 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2574 recurse_lookups (c, lookupCount, lookupRecord);
2575 }
2576
collect_variation_indicesOT::ContextFormat32577 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2578
collect_glyphsOT::ContextFormat32579 void collect_glyphs (hb_collect_glyphs_context_t *c) const
2580 {
2581 (this+coverageZ[0]).collect_coverage (c->input);
2582
2583 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2584 struct ContextCollectGlyphsLookupContext lookup_context = {
2585 {collect_coverage},
2586 this
2587 };
2588
2589 context_collect_glyphs_lookup (c,
2590 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2591 lookupCount, lookupRecord,
2592 lookup_context);
2593 }
2594
would_applyOT::ContextFormat32595 bool would_apply (hb_would_apply_context_t *c) const
2596 {
2597 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2598 struct ContextApplyLookupContext lookup_context = {
2599 {match_coverage},
2600 this
2601 };
2602 return context_would_apply_lookup (c,
2603 glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2604 lookupCount, lookupRecord,
2605 lookup_context);
2606 }
2607
get_coverageOT::ContextFormat32608 const Coverage &get_coverage () const { return this+coverageZ[0]; }
2609
applyOT::ContextFormat32610 bool apply (hb_ot_apply_context_t *c) const
2611 {
2612 TRACE_APPLY (this);
2613 unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2614 if (likely (index == NOT_COVERED)) return_trace (false);
2615
2616 const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2617 struct ContextApplyLookupContext lookup_context = {
2618 {match_coverage},
2619 this
2620 };
2621 return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
2622 }
2623
  /* Serializes a subset copy of this Format 3 subtable into c->serializer:
   * copies format/glyphCount, subsets each coverage table, then remaps and
   * writes the LookupRecord array. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->glyphCount = glyphCount;

    auto coverages = coverageZ.as_array (glyphCount);

    for (const Offset16To<Coverage>& offset : coverages)
    {
      /* TODO(subset) This looks like should not be necessary to write this way. */
      /* Allocate each offset slot manually, then subset the pointed-to
       * Coverage into it. */
      auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
      if (unlikely (!o)) return_trace (false);
      if (!o->serialize_subset (c, offset, this)) return_trace (false);
    }

    const auto& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
    /* Lookup indices must be remapped through the plan's table-appropriate map. */
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;


    unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
    /* Retained-record count may differ from lookupCount; write it back. */
    return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }
2650
  /* Validates the struct header, every coverage offset/table, and the
   * trailing LookupRecord array against the sanitizer's bounds. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    /* LookupRecords sit right after the coverage-offset array. */
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    return_trace (c->check_array (lookupRecord, lookupCount));
  }
2663
2664 protected:
2665 HBUINT16 format; /* Format identifier--format = 3 */
2666 HBUINT16 glyphCount; /* Number of glyphs in the input glyph
2667 * sequence */
2668 HBUINT16 lookupCount; /* Number of LookupRecords */
2669 UnsizedArrayOf<Offset16To<Coverage>>
2670 coverageZ; /* Array of offsets to Coverage
2671 * table in glyph sequence order */
2672 /*UnsizedArrayOf<LookupRecord>
2673 lookupRecordX;*/ /* Array of LookupRecords--in
2674 * design order */
2675 public:
2676 DEFINE_SIZE_ARRAY (6, coverageZ);
2677 };
2678
/* Contextual lookup subtable: a tagged union over the supported formats,
 * dispatching on the leading 16-bit format field. */
struct Context
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch validates access to u.format before we switch on it. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
#ifndef HB_NO_BEYOND_64K
    /* Formats 4 and 5 are the wider (MediumTypes) variants of 1 and 2. */
    case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
    case 5: return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
#endif
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16			format;		/* Format identifier */
  ContextFormat1_4<SmallTypes>	format1;
  ContextFormat2_5<SmallTypes>	format2;
  ContextFormat3		format3;
#ifndef HB_NO_BEYOND_64K
  ContextFormat1_4<MediumTypes>	format4;
  ContextFormat2_5<MediumTypes>	format5;
#endif
  } u;
};
2710
2711
2712 /* Chaining Contextual lookups */
2713
/* Context shared by closure/intersects traversal of chained-context lookups.
 * Array index convention: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  ContextFormat context_format;
  const void *intersects_data[3];	/* per-sequence match data (nullptr or ClassDef, per format) */
  void *intersects_cache[3];		/* per-sequence caches for funcs.intersects */
  void *intersected_glyphs_cache;	/* cache for funcs.intersected_glyphs */
};
2722
/* Context for collect_glyphs traversal of chained-context lookups.
 * Array index convention: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];	/* per-sequence data passed to funcs.collect */
};
2728
/* Context for would_apply/apply of chained-context lookups.
 * Array index convention: [0]=backtrack, [1]=input, [2]=lookahead. */
struct ChainContextApplyLookupContext
{
  ChainContextApplyFuncs funcs;
  const void *match_data[3];	/* per-sequence data passed to funcs.match[i] */
};
2734
2735 template <typename HBUINT>
chain_context_intersects(const hb_set_t * glyphs,unsigned int backtrackCount,const HBUINT backtrack[],unsigned int inputCount,const HBUINT input[],unsigned int lookaheadCount,const HBUINT lookahead[],ChainContextClosureLookupContext & lookup_context)2736 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2737 unsigned int backtrackCount,
2738 const HBUINT backtrack[],
2739 unsigned int inputCount, /* Including the first glyph (not matched) */
2740 const HBUINT input[], /* Array of input values--start with second glyph */
2741 unsigned int lookaheadCount,
2742 const HBUINT lookahead[],
2743 ChainContextClosureLookupContext &lookup_context)
2744 {
2745 return array_is_subset_of (glyphs,
2746 backtrackCount, backtrack,
2747 lookup_context.funcs.intersects,
2748 lookup_context.intersects_data[0],
2749 lookup_context.intersects_cache[0])
2750 && array_is_subset_of (glyphs,
2751 inputCount ? inputCount - 1 : 0, input,
2752 lookup_context.funcs.intersects,
2753 lookup_context.intersects_data[1],
2754 lookup_context.intersects_cache[1])
2755 && array_is_subset_of (glyphs,
2756 lookaheadCount, lookahead,
2757 lookup_context.funcs.intersects,
2758 lookup_context.intersects_data[2],
2759 lookup_context.intersects_cache[2]);
2760 }
2761
2762 template <typename HBUINT>
chain_context_closure_lookup(hb_closure_context_t * c,unsigned int backtrackCount,const HBUINT backtrack[],unsigned int inputCount,const HBUINT input[],unsigned int lookaheadCount,const HBUINT lookahead[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ChainContextClosureLookupContext & lookup_context)2763 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2764 unsigned int backtrackCount,
2765 const HBUINT backtrack[],
2766 unsigned int inputCount, /* Including the first glyph (not matched) */
2767 const HBUINT input[], /* Array of input values--start with second glyph */
2768 unsigned int lookaheadCount,
2769 const HBUINT lookahead[],
2770 unsigned int lookupCount,
2771 const LookupRecord lookupRecord[],
2772 unsigned value,
2773 ChainContextClosureLookupContext &lookup_context)
2774 {
2775 if (chain_context_intersects (c->glyphs,
2776 backtrackCount, backtrack,
2777 inputCount, input,
2778 lookaheadCount, lookahead,
2779 lookup_context))
2780 context_closure_recurse_lookups (c,
2781 inputCount, input,
2782 lookupCount, lookupRecord,
2783 value,
2784 lookup_context.context_format,
2785 lookup_context.intersects_data[1],
2786 lookup_context.funcs.intersected_glyphs,
2787 lookup_context.intersected_glyphs_cache);
2788 }
2789
/* Collects all glyphs referenced by one chained rule into the context's
 * before/input/after sets, then recurses into the rule's lookups. */
template <typename HBUINT>
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
							unsigned int backtrackCount,
							const HBUINT backtrack[],
							unsigned int inputCount, /* Including the first glyph (not matched) */
							const HBUINT input[], /* Array of input values--start with second glyph */
							unsigned int lookaheadCount,
							const HBUINT lookahead[],
							unsigned int lookupCount,
							const LookupRecord lookupRecord[],
							ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
		 backtrackCount, backtrack,
		 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  /* First input glyph is covered by the subtable's own coverage. */
  collect_array (c, c->input,
		 inputCount ? inputCount - 1 : 0, input,
		 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
		 lookaheadCount, lookahead,
		 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
		   lookupCount, lookupRecord);
}
2814
2815 template <typename HBUINT>
chain_context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int backtrackCount,const HBUINT backtrack[]HB_UNUSED,unsigned int inputCount,const HBUINT input[],unsigned int lookaheadCount,const HBUINT lookahead[]HB_UNUSED,unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,const ChainContextApplyLookupContext & lookup_context)2816 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2817 unsigned int backtrackCount,
2818 const HBUINT backtrack[] HB_UNUSED,
2819 unsigned int inputCount, /* Including the first glyph (not matched) */
2820 const HBUINT input[], /* Array of input values--start with second glyph */
2821 unsigned int lookaheadCount,
2822 const HBUINT lookahead[] HB_UNUSED,
2823 unsigned int lookupCount HB_UNUSED,
2824 const LookupRecord lookupRecord[] HB_UNUSED,
2825 const ChainContextApplyLookupContext &lookup_context)
2826 {
2827 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2828 && would_match_input (c,
2829 inputCount, input,
2830 lookup_context.funcs.match[1], lookup_context.match_data[1]);
2831 }
2832
/* Full application of one chained rule at the current buffer position:
 * match input forward, then lookahead, then backtrack (in the out-buffer),
 * and on success apply the nested lookups over the matched input positions.
 * On failure, marks the scanned range unsafe-to-concat for the buffer. */
template <typename HBUINT>
static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
					       unsigned int backtrackCount,
					       const HBUINT backtrack[],
					       unsigned int inputCount, /* Including the first glyph (not matched) */
					       const HBUINT input[], /* Array of input values--start with second glyph */
					       unsigned int lookaheadCount,
					       const HBUINT lookahead[],
					       unsigned int lookupCount,
					       const LookupRecord lookupRecord[],
					       const ChainContextApplyLookupContext &lookup_context)
{
  unsigned end_index = c->buffer->idx;
  unsigned match_end = 0;
  unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
  /* NOTE: `(end_index = match_end)` is a deliberate assignment, not a
   * comparison: it extends the failure range to cover the matched input
   * before lookahead matching (which may advance end_index further). */
  if (!(match_input (c,
		     inputCount, input,
		     lookup_context.funcs.match[1], lookup_context.match_data[1],
		     &match_end, match_positions) && (end_index = match_end)
	&& match_lookahead (c,
			    lookaheadCount, lookahead,
			    lookup_context.funcs.match[2], lookup_context.match_data[2],
			    match_end, &end_index)))
  {
    c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
    return false;
  }

  /* Backtrack is matched against already-output glyphs (out-buffer). */
  unsigned start_index = c->buffer->out_len;
  if (!match_backtrack (c,
			backtrackCount, backtrack,
			lookup_context.funcs.match[0], lookup_context.match_data[0],
			&start_index))
  {
    c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
    return false;
  }

  c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
  apply_lookup (c,
		inputCount, match_positions,
		lookupCount, lookupRecord,
		match_end);
  return true;
}
2878
/* One chained-context rule: backtrack, input, and lookahead glyph/class
 * sequences plus the LookupRecords to apply on a match.  The four
 * variable-length arrays are laid out back-to-back in the binary; only
 * `backtrack` is a declared member, the rest are reached via StructAfter. */
template <typename Types>
struct ChainRule
{
  /* True if every sequence of the rule can match within `glyphs`. */
  bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    return chain_context_intersects (glyphs,
				     backtrack.len, backtrack.arrayZ,
				     input.lenP1, input.arrayZ,
				     lookahead.len, lookahead.arrayZ,
				     lookup_context);
  }

  /* Glyph-closure: recurse into this rule's lookups if it is reachable. */
  void closure (hb_closure_context_t *c, unsigned value,
		ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    chain_context_closure_lookup (c,
				  backtrack.len, backtrack.arrayZ,
				  input.lenP1, input.arrayZ,
				  lookahead.len, lookahead.arrayZ,
				  lookup.len, lookup.arrayZ,
				  value,
				  lookup_context);
  }

  /* Lookup-closure: mark this rule's referenced lookups as live. */
  void closure_lookups (hb_closure_lookups_context_t *c,
			ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;
    if (!intersects (c->glyphs, lookup_context)) return;

    /* input/lookahead are only dereferenced to locate the lookup array. */
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    recurse_lookups (c, lookup.len, lookup.arrayZ);
  }

  /* Accumulate all glyphs this rule can touch. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
		       ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, backtrack.arrayZ,
					 input.lenP1, input.arrayZ,
					 lookahead.len, lookahead.arrayZ,
					 lookup.len, lookup.arrayZ,
					 lookup_context);
  }

  /* Non-destructive applicability test. */
  bool would_apply (hb_would_apply_context_t *c,
		    const ChainContextApplyLookupContext &lookup_context) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    return chain_context_would_apply_lookup (c,
					     backtrack.len, backtrack.arrayZ,
					     input.lenP1, input.arrayZ,
					     lookahead.len, lookahead.arrayZ, lookup.len,
					     lookup.arrayZ, lookup_context);
  }

  /* Match and apply this rule at the current buffer position. */
  bool apply (hb_ot_apply_context_t *c,
	      const ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, backtrack.arrayZ,
					      input.lenP1, input.arrayZ,
					      lookahead.len, lookahead.arrayZ, lookup.len,
					      lookup.arrayZ, lookup_context));
  }

  /* Writes `len` followed by each mapped element as HBUINT16. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  void serialize_array (hb_serialize_context_t *c,
			HBUINT16 len,
			Iterator it) const
  {
    c->copy (len);
    for (const auto g : it)
      c->copy ((HBUINT16) g);
  }

  /* Serializes a remapped copy of this rule.  `backtrack_map` doubles as
   * the mapping for input/lookahead when those maps are not supplied
   * (glyph-based rules use one glyph map for all three sequences). */
  bool serialize (hb_serialize_context_t *c,
		  const hb_map_t *lookup_map,
		  const hb_map_t *backtrack_map,
		  const hb_map_t *input_map = nullptr,
		  const hb_map_t *lookahead_map = nullptr) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!out)) return_trace (false);

    const hb_map_t *mapping = backtrack_map;
    serialize_array (c, backtrack.len, + backtrack.iter ()
				       | hb_map (mapping));

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (input_map) mapping = input_map;
    serialize_array (c, input.lenP1, + input.iter ()
				     | hb_map (mapping));

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (lookahead_map) mapping = lookahead_map;
    serialize_array (c, lookahead.len, + lookahead.iter ()
				       | hb_map (mapping));

    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);

    /* Reserve the count slot first; actual retained count is written below. */
    HBUINT16* lookupCount = c->embed (&(lookup.len));
    if (!lookupCount) return_trace (false);

    unsigned count = serialize_lookuprecord_array (c, lookup.as_array (), lookup_map);
    return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  /* Subsets this rule: drops it (returns false) if any sequence element
   * is not retained; otherwise serializes a remapped copy. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *backtrack_map = nullptr,
	       const hb_map_t *input_map = nullptr,
	       const hb_map_t *lookahead_map = nullptr) const
  {
    TRACE_SUBSET (this);

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);

    if (!backtrack_map)
    {
      /* Glyph-based rule: validate against the retained glyph set and
       * remap through the plan's glyph map. */
      const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
      if (!hb_all (backtrack, glyphset) ||
	  !hb_all (input, glyphset) ||
	  !hb_all (lookahead, glyphset))
	return_trace (false);

      serialize (c->serializer, lookup_map, c->plan->glyph_map);
    }
    else
    {
      /* Class-based rule: validate/remap through the per-sequence class maps. */
      if (!hb_all (backtrack, backtrack_map) ||
	  !hb_all (input, input_map) ||
	  !hb_all (lookahead, lookahead_map))
	return_trace (false);

      serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
    }

    return_trace (true);
  }

  /* Validates each of the four trailing arrays in layout order. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c)) return_trace (false);
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (!input.sanitize (c)) return_trace (false);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (!lookahead.sanitize (c)) return_trace (false);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  Array16Of<typename Types::HBUINT>
		backtrack;		/* Array of backtracking values
					 * (to be matched before the input
					 * sequence) */
  HeadlessArrayOf<typename Types::HBUINT>
		inputX;			/* Array of input values (start with
					 * second glyph) */
  Array16Of<typename Types::HBUINT>
		lookaheadX;		/* Array of lookahead values's (to be
					 * matched after the input sequence) */
  Array16Of<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (8);
};
3070
/* A preference-ordered set of ChainRules that all share the same first
 * input glyph/class.  Most operations fan out over the rules. */
template <typename Types>
struct ChainRuleSet
{
  using ChainRule = OT::ChainRule<Types>;

  /* True if any rule in the set can match within `glyphs`. */
  bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }
  /* Runs glyph-closure over every rule. */
  void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  /* Runs lookup-closure over every rule. */
  void closure_lookups (hb_closure_lookups_context_t *c,
			ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Collects glyphs referenced by every rule. */
  void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* True if any rule would apply. */
  bool would_apply (hb_would_apply_context_t *c,
		    const ChainContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Applies the first rule that matches (hb_any short-circuits). */
  bool apply (hb_ot_apply_context_t *c,
	      const ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    return_trace (
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
    | hb_any
    )
    ;
  }

  /* Subsets the set rule-by-rule, reverting the serializer for each rule
   * that drops out; fails (and reverts fully) if no rule survives. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *backtrack_klass_map = nullptr,
	       const hb_map_t *input_klass_map = nullptr,
	       const hb_map_t *lookahead_klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<ChainRule>& _ : rule)
    {
      if (!_) continue;
      auto o_snap = c->serializer->snapshot ();
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      if (!o->serialize_subset (c, _, this,
				lookup_map,
				backtrack_klass_map,
				input_klass_map,
				lookahead_klass_map))
      {
	/* Rule not retained: undo the appended offset slot. */
	out->rule.pop ();
	c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<ChainRule>
		rule;			/* Array of ChainRule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
3189
/* Chained Sequence Context Format 1 (and 24-bit Format 4): simple
 * glyph-based contexts.  A coverage table selects a ChainRuleSet per
 * covered first-input glyph. */
template <typename Types>
struct ChainContextFormat1_4
{
  using ChainRuleSet = OT::ChainRuleSet<Types>;

  /* True if any rule set of a covered glyph can match within `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    return
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups may substitute more/fewer glyphs than they match. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph-closure: visit rule sets of glyphs active in the parent set. */
  void closure (hb_closure_context_t *c) const
  {
    hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
				   cur_active_glyphs);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
    | hb_filter ([&] (hb_codepoint_t _) {
      return c->previous_parent_active_glyphs ().has (_);
    }, hb_first)
    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
    | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
    ;

    c->pop_cur_done_glyphs ();
  }

  /* Lookup-closure over rule sets of glyphs present in c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, nullptr},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* No variation indices in contextual subtables themselves. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {nullptr, nullptr, nullptr}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_glyph, match_glyph, match_glyph}},
      {nullptr, nullptr, nullptr}
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Coverage index of the current glyph selects the rule set. */
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_glyph, match_glyph, match_glyph}},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subsets coverage and rule sets in lockstep; only covered-and-retained
   * glyphs whose rule set survives remain in the new coverage. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  typename Types::template OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
};
3339
3340 template <typename Types>
3341 struct ChainContextFormat2_5
3342 {
3343 using ChainRuleSet = OT::ChainRuleSet<SmallTypes>;
3344
  /* True if any class-based rule set can match within `glyphs`.
   * A rule set (indexed by input class) is considered only if its class
   * both intersects `glyphs` and occurs among covered glyphs. */
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Per-ClassDef intersects caches: [0]=backtrack, [1]=input, [2]=lookahead. */
    hb_map_t caches[3] = {};
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, nullptr},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def},
      {&caches[0], &caches[1], &caches[2]}
    };

    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);

    /* Classes actually represented among the covered-and-retained glyphs. */
    hb_set_t coverage_glyph_classes;
    input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

    return
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
	      { return input_class_def.intersects_class (glyphs, p.first) &&
		       coverage_glyph_classes.has (p.first) &&
		       p.second.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }
3381
  /* Contextual lookups may substitute more/fewer glyphs than they match. */
  bool may_have_non_1to1 () const
  { return true; }
3384
  /* Glyph-closure for class-based contexts: visits each rule set whose
   * input class intersects the parent's active glyphs. */
  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
				   cur_active_glyphs);


    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Per-ClassDef caches ([0]=backtrack, [1]=input, [2]=lookahead) plus a
     * cache for class-to-glyphs expansion during recursion. */
    hb_map_t caches[3] = {};
    intersected_class_cache_t intersected_cache;
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def},
      {&caches[0], &caches[1], &caches[2]},
      &intersected_cache
    };

    /* Rule-set index doubles as the input class value. */
    + hb_enumerate (ruleSet)
    | hb_filter ([&] (unsigned _)
		 { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
		 hb_first)
    | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<ChainRuleSet>&> _)
		{
		  const ChainRuleSet& chainrule_set = this+_.second;
		  chainrule_set.closure (c, _.first, lookup_context);
		})
    ;

    c->pop_cur_done_glyphs ();
  }
3424
  /* Lookup-closure: visits rule sets whose input class intersects c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Per-ClassDef intersects caches: [0]=backtrack, [1]=input, [2]=lookahead. */
    hb_map_t caches[3] = {};
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, nullptr},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def},
      {&caches[0], &caches[1], &caches[2]}
    };

    /* Enumeration index is the input class of each rule set. */
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_filter([&] (unsigned klass)
		{ return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const ChainRuleSet &_)
		{ _.closure_lookups (c, lookup_context); })
    ;
  }
3454
  /* No variation indices in contextual subtables themselves. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3456
  /* Collects covered glyphs plus all glyphs reachable through each rule
   * set's class-based sequences. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }
3477
  /* Non-destructive applicability test: the first glyph's input class
   * selects the rule set, which is matched with class-based matchers. */
  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_class, match_class, match_class}},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.would_apply (c, lookup_context);
  }
3494
  /* Coverage of the first input glyph; gates apply(). */
  const Coverage &get_coverage () const { return this+coverage; }
3496
cache_costOT::ChainContextFormat2_53497 unsigned cache_cost () const
3498 {
3499 unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len;
3500 return c >= 4 ? c : 0;
3501 }
cache_funcOT::ChainContextFormat2_53502 bool cache_func (hb_ot_apply_context_t *c, bool enter) const
3503 {
3504 if (enter)
3505 {
3506 if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
3507 return false;
3508 auto &info = c->buffer->info;
3509 unsigned count = c->buffer->len;
3510 for (unsigned i = 0; i < count; i++)
3511 info[i].syllable() = 255;
3512 c->new_syllables = 255;
3513 return true;
3514 }
3515 else
3516 {
3517 c->new_syllables = (unsigned) -1;
3518 HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
3519 return true;
3520 }
3521 }
3522
applyOT::ChainContextFormat2_53523 bool apply (hb_ot_apply_context_t *c, bool cached = false) const
3524 {
3525 TRACE_APPLY (this);
3526 unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3527 if (likely (index == NOT_COVERED)) return_trace (false);
3528
3529 const ClassDef &backtrack_class_def = this+backtrackClassDef;
3530 const ClassDef &input_class_def = this+inputClassDef;
3531 const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3532
3533 /* For ChainContextFormat2_5 we cache the LookaheadClassDef instead of InputClassDef.
3534 * The reason is that most heavy fonts want to identify a glyph in context and apply
3535 * a lookup to it. In this scenario, the length of the input sequence is one, whereas
3536 * the lookahead / backtrack are typically longer. The one glyph in input sequence is
3537 * looked-up below and no input glyph is looked up in individual rules, whereas the
3538 * lookahead and backtrack glyphs are tried. Since we match lookahead before backtrack,
3539 * we should cache lookahead. This decisions showed a 20% improvement in shaping of
3540 * the Gulzar font.
3541 */
3542
3543 struct ChainContextApplyLookupContext lookup_context = {
3544 {{cached && &backtrack_class_def == &input_class_def ? match_class_cached : match_class,
3545 cached && &input_class_def == &lookahead_class_def ? match_class_cached : match_class,
3546 cached ? match_class_cached : match_class}},
3547 {&backtrack_class_def,
3548 &input_class_def,
3549 &lookahead_class_def}
3550 };
3551
3552 index = input_class_def.get_class (c->buffer->cur().codepoint);
3553 const ChainRuleSet &rule_set = this+ruleSet[index];
3554 return_trace (rule_set.apply (c, lookup_context));
3555 }
3556
  /* Subset this subtable: remap the three ClassDefs, keep only rule sets
   * whose input class survives, and prune empty trailing rule sets using
   * serializer snapshot/revert. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    out->coverage.serialize_subset (c, coverage, this);

    /* Old-class -> new-class mappings produced by ClassDef subsetting;
     * also used below to filter which rule sets survive. */
    hb_map_t backtrack_klass_map;
    hb_map_t input_klass_map;
    hb_map_t lookahead_klass_map;

    out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
    // TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
    out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
    out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);

    if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
						   input_klass_map,
						   lookahead_klass_map)))
      return_trace (false);

    /* Classes of the glyphs that remain in the (subsetted) Coverage; a rule
     * set can only ever fire for one of these classes. */
    const hb_set_t* glyphset = c->plan->glyphset_gsub ();
    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);

    hb_set_t coverage_glyph_classes;
    (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

    /* Track the last rule set that serialized non-empty, so trailing empty
     * ones can be reverted away afterwards. */
    int non_zero_index = -1, index = 0;
    bool ret = true;
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    auto last_non_zero = c->serializer->snapshot ();
    for (const auto& _ : + hb_enumerate (ruleSet)
			 | hb_filter (input_klass_map, hb_first))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
	ret = false;
	break;
      }
      if (coverage_glyph_classes.has (_.first) &&
	  o->serialize_subset (c, _.second, this,
			       lookup_map,
			       &backtrack_klass_map,
			       &input_klass_map,
			       &lookahead_klass_map))
      {
	last_non_zero = c->serializer->snapshot ();
	non_zero_index = index;
      }

      index++;
    }

    if (!ret || non_zero_index == -1) return_trace (false);

    // prune empty trailing ruleSets
    if (index > non_zero_index) {
      c->serializer->revert (last_non_zero);
      out->ruleSet.len = non_zero_index + 1;
    }

    return_trace (bool (out->ruleSet));
  }
3623
sanitizeOT::ChainContextFormat2_53624 bool sanitize (hb_sanitize_context_t *c) const
3625 {
3626 TRACE_SANITIZE (this);
3627 return_trace (coverage.sanitize (c, this) &&
3628 backtrackClassDef.sanitize (c, this) &&
3629 inputClassDef.sanitize (c, this) &&
3630 lookaheadClassDef.sanitize (c, this) &&
3631 ruleSet.sanitize (c, this));
3632 }
3633
  protected:
  /* Wire-format layout: field order and sizes define the binary table;
   * do not reorder. */
  HBUINT16	format;			/* Format identifier--format = 2 */
  typename Types::template OffsetTo<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of table */
  typename Types::template OffsetTo<ClassDef>
		backtrackClassDef;	/* Offset to glyph ClassDef table
					 * containing backtrack sequence
					 * data--from beginning of table */
  typename Types::template OffsetTo<ClassDef>
		inputClassDef;		/* Offset to glyph ClassDef
					 * table containing input sequence
					 * data--from beginning of table */
  typename Types::template OffsetTo<ClassDef>
		lookaheadClassDef;	/* Offset to glyph ClassDef table
					 * containing lookahead sequence
					 * data--from beginning of table */
  Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
		ruleSet;		/* Array of ChainRuleSet tables
					 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (4 + 4 * Types::size, ruleSet);
};
3657
/* Chained Sequence Context, Format 3: coverage-based contexts.  The
 * backtrack/input/lookahead sequences are arrays of Coverage offsets laid
 * out back-to-back; each one is located with StructAfter on the previous. */
struct ChainContextFormat3
{
  using RuleSet = OT::RuleSet<SmallTypes>;

  /* True if the first input Coverage intersects `glyphs` and all three
   * coverage sequences can be satisfied from `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    if (!(this+input[0]).intersects (glyphs))
      return false;

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, nullptr},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    /* First input coverage was checked above; hand the matcher the rest
     * (arrayZ + 1). */
    return chain_context_intersects (glyphs,
				     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
				     input.len, (const HBUINT16 *) input.arrayZ + 1,
				     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
				     lookup_context);
  }

  /* Chaining context may apply lookups at arbitrary input positions. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure: if this subtable can match, propagate closure through
   * its lookup records. */
  void closure (hb_closure_context_t *c) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    /* Restrict the active-glyph set to what this subtable's coverage
     * admits, for the nested lookups. */
    hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
				   cur_active_glyphs);


    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    chain_context_closure_lookup (c,
				  backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
				  input.len, (const HBUINT16 *) input.arrayZ + 1,
				  lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
				  lookup.len, lookup.arrayZ,
				  0, lookup_context);

    c->pop_cur_done_glyphs ();
  }

  /* Mark referenced lookups reachable iff this subtable can match. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!intersects (c->glyphs))
      return;

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    recurse_lookups (c, lookup.len, lookup.arrayZ);
  }

  /* Intentionally a no-op: no variation data lives in this subtable. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  /* Collect all glyphs reachable through the three coverage sequences. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    (this+input[0]).collect_coverage (c->input);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					 input.len, (const HBUINT16 *) input.arrayZ + 1,
					 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					 lookup.len, lookup.arrayZ,
					 lookup_context);
  }

  /* Non-destructive "could this match?" check. */
  bool would_apply (hb_would_apply_context_t *c) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_coverage, match_coverage, match_coverage}},
      {this, this, this}
    };
    return chain_context_would_apply_lookup (c,
					     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					     input.len, (const HBUINT16 *) input.arrayZ + 1,
					     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					     lookup.len, lookup.arrayZ, lookup_context);
  }

  /* Coverage of the first input position gates application. */
  const Coverage &get_coverage () const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    return this+input[0];
  }

  /* Match-and-apply entry point. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_coverage, match_coverage, match_coverage}},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					      input.len, (const HBUINT16 *) input.arrayZ + 1,
					      lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					      lookup.len, lookup.arrayZ, lookup_context));
  }

  /* Serialize one coverage-offset array, subsetting each referenced
   * Coverage table; the leading HBUINT16 is the array's count slot. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();

    if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
      return_trace (false);

    for (auto& offset : it) {
      auto *o = out->serialize_append (c->serializer);
      if (unlikely (!o) || !o->serialize_subset (c, offset, base))
	return_trace (false);
    }

    return_trace (true);
  }

  /* Subset: serialize format, then the three coverage arrays in wire
   * order, then the remapped lookup records. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out)) return_trace (false);
    if (unlikely (!c->serializer->embed (this->format))) return_trace (false);

    if (!serialize_coverage_offsets (c, backtrack.iter (), this))
      return_trace (false);

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (!serialize_coverage_offsets (c, input.iter (), this))
      return_trace (false);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (!serialize_coverage_offsets (c, lookahead.iter (), this))
      return_trace (false);

    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;

    HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookup.len);
    if (!lookupCount) return_trace (false);

    unsigned count = serialize_lookuprecord_array (c->serializer, lookup.as_array (), lookup_map);
    return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  /* Validate each variable-length piece in order; each sanitize extends
   * the known-good range so the next StructAfter is safe. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    if (!input.len) return_trace (false); /* To be consistent with Context. */
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    return_trace (lookup.sanitize (c));
  }

  protected:
  /* Only `backtrack` is at a fixed offset; the X-suffixed members are
   * placeholders used for their types — the real data follows the
   * preceding array and is reached via StructAfter. */
  HBUINT16	format;			/* Format identifier--format = 3 */
  Array16OfOffset16To<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  Array16Of<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};
3872
/* Chained Sequence Context lookup subtable: a tagged union over the
 * supported formats, dispatched on the leading `format` discriminant.
 * Formats 4/5 are the beyond-64K (Medium) variants. */
struct ChainContext
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch also sanitizes `format` on the sanitize pass. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
#ifndef HB_NO_BEYOND_64K
    case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
    case 5: return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
#endif
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16				format;		/* Format identifier */
  ChainContextFormat1_4<SmallTypes>	format1;
  ChainContextFormat2_5<SmallTypes>	format2;
  ChainContextFormat3			format3;
#ifndef HB_NO_BEYOND_64K
  ChainContextFormat1_4<MediumTypes>	format4;
  ChainContextFormat2_5<MediumTypes>	format5;
#endif
  } u;
};
3904
3905
/* Extension lookup subtable, format 1: wraps any other lookup subtable
 * behind a 32-bit offset, allowing subtables beyond 16-bit reach.
 * T is the lookup family (GSUB/GPOS Lookup) providing SubTable. */
template <typename T>
struct ExtensionFormat1
{
  /* The wrapped subtable's lookup type. */
  unsigned int get_type () const { return extensionLookupType; }

  template <typename X>
  const X& get_subtable () const
  /* extensionOffset is a plain Offset32; view it as a typed offset so
   * this+offset resolves to the wrapped subtable. */
  { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    /* Forward to the wrapped subtable, passing its real lookup type. */
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  { dispatch (c); }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* An Extension wrapping another Extension is disallowed (no nesting). */
    return_trace (c->check_struct (this) &&
		  extensionLookupType != T::SubTable::Extension);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->extensionLookupType = extensionLookupType;

    /* Same typed-offset reinterpretation as get_subtable(), on both the
     * source and destination sides. */
    const auto& src_offset =
        reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
    auto& dest_offset =
        reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);

    return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
  }

  protected:
  HBUINT16	format;			/* Format identifier. Set to 1. */
  HBUINT16	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  Offset32	extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
3962
/* Extension lookup wrapper: format-dispatching shell over
 * ExtensionFormat1 (the only defined format). */
template <typename T>
struct Extension
{
  /* Wrapped subtable's lookup type, or 0 for an unknown format. */
  unsigned int get_type () const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  const X& get_subtable () const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::SubTable> ();
    default:return Null (typename T::SubTable);
    }
  }

  // Specialization of dispatch for subset. dispatch() normally just
  // dispatches to the sub table this points too, but for subset
  // we need to run subset on this subtable too.
  template <typename ...Ts>
  typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
  {
    switch (u.format) {
    case 1: return u.format1.subset (c);
    default: return c->default_return_value ();
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ExtensionFormat1<T>	format1;
  } u;
};
4011
4012
4013 /*
4014 * GSUB/GPOS Common
4015 */
4016
/* Per-lookup acceleration: flattened subtable list, a combined digest for
 * fast rejection, and (optionally) one subtable elected to drive an
 * apply-time cache. */
struct hb_ot_layout_lookup_accelerator_t
{
  /* Build the accelerator for `lookup`: collect its subtables, union
   * their digests, and record which subtable (if any) owns the cache. */
  template <typename TLookup>
  void init (const TLookup &lookup)
  {
    subtables.init ();
    hb_accelerate_subtables_context_t c_accelerate_subtables (subtables);
    lookup.dispatch (&c_accelerate_subtables);

    digest.init ();
    for (auto& subtable : hb_iter (subtables))
      digest.add (subtable.digest);

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    /* Only the elected subtable keeps its cached-apply entry point; all
     * others fall back to the plain apply function even on the cached path. */
    cache_user_idx = c_accelerate_subtables.cache_user_idx;
    for (unsigned i = 0; i < subtables.length; i++)
      if (i != cache_user_idx)
	subtables[i].apply_cached_func = subtables[i].apply_func;
#endif
  }
  void fini () { subtables.fini (); }

  /* Cheap set-digest rejection test (may give false positives, never
   * false negatives). */
  bool may_have (hb_codepoint_t g) const
  { return digest.may_have (g); }

  /* Try each subtable in order until one applies. */
  bool apply (hb_ot_apply_context_t *c, bool use_cache) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    if (use_cache)
    {
      return
      + hb_iter (subtables)
      | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
      | hb_any
      ;
    }
    else
#endif
    {
      return
      + hb_iter (subtables)
      | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
      | hb_any
      ;
    }
    return false;
  }

  /* Activate the elected subtable's cache; false if none elected or
   * setup failed (caller then applies uncached). */
  bool cache_enter (hb_ot_apply_context_t *c) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    return cache_user_idx != (unsigned) -1 &&
	   subtables[cache_user_idx].cache_enter (c);
#else
    return false;
#endif
  }
  /* Tear down the cache; only call after a successful cache_enter(). */
  void cache_leave (hb_ot_apply_context_t *c) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    subtables[cache_user_idx].cache_leave (c);
#endif
  }


  hb_set_digest_t digest;
  private:
  hb_accelerate_subtables_context_t::array_t subtables;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  /* Index of the cache-owning subtable, or (unsigned) -1 for none. */
  unsigned cache_user_idx = (unsigned) -1;
#endif
};
4089
/* Shared GSUB/GPOS header layout for versions 1.0/1.1 (Small) and the
 * 64K-extended 2.0 (Medium).  featureVars exists only from 1.1 on. */
template <typename Types>
struct GSUBGPOSVersion1_2
{
  friend struct GSUBGPOS;

  protected:
  FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  typename Types:: template OffsetTo<ScriptList>
		scriptList;	/* ScriptList table */
  typename Types::template OffsetTo<FeatureList>
		featureList;	/* FeatureList table */
  typename Types::template OffsetTo<LookupList<Types>>
		lookupList;	/* LookupList table */
  Offset32To<FeatureVariations>
		featureVars;	/* Offset to Feature Variations
				 * table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (4 + 3 * Types::size);

  /* Header size; featureVars is only present from version 1.1. */
  unsigned int get_size () const
  {
    return min_size +
	   (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

  const typename Types::template OffsetTo<LookupList<Types>>* get_lookup_list_offset () const
  {
    return &lookupList;
  }

  /* TLookup selects GSUB vs GPOS lookup sanitization. */
  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef List16OfOffsetTo<TLookup, typename Types::HBUINT> TLookupList;
    if (unlikely (!(scriptList.sanitize (c, this) &&
		    featureList.sanitize (c, this) &&
		    reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
      return_trace (false);

#ifndef HB_NO_VAR
    if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
      return_trace (false);
#endif

    return_trace (true);
  }

  /* Subset lookups, then features, then scripts (order matters: later
   * stages consult the remappings produced by earlier ones via `c`). */
  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);

    typedef LookupOffsetList<TLookup, typename Types::HBUINT> TLookupList;
    reinterpret_cast<typename Types::template OffsetTo<TLookupList> &> (out->lookupList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList),
			   this,
			   c);

    reinterpret_cast<typename Types::template OffsetTo<RecordListOfFeature> &> (out->featureList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const typename Types::template OffsetTo<RecordListOfFeature> &> (featureList),
			   this,
			   c);

    out->scriptList.serialize_subset (c->subset_context,
				      scriptList,
				      this,
				      c);

#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
    {
      /* If FeatureVariations did not survive subsetting, downgrade a
       * 1.1 table to 1.0 so the dropped field is not expected. */
      bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
      if (!ret && version.major == 1)
      {
	out->version.major = 1;
	out->version.minor = 0;
      }
    }
#endif

    return_trace (true);
  }
};
4181
4182 struct GSUBGPOS
4183 {
  /* Header size of the version-specific layout; just the version field
   * if the major version is unrecognized. */
  unsigned int get_size () const
  {
    switch (u.version.major) {
    case 1: return u.version1.get_size ();
#ifndef HB_NO_BEYOND_64K
    case 2: return u.version2.get_size ();
#endif
    default: return u.version.static_size;
    }
  }
4194
  /* Validate the version field, then the version-specific header.
   * Unknown major versions are accepted (treated as empty elsewhere). */
  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!u.version.sanitize (c))) return_trace (false);
    switch (u.version.major) {
    case 1: return_trace (u.version1.sanitize<TLookup> (c));
#ifndef HB_NO_BEYOND_64K
    case 2: return_trace (u.version2.sanitize<TLookup> (c));
#endif
    default: return_trace (true);
    }
  }
4208
  /* Version-dispatched subsetting; unknown versions cannot be subset. */
  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    switch (u.version.major) {
    case 1: return u.version1.subset<TLookup> (c);
#ifndef HB_NO_BEYOND_64K
    case 2: return u.version2.subset<TLookup> (c);
#endif
    default: return false;
    }
  }
4220
  /* Version-dispatched ScriptList accessor; Null object for unknown
   * versions so callers need no version checks. */
  const ScriptList &get_script_list () const
  {
    switch (u.version.major) {
    case 1: return this+u.version1.scriptList;
#ifndef HB_NO_BEYOND_64K
    case 2: return this+u.version2.scriptList;
#endif
    default: return Null (ScriptList);
    }
  }
  /* Version-dispatched FeatureList accessor; Null object on unknown version. */
  const FeatureList &get_feature_list () const
  {
    switch (u.version.major) {
    case 1: return this+u.version1.featureList;
#ifndef HB_NO_BEYOND_64K
    case 2: return this+u.version2.featureList;
#endif
    default: return Null (FeatureList);
    }
  }
  /* Number of lookups in the version-dispatched LookupList; 0 on unknown
   * version. */
  unsigned int get_lookup_count () const
  {
    switch (u.version.major) {
    case 1: return (this+u.version1.lookupList).len;
#ifndef HB_NO_BEYOND_64K
    case 2: return (this+u.version2.lookupList).len;
#endif
    default: return 0;
    }
  }
  /* Lookup at index i from the version-dispatched LookupList; Null
   * object on unknown version. */
  const Lookup& get_lookup (unsigned int i) const
  {
    switch (u.version.major) {
    case 1: return (this+u.version1.lookupList)[i];
#ifndef HB_NO_BEYOND_64K
    case 2: return (this+u.version2.lookupList)[i];
#endif
    default: return Null (Lookup);
    }
  }
  /* FeatureVariations table; Null object for version 1.0 (field absent)
   * or unknown versions. */
  const FeatureVariations &get_feature_variations () const
  {
    switch (u.version.major) {
    case 1: return (u.version.to_int () >= 0x00010001u ? this+u.version1.featureVars : Null (FeatureVariations));
#ifndef HB_NO_BEYOND_64K
    case 2: return this+u.version2.featureVars;
#endif
    default: return Null (FeatureVariations);
    }
  }
4271
  /* True iff a table was actually loaded (version is non-zero). */
  bool has_data () const { return u.version.to_int (); }
  /* Script-level accessors: thin delegations to the version-dispatched
   * ScriptList. */
  unsigned int get_script_count () const
  { return get_script_list ().len; }
  const Tag& get_script_tag (unsigned int i) const
  { return get_script_list ().get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
				unsigned int *script_count /* IN/OUT */,
				hb_tag_t     *script_tags /* OUT */) const
  { return get_script_list ().get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return get_script_list ()[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return get_script_list ().find_index (tag, index); }
4285
  /* Number of features in the version-dispatched FeatureList. */
  unsigned int get_feature_count () const
  { return get_feature_list ().len; }
get_feature_tagOT::GSUBGPOS4288 hb_tag_t get_feature_tag (unsigned int i) const
4289 { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : get_feature_list ().get_tag (i); }
  /* Feature-level accessors: thin delegations to the version-dispatched
   * FeatureList. */
  unsigned int get_feature_tags (unsigned int start_offset,
				 unsigned int *feature_count /* IN/OUT */,
				 hb_tag_t     *feature_tags /* OUT */) const
  { return get_feature_list ().get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return get_feature_list ()[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return get_feature_list ().find_index (tag, index); }
4298
  /* Find the FeatureVariations record matching the given normalized
   * variation coordinates.  Compiled-out (always NOT_FOUND) under
   * HB_NO_VAR. */
  bool find_variations_index (const int *coords, unsigned int num_coords,
			      unsigned int *index) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return get_feature_variations ().find_index (coords, num_coords, index);
  }
  /* Feature to use for `feature_index` under a variation condition set:
   * the substitute Feature from FeatureVariations when one exists,
   * otherwise the default Feature. */
  const Feature& get_feature_variation (unsigned int feature_index,
					unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    /* Substitutes only exist from version 1.1 on. */
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
	u.version.to_int () >= 0x00010001u)
    {
      const Feature *feature = get_feature_variations ().find_substitute (variations_index,
									  feature_index);
      if (feature)
	return *feature;
    }
#endif
    return get_feature (feature_index);
  }
4323
  /* Collect lookup indices referenced by FeatureVariations substitutes of
   * the given features.  No-op under HB_NO_VAR. */
  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
					  const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
					  hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    get_feature_variations ().collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
#endif
  }
4332
#ifndef HB_NO_VAR
  /* Delegate collection of per-variation feature substitutes. */
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  { get_feature_variations ().collect_feature_substitutes_with_variations (c); }
#endif
4337
  /* Transitive closure over lookup recursion: extend `lookup_indexes`
   * with every lookup reachable from it for `glyphs`, then drop lookups
   * found to be inactive. */
  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
			const hb_set_t *glyphs,
			hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);

    for (unsigned lookup_index : *lookup_indexes)
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }
4354
prune_langsysOT::GSUBGPOS4355 void prune_langsys (const hb_map_t *duplicate_feature_map,
4356 const hb_set_t *layout_scripts,
4357 hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
4358 hb_set_t *new_feature_indexes /* OUT */) const
4359 {
4360 hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
4361
4362 unsigned count = get_script_count ();
4363 for (unsigned script_index = 0; script_index < count; script_index++)
4364 {
4365 const Tag& tag = get_script_tag (script_index);
4366 if (!layout_scripts->has (tag)) continue;
4367 const Script& s = get_script (script_index);
4368 s.prune_langsys (&c, script_index);
4369 }
4370 }
4371
prune_featuresOT::GSUBGPOS4372 void prune_features (const hb_map_t *lookup_indices, /* IN */
4373 const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
4374 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
4375 hb_set_t *feature_indices /* IN/OUT */) const
4376 {
4377 #ifndef HB_NO_VAR
4378 // This is the set of feature indices which have alternate versions defined
4379 // if the FeatureVariation's table and the alternate version(s) intersect the
4380 // set of lookup indices.
4381 hb_set_t alternate_feature_indices;
4382 get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
4383 if (unlikely (alternate_feature_indices.in_error()))
4384 {
4385 feature_indices->err ();
4386 return;
4387 }
4388 #endif
4389
4390 for (unsigned i : hb_iter (feature_indices))
4391 {
4392 hb_tag_t tag = get_feature_tag (i);
4393 if (tag == HB_TAG ('p', 'r', 'e', 'f'))
4394 // Note: Never ever drop feature 'pref', even if it's empty.
4395 // HarfBuzz chooses shaper for Khmer based on presence of this
4396 // feature. See thread at:
4397 // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
4398 continue;
4399
4400
4401 const Feature *f = &(get_feature (i));
4402 const Feature** p = nullptr;
4403 if (feature_substitutes_map->has (i, &p))
4404 f = *p;
4405
4406 if (!f->featureParams.is_null () &&
4407 tag == HB_TAG ('s', 'i', 'z', 'e'))
4408 continue;
4409
4410 if (!f->intersects_lookup_indexes (lookup_indices)
4411 #ifndef HB_NO_VAR
4412 && !alternate_feature_indices.has (i)
4413 #endif
4414 )
4415 feature_indices->del (i);
4416 }
4417 }
4418
  /* RAII holder for a sanitized GSUB/GPOS table blob plus one
   * per-lookup accelerator.  T is the concrete table type (GSUB/GPOS). */
  template <typename T>
  struct accelerator_t
  {
    /* References and sanitizes the table for `face`, then initializes
     * one accelerator per lookup.  A blocklisted table is swapped for
     * the empty blob up front so everything downstream sees an empty
     * (zero-lookup) table. */
    accelerator_t (hb_face_t *face)
    {
      this->table = hb_sanitize_context_t ().reference_table<T> (face);
      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
	/* Drop the reference taken above before replacing the blob. */
	hb_blob_destroy (this->table.get_blob ());
	this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
      if (unlikely (!this->accels))
      {
	/* Allocation failure: degrade to an empty table with zero
	 * lookups so the init loop below does nothing. */
	this->lookup_count = 0;
	this->table.destroy ();
	this->table = hb_blob_get_empty ();
      }

      for (unsigned int i = 0; i < this->lookup_count; i++)
	this->accels[i].init (table->get_lookup (i));
    }
    /* Tears down each accelerator, frees the array, and releases the
     * table blob reference. */
    ~accelerator_t ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
	this->accels[i].fini ();
      hb_free (this->accels);
      this->table.destroy ();
    }

    hb_blob_ptr_t<T> table;                      /* Sanitized table blob reference. */
    unsigned int lookup_count;                   /* Number of entries in accels. */
    hb_ot_layout_lookup_accelerator_t *accels;   /* One accelerator per lookup. */
  };
4456
  protected:
  union {
  FixedVersion<>	version;	/* Version identifier */
  GSUBGPOSVersion1_2<SmallTypes> version1;	/* Version 1.x layout (16-bit offset types). */
#ifndef HB_NO_BEYOND_64K
  GSUBGPOSVersion1_2<MediumTypes> version2;	/* Larger-offset layout variant (compiled out with HB_NO_BEYOND_64K). */
#endif
  } u;
  public:
  DEFINE_SIZE_MIN (4);
4467 };
4468
4469
4470 } /* namespace OT */
4471
4472
4473 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */
4474