/*
 * Copyright © 2019  Adobe Inc.
 * Copyright © 2019  Ebrahim Byagowi
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Adobe Author(s): Michiharu Ariza
 */

#ifndef HB_OT_VAR_GVAR_TABLE_HH
#define HB_OT_VAR_GVAR_TABLE_HH

#include "hb-open-type.hh"

/*
 * gvar -- Glyph Variation Table
 * https://docs.microsoft.com/en-us/typography/opentype/spec/gvar
 */
#define HB_OT_TAG_gvar HB_TAG('g','v','a','r')

namespace OT {

struct contour_point_t
{
  void init (float x_ = 0.f, float y_ = 0.f, bool is_end_point_ = false)
  { flag = 0; x = x_; y = y_; is_end_point = is_end_point_; }

  void translate (const contour_point_t &p) { x += p.x; y += p.y; }

  float x = 0.f;
  float y = 0.f;
  uint8_t flag = 0;
  bool is_end_point = false;
};

struct contour_point_vector_t : hb_vector_t<contour_point_t>
{
  void extend (const hb_array_t<contour_point_t> &a)
  {
    unsigned int old_len = length;
    if (unlikely (!resize (old_len + a.length, false)))
      return;
    auto arrayZ = this->arrayZ + old_len;
    unsigned count = a.length;
    hb_memcpy (arrayZ, a.arrayZ, count * sizeof (arrayZ[0]));
  }
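
  /* Applies an arbitrary 2x2 transform to every point:
   *   x' = x * matrix[0] + y * matrix[2]
   *   y' = x * matrix[1] + y * matrix[3]
   * A worked example with illustrative values: matrix {0.f, 1.f, -1.f, 0.f}
   * rotates every point 90° counter-clockwise, e.g. (1, 0) -> (0, 1);
   * the identity {1.f, 0.f, 0.f, 1.f} is detected and skipped below. */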
  void transform (const float (&matrix)[4])
  {
    if (matrix[0] == 1.f && matrix[1] == 0.f &&
	matrix[2] == 0.f && matrix[3] == 1.f)
      return;
    auto arrayZ = this->arrayZ;
    unsigned count = length;
    for (unsigned i = 0; i < count; i++)
    {
      contour_point_t &p = arrayZ[i];
      float x_ = p.x * matrix[0] + p.y * matrix[2];
      p.y = p.x * matrix[1] + p.y * matrix[3];
      p.x = x_;
    }
  }

  void translate (const contour_point_t& delta)
  {
    if (delta.x == 0.f && delta.y == 0.f)
      return;
    auto arrayZ = this->arrayZ;
    unsigned count = length;
    for (unsigned i = 0; i < count; i++)
      arrayZ[i].translate (delta);
  }
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/otvarcommonformats#tuplevariationheader */
struct TupleVariationHeader
{
  unsigned get_size (unsigned axis_count) const
  { return min_size + get_all_tuples (axis_count).get_size (); }

  unsigned get_data_size () const { return varDataSize; }

  const TupleVariationHeader &get_next (unsigned axis_count) const
  { return StructAtOffset<TupleVariationHeader> (this, get_size (axis_count)); }
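
  /* Evaluates this tuple's scalar at the given normalized coordinates,
   * multiplying one interpolation factor per axis (per the OT spec).
   * A worked single-axis example with illustrative 2.14 values:
   * peak = 0.5 (8192), no intermediate region, coord v = 0.25 (4096)
   * gives scalar = v / peak = 0.5; v = 0, v < 0, or v > 0.5 gives 0. */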
  float calculate_scalar (hb_array_t<int> coords, unsigned int coord_count,
			  const hb_array_t<const F2DOT14> shared_tuples) const
  {
    hb_array_t<const F2DOT14> peak_tuple;

    if (has_peak ())
      peak_tuple = get_peak_tuple (coord_count);
    else
    {
      unsigned int index = get_index ();
      if (unlikely (index * coord_count >= shared_tuples.length))
	return 0.f;
      peak_tuple = shared_tuples.sub_array (coord_count * index, coord_count);
    }

    hb_array_t<const F2DOT14> start_tuple;
    hb_array_t<const F2DOT14> end_tuple;
    if (has_intermediate ())
    {
      start_tuple = get_start_tuple (coord_count);
      end_tuple = get_end_tuple (coord_count);
    }

    float scalar = 1.f;
    for (unsigned int i = 0; i < coord_count; i++)
    {
      int v = coords[i];
      int peak = peak_tuple[i];
      if (!peak || v == peak) continue;

      if (has_intermediate ())
      {
	int start = start_tuple[i];
	int end = end_tuple[i];
	if (unlikely (start > peak || peak > end ||
		      (start < 0 && end > 0 && peak))) continue;
	if (v < start || v > end) return 0.f;
	if (v < peak)
	{ if (peak != start) scalar *= (float) (v - start) / (peak - start); }
	else
	{ if (peak != end) scalar *= (float) (end - v) / (end - peak); }
      }
      else if (!v || v < hb_min (0, peak) || v > hb_max (0, peak)) return 0.f;
      else
	scalar *= (float) v / peak;
    }
    return scalar;
  }

  bool           has_peak () const { return tupleIndex & TuppleIndex::EmbeddedPeakTuple; }
  bool   has_intermediate () const { return tupleIndex & TuppleIndex::IntermediateRegion; }
  bool has_private_points () const { return tupleIndex & TuppleIndex::PrivatePointNumbers; }
  unsigned      get_index () const { return tupleIndex & TuppleIndex::TupleIndexMask; }

  protected:
  struct TuppleIndex : HBUINT16
  {
    enum Flags {
      EmbeddedPeakTuple   = 0x8000u,
      IntermediateRegion  = 0x4000u,
      PrivatePointNumbers = 0x2000u,
      TupleIndexMask      = 0x0FFFu
    };

    DEFINE_SIZE_STATIC (2);
  };

  hb_array_t<const F2DOT14> get_all_tuples (unsigned axis_count) const
  { return StructAfter<UnsizedArrayOf<F2DOT14>> (tupleIndex).as_array ((has_peak () + has_intermediate () * 2) * axis_count); }
  hb_array_t<const F2DOT14> get_peak_tuple (unsigned axis_count) const
  { return get_all_tuples (axis_count).sub_array (0, axis_count); }
  hb_array_t<const F2DOT14> get_start_tuple (unsigned axis_count) const
  { return get_all_tuples (axis_count).sub_array (has_peak () * axis_count, axis_count); }
  hb_array_t<const F2DOT14> get_end_tuple (unsigned axis_count) const
  { return get_all_tuples (axis_count).sub_array (has_peak () * axis_count + axis_count, axis_count); }

  HBUINT16	varDataSize;	/* The size in bytes of the serialized
				 * data for this tuple variation table. */
  TuppleIndex	tupleIndex;	/* A packed field. The high 4 bits are flags (see below).
				 * The low 12 bits are an index into a shared tuple
				 * records array. */
  /* UnsizedArrayOf<F2DOT14> peakTuple - optional */
				/* Peak tuple record for this tuple variation table — optional,
				 * determined by flags in the tupleIndex value.
				 *
				 * Note that this must always be included in the 'cvar' table. */
  /* UnsizedArrayOf<F2DOT14> intermediateStartTuple - optional */
				/* Intermediate start tuple record for this tuple variation table — optional,
				 * determined by flags in the tupleIndex value. */
  /* UnsizedArrayOf<F2DOT14> intermediateEndTuple - optional */
				/* Intermediate end tuple record for this tuple variation table — optional,
				 * determined by flags in the tupleIndex value. */
  public:
  DEFINE_SIZE_MIN (4);
};

struct GlyphVariationData
{
  const TupleVariationHeader &get_tuple_var_header (void) const
  { return StructAfter<TupleVariationHeader> (data); }

  struct tuple_iterator_t
  {
    void init (hb_bytes_t var_data_bytes_, unsigned int axis_count_)
    {
      var_data_bytes = var_data_bytes_;
      var_data = var_data_bytes_.as<GlyphVariationData> ();
      index = 0;
      axis_count = axis_count_;
      current_tuple = &var_data->get_tuple_var_header ();
      data_offset = 0;
    }

    bool get_shared_indices (hb_vector_t<unsigned int> &shared_indices /* OUT */)
    {
      if (var_data->has_shared_point_numbers ())
      {
	const HBUINT8 *base = &(var_data+var_data->data);
	const HBUINT8 *p = base;
	if (!unpack_points (p, shared_indices, (const HBUINT8 *) (var_data_bytes.arrayZ + var_data_bytes.length))) return false;
	data_offset = p - base;
      }
      return true;
    }

    bool is_valid () const
    {
      return (index < var_data->tupleVarCount.get_count ()) &&
	     var_data_bytes.check_range (current_tuple, TupleVariationHeader::min_size) &&
	     var_data_bytes.check_range (current_tuple, hb_max (current_tuple->get_data_size (),
								current_tuple->get_size (axis_count)));
    }

    bool move_to_next ()
    {
      data_offset += current_tuple->get_data_size ();
      current_tuple = &current_tuple->get_next (axis_count);
      index++;
      return is_valid ();
    }

    const HBUINT8 *get_serialized_data () const
    { return &(var_data+var_data->data) + data_offset; }

    private:
    const GlyphVariationData *var_data;
    unsigned int index;
    unsigned int axis_count;
    unsigned int data_offset;

    public:
    hb_bytes_t var_data_bytes;
    const TupleVariationHeader *current_tuple;
  };

  static bool get_tuple_iterator (hb_bytes_t var_data_bytes, unsigned axis_count,
				  hb_vector_t<unsigned int> &shared_indices /* OUT */,
				  tuple_iterator_t *iterator /* OUT */)
  {
    iterator->init (var_data_bytes, axis_count);
    if (!iterator->get_shared_indices (shared_indices))
      return false;
    return iterator->is_valid ();
  }

  bool has_shared_point_numbers () const { return tupleVarCount.has_shared_point_numbers (); }
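
  /* Decodes the spec's packed point-number format: a count byte (two bytes
   * when the 0x80 bit is set), then runs of point-number deltas, each run
   * headed by a control byte.  A worked example with illustrative bytes:
   * { 0x03, 0x02, 0x01, 0x02, 0x03 } means count = 3 and one run of three
   * byte-sized deltas 1, 2, 3, which accumulate to points { 1, 3, 6 }. */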
  static bool unpack_points (const HBUINT8 *&p /* IN/OUT */,
			     hb_vector_t<unsigned int> &points /* OUT */,
			     const HBUINT8 *end)
  {
    enum packed_point_flag_t
    {
      POINTS_ARE_WORDS     = 0x80,
      POINT_RUN_COUNT_MASK = 0x7F
    };

    if (unlikely (p + 1 > end)) return false;

    unsigned count = *p++;
    if (count & POINTS_ARE_WORDS)
    {
      if (unlikely (p + 1 > end)) return false;
      count = ((count & POINT_RUN_COUNT_MASK) << 8) | *p++;
    }
    if (unlikely (!points.resize (count, false))) return false;

    unsigned n = 0;
    unsigned i = 0;
    while (i < count)
    {
      if (unlikely (p + 1 > end)) return false;
      unsigned control = *p++;
      unsigned run_count = (control & POINT_RUN_COUNT_MASK) + 1;
      if (unlikely (i + run_count > count)) return false;
      unsigned j;
      if (control & POINTS_ARE_WORDS)
      {
	if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
	for (j = 0; j < run_count; j++, i++)
	{
	  n += *(const HBUINT16 *) p;
	  points.arrayZ[i] = n;
	  p += HBUINT16::static_size;
	}
      }
      else
      {
	if (unlikely (p + run_count > end)) return false;
	for (j = 0; j < run_count; j++, i++)
	{
	  n += *p++;
	  points.arrayZ[i] = n;
	}
      }
    }
    return true;
  }
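
  /* Decodes the spec's packed-delta format into `deltas`, which the caller
   * must have resized to the expected delta count.  A worked example with
   * illustrative bytes: { 0x81, 0x01, 0x05, 0xFB } means a run of two zero
   * deltas (0x80 flag set, run count 2), then a run of two int8 deltas
   * 5 and -5, decoding to { 0, 0, 5, -5 }. */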
  static bool unpack_deltas (const HBUINT8 *&p /* IN/OUT */,
			     hb_vector_t<int> &deltas /* IN/OUT */,
			     const HBUINT8 *end)
  {
    enum packed_delta_flag_t
    {
      DELTAS_ARE_ZERO      = 0x80,
      DELTAS_ARE_WORDS     = 0x40,
      DELTA_RUN_COUNT_MASK = 0x3F
    };

    unsigned i = 0;
    unsigned count = deltas.length;
    while (i < count)
    {
      if (unlikely (p + 1 > end)) return false;
      unsigned control = *p++;
      unsigned run_count = (control & DELTA_RUN_COUNT_MASK) + 1;
      if (unlikely (i + run_count > count)) return false;
      unsigned j;
      if (control & DELTAS_ARE_ZERO)
      {
	for (j = 0; j < run_count; j++, i++)
	  deltas.arrayZ[i] = 0;
      }
      else if (control & DELTAS_ARE_WORDS)
      {
	if (unlikely (p + run_count * HBUINT16::static_size > end)) return false;
	for (j = 0; j < run_count; j++, i++)
	{
	  deltas.arrayZ[i] = * (const HBINT16 *) p;
	  p += HBUINT16::static_size;
	}
      }
      else
      {
	if (unlikely (p + run_count > end)) return false;
	for (j = 0; j < run_count; j++, i++)
	{
	  deltas.arrayZ[i] = * (const HBINT8 *) p++;
	}
      }
    }
    return true;
  }

  bool has_data () const { return tupleVarCount; }

  protected:
  struct TupleVarCount : HBUINT16
  {
    bool has_shared_point_numbers () const { return ((*this) & SharedPointNumbers); }
    unsigned int get_count () const { return (*this) & CountMask; }

    protected:
    enum Flags
    {
      SharedPointNumbers = 0x8000u,
      CountMask          = 0x0FFFu
    };
    public:
    DEFINE_SIZE_STATIC (2);
  };

  TupleVarCount	tupleVarCount;	/* A packed field. The high 4 bits are flags, and the
				 * low 12 bits are the number of tuple variation tables
				 * for this glyph. The number of tuple variation tables
				 * can be any number between 1 and 4095. */
  Offset16To<HBUINT8>
		data;		/* Offset from the start of the GlyphVariationData table
				 * to the serialized data. */
  /* TupleVariationHeader tupleVariationHeaders[] */
				/* Array of tuple variation headers. */
  public:
  DEFINE_SIZE_MIN (4);
};

struct gvar
{
  static constexpr hb_tag_t tableTag = HB_OT_TAG_gvar;

  bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && (version.major == 1) &&
		  sharedTuples.sanitize (c, this, axisCount * sharedTupleCount) &&
		  (is_long_offset () ?
		     c->check_array (get_long_offset_array (), glyphCount+1) :
		     c->check_array (get_short_offset_array (), glyphCount+1)));
  }
  /* GlyphVariationData is not sanitized here; it must be checked as each
   * glyph's variation data is accessed. */
  bool sanitize (hb_sanitize_context_t *c) const
  { return sanitize_shallow (c); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    gvar *out = c->serializer->allocate_min<gvar> ();
    if (unlikely (!out)) return_trace (false);

    out->version.major = 1;
    out->version.minor = 0;
    out->axisCount = axisCount;
    out->sharedTupleCount = sharedTupleCount;

    unsigned int num_glyphs = c->plan->num_output_glyphs ();
    out->glyphCount = num_glyphs;

    unsigned int subset_data_size = 0;
    for (hb_codepoint_t gid = (c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE) ? 0 : 1;
	 gid < num_glyphs;
	 gid++)
    {
      hb_codepoint_t old_gid;
      if (!c->plan->old_gid_for_new_gid (gid, &old_gid)) continue;
      subset_data_size += get_glyph_var_data_bytes (c->source_blob, old_gid).length;
    }

    bool long_offset = subset_data_size & ~0xFFFFu;
    out->flags = long_offset ? 1 : 0;

    HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset ? 4 : 2) * (num_glyphs + 1));
    if (!subset_offsets) return_trace (false);

    /* shared tuples */
    if (!sharedTupleCount || !sharedTuples)
      out->sharedTuples = 0;
    else
    {
      unsigned int shared_tuple_size = F2DOT14::static_size * axisCount * sharedTupleCount;
      F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size);
      if (!tuples) return_trace (false);
      out->sharedTuples = (char *) tuples - (char *) out;
      hb_memcpy (tuples, this+sharedTuples, shared_tuple_size);
    }

    char *subset_data = c->serializer->allocate_size<char> (subset_data_size);
    if (!subset_data) return_trace (false);
    out->dataZ = subset_data - (char *) out;

    unsigned int glyph_offset = 0;
    for (hb_codepoint_t gid = (c->plan->flags & HB_SUBSET_FLAGS_NOTDEF_OUTLINE) ? 0 : 1;
	 gid < num_glyphs;
	 gid++)
    {
      hb_codepoint_t old_gid;
      hb_bytes_t var_data_bytes = c->plan->old_gid_for_new_gid (gid, &old_gid)
				? get_glyph_var_data_bytes (c->source_blob, old_gid)
				: hb_bytes_t ();

      if (long_offset)
	((HBUINT32 *) subset_offsets)[gid] = glyph_offset;
      else
	((HBUINT16 *) subset_offsets)[gid] = glyph_offset / 2;

      if (var_data_bytes.length > 0)
	hb_memcpy (subset_data, var_data_bytes.arrayZ, var_data_bytes.length);
      subset_data += var_data_bytes.length;
      glyph_offset += var_data_bytes.length;
    }
    if (long_offset)
      ((HBUINT32 *) subset_offsets)[num_glyphs] = glyph_offset;
    else
      ((HBUINT16 *) subset_offsets)[num_glyphs] = glyph_offset / 2;

    return_trace (true);
  }

  protected:
  const hb_bytes_t get_glyph_var_data_bytes (hb_blob_t *blob, hb_codepoint_t glyph) const
  {
    unsigned start_offset = get_offset (glyph);
    unsigned end_offset = get_offset (glyph+1);
    if (unlikely (end_offset < start_offset)) return hb_bytes_t ();
    unsigned length = end_offset - start_offset;
    hb_bytes_t var_data = blob->as_bytes ().sub_array (((unsigned) dataZ) + start_offset, length);
    return likely (var_data.length >= GlyphVariationData::min_size) ? var_data : hb_bytes_t ();
  }

  bool is_long_offset () const { return flags & 1; }
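
  /* Returns the byte offset of glyph i's variation data relative to dataZ.
   * Note that short offsets are stored halved, so e.g. a stored uint16
   * value of 3 means byte offset 6. */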
  unsigned get_offset (unsigned i) const
  {
    if (unlikely (i > glyphCount)) return 0;
    _hb_compiler_memory_r_barrier ();
    return is_long_offset () ? get_long_offset_array ()[i] : get_short_offset_array ()[i] * 2;
  }

  const HBUINT32 *get_long_offset_array () const { return (const HBUINT32 *) &offsetZ; }
  const HBUINT16 *get_short_offset_array () const { return (const HBUINT16 *) &offsetZ; }

  public:
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    { table = hb_sanitize_context_t ().reference_table<gvar> (face); }
    ~accelerator_t () { table.destroy (); }

    private:
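
    /* Infers the delta of an unreferenced point from its two nearest
     * referenced neighbors on the contour, one coordinate at a time.
     * A worked example with illustrative values: prev_val = 0,
     * next_val = 10, target_val = 4, prev_delta = 0, next_delta = 5
     * gives r = 0.4 and an inferred delta of 0 + 0.4 * (5 - 0) = 2. */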
    static float infer_delta (const hb_array_t<contour_point_t> points,
			      const hb_array_t<contour_point_t> deltas,
			      unsigned int target, unsigned int prev, unsigned int next,
			      float contour_point_t::*m)
    {
      float target_val = points.arrayZ[target].*m;
      float prev_val = points.arrayZ[prev].*m;
      float next_val = points.arrayZ[next].*m;
      float prev_delta = deltas.arrayZ[prev].*m;
      float next_delta = deltas.arrayZ[next].*m;

      if (prev_val == next_val)
	return (prev_delta == next_delta) ? prev_delta : 0.f;
      else if (target_val <= hb_min (prev_val, next_val))
	return (prev_val < next_val) ? prev_delta : next_delta;
      else if (target_val >= hb_max (prev_val, next_val))
	return (prev_val > next_val) ? prev_delta : next_delta;

      /* linear interpolation */
      float r = (target_val - prev_val) / (next_val - prev_val);
      return prev_delta + r * (next_delta - prev_delta);
    }

    static unsigned int next_index (unsigned int i, unsigned int start, unsigned int end)
    { return (i >= end) ? start : (i + 1); }

    public:
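    /* For each tuple whose scalar is non-zero, adds the scaled explicit
     * deltas to the referenced points and infers deltas for unreferenced
     * points contour by contour (the spec's IUP behavior), then applies
     * the result to `points` in place.  `coords` holds one normalized
     * 2.14 coordinate per axis. */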
    bool apply_deltas_to_points (hb_codepoint_t glyph,
				 hb_array_t<int> coords,
				 const hb_array_t<contour_point_t> points) const
    {
      if (!coords) return true;

      if (unlikely (glyph >= table->glyphCount)) return true;

      hb_bytes_t var_data_bytes = table->get_glyph_var_data_bytes (table.get_blob (), glyph);
      if (!var_data_bytes.as<GlyphVariationData> ()->has_data ()) return true;
      hb_vector_t<unsigned int> shared_indices;
      GlyphVariationData::tuple_iterator_t iterator;
      if (!GlyphVariationData::get_tuple_iterator (var_data_bytes, table->axisCount,
						   shared_indices, &iterator))
	return true; /* so that no variation is applied at all */

      /* Save original points for inferred delta calculation */
      contour_point_vector_t orig_points_vec;
      orig_points_vec.extend (points);
      if (unlikely (orig_points_vec.in_error ())) return false;
      auto orig_points = orig_points_vec.as_array ();

      contour_point_vector_t deltas_vec; /* flag is used to indicate referenced point */
      if (unlikely (!deltas_vec.resize (points.length, false))) return false;
      auto deltas = deltas_vec.as_array ();

      hb_vector_t<unsigned> end_points;
      for (unsigned i = 0; i < points.length; ++i)
	if (points.arrayZ[i].is_end_point)
	  end_points.push (i);

      unsigned num_coords = table->axisCount;
      hb_array_t<const F2DOT14> shared_tuples = (table+table->sharedTuples).as_array (table->sharedTupleCount * table->axisCount);

      hb_vector_t<unsigned int> private_indices;
      hb_vector_t<int> x_deltas;
      hb_vector_t<int> y_deltas;
      do
      {
	float scalar = iterator.current_tuple->calculate_scalar (coords, num_coords, shared_tuples);
	if (scalar == 0.f) continue;
	const HBUINT8 *p = iterator.get_serialized_data ();
	unsigned int length = iterator.current_tuple->get_data_size ();
	if (unlikely (!iterator.var_data_bytes.check_range (p, length)))
	  return false;

	const HBUINT8 *end = p + length;

	bool has_private_points = iterator.current_tuple->has_private_points ();
	if (has_private_points &&
	    !GlyphVariationData::unpack_points (p, private_indices, end))
	  return false;
	const hb_array_t<unsigned int> &indices = has_private_points ? private_indices : shared_indices;

	bool apply_to_all = (indices.length == 0);
	unsigned int num_deltas = apply_to_all ? points.length : indices.length;
	if (unlikely (!x_deltas.resize (num_deltas, false))) return false;
	if (unlikely (!GlyphVariationData::unpack_deltas (p, x_deltas, end))) return false;
	if (unlikely (!y_deltas.resize (num_deltas, false))) return false;
	if (unlikely (!GlyphVariationData::unpack_deltas (p, y_deltas, end))) return false;

	hb_memset (deltas.arrayZ, 0, deltas.get_size ());

	unsigned ref_points = 0;
	if (scalar != 1.0f)
	  for (unsigned int i = 0; i < num_deltas; i++)
	  {
	    unsigned int pt_index = apply_to_all ? i : indices[i];
	    if (unlikely (pt_index >= deltas.length)) continue;
	    auto &delta = deltas.arrayZ[pt_index];
	    ref_points += !delta.flag;
	    delta.flag = 1;	/* this point is referenced, i.e., explicit deltas specified */
	    delta.x += x_deltas.arrayZ[i] * scalar;
	    delta.y += y_deltas.arrayZ[i] * scalar;
	  }
	else
	  for (unsigned int i = 0; i < num_deltas; i++)
	  {
	    unsigned int pt_index = apply_to_all ? i : indices[i];
	    if (unlikely (pt_index >= deltas.length)) continue;
	    auto &delta = deltas.arrayZ[pt_index];
	    ref_points += !delta.flag;
	    delta.flag = 1;	/* this point is referenced, i.e., explicit deltas specified */
	    delta.x += x_deltas.arrayZ[i];
	    delta.y += y_deltas.arrayZ[i];
	  }

	/* infer deltas for unreferenced points */
	if (ref_points && ref_points < orig_points.length)
	{
	  unsigned start_point = 0;
	  for (unsigned c = 0; c < end_points.length; c++)
	  {
	    unsigned end_point = end_points.arrayZ[c];

	    /* Check the number of unreferenced points in a contour. If no unref points or no ref points, nothing to do. */
	    unsigned unref_count = 0;
	    for (unsigned i = start_point; i < end_point + 1; i++)
	      unref_count += deltas.arrayZ[i].flag;
	    unref_count = (end_point - start_point + 1) - unref_count;

	    unsigned j = start_point;
	    if (unref_count == 0 || unref_count > end_point - start_point)
	      goto no_more_gaps;

	    for (;;)
	    {
	      /* Locate the next gap of unreferenced points between two referenced points prev and next.
	       * Note that a gap may wrap around at left (start_point) and/or at right (end_point).
	       */
	      unsigned int prev, next, i;
	      for (;;)
	      {
		i = j;
		j = next_index (i, start_point, end_point);
		if (deltas.arrayZ[i].flag && !deltas.arrayZ[j].flag) break;
	      }
	      prev = j = i;
	      for (;;)
	      {
		i = j;
		j = next_index (i, start_point, end_point);
		if (!deltas.arrayZ[i].flag && deltas.arrayZ[j].flag) break;
	      }
	      next = j;
	      /* Infer deltas for all unref points in the gap between prev and next */
	      i = prev;
	      for (;;)
	      {
		i = next_index (i, start_point, end_point);
		if (i == next) break;
		deltas.arrayZ[i].x = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::x);
		deltas.arrayZ[i].y = infer_delta (orig_points, deltas, i, prev, next, &contour_point_t::y);
		if (--unref_count == 0) goto no_more_gaps;
	      }
	    }
	  no_more_gaps:
	    start_point = end_point + 1;
	  }
	}

	/* apply specified / inferred deltas to points */
	for (unsigned int i = 0; i < points.length; i++)
	{
	  points.arrayZ[i].x += deltas.arrayZ[i].x;
	  points.arrayZ[i].y += deltas.arrayZ[i].y;
	}
      } while (iterator.move_to_next ());

      return true;
    }

    unsigned int get_axis_count () const { return table->axisCount; }

    private:
    hb_blob_ptr_t<gvar> table;
  };

  protected:
  FixedVersion<> version;	/* Version number of the glyph variations table.
				 * Set to 0x00010000u. */
  HBUINT16	axisCount;	/* The number of variation axes for this font. This must be
				 * the same number as axisCount in the 'fvar' table. */
  HBUINT16	sharedTupleCount;
				/* The number of shared tuple records. Shared tuple records
				 * can be referenced within glyph variation data tables for
				 * multiple glyphs, as opposed to other tuple records stored
				 * directly within a glyph variation data table. */
  NNOffset32To<UnsizedArrayOf<F2DOT14>>
		sharedTuples;	/* Offset from the start of this table to the shared tuple records.
				 * Array of tuple records shared across all glyph variation data tables. */
  HBUINT16	glyphCount;	/* The number of glyphs in this font. This must match the number of
				 * glyphs stored elsewhere in the font. */
  HBUINT16	flags;		/* Bit-field that gives the format of the offset array that follows.
				 * If bit 0 is clear, the offsets are uint16; if bit 0 is set, the
				 * offsets are uint32. */
  Offset32To<GlyphVariationData>
		dataZ;		/* Offset from the start of this table to the array of
				 * GlyphVariationData tables. */
  UnsizedArrayOf<HBUINT8>
		offsetZ;	/* Offsets from the start of the GlyphVariationData array
				 * to each GlyphVariationData table. */
  public:
  DEFINE_SIZE_ARRAY (20, offsetZ);
};

struct gvar_accelerator_t : gvar::accelerator_t {
  gvar_accelerator_t (hb_face_t *face) : gvar::accelerator_t (face) {}
};
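
/* A minimal usage sketch (illustrative; `face`, `font`, `glyph`, and a
 * `points` vector already filled with the glyph's outline points are
 * assumed to come from the caller, e.g. the glyf code):
 *
 *   gvar_accelerator_t accel (face);
 *   unsigned int coord_count;
 *   const int *coord_array = hb_font_get_var_coords_normalized (font, &coord_count);
 *   hb_array_t<int> coords (const_cast<int *> (coord_array), coord_count);
 *   accel.apply_deltas_to_points (glyph, coords, points.as_array ());
 */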

} /* namespace OT */

#endif /* HB_OT_VAR_GVAR_TABLE_HH */