# NOTE: navigation chrome from an HTML source-code viewer was removed here;
# it was not part of the original Python module.
1from __future__ import print_function, division, absolute_import
2from fontTools.misc.py23 import *
3from fontTools.misc.fixedTools import fixedToFloat, floatToFixed, otRound
4from fontTools.misc.textTools import safeEval
5import array
6import io
7import logging
8import struct
9import sys
10
11
12# https://www.microsoft.com/typography/otspec/otvarcommonformats.htm
13
# Flag bits of the TupleVariationHeader 'tupleIndex' field.
EMBEDDED_PEAK_TUPLE = 0x8000  # header embeds its own peak tuple
INTERMEDIATE_REGION = 0x4000  # header embeds explicit start/end tuples
PRIVATE_POINT_NUMBERS = 0x2000  # serialized data carries its own point numbers

# Flag bits / count mask of a packed-delta run header byte.
DELTAS_ARE_ZERO = 0x80
DELTAS_ARE_WORDS = 0x40
DELTA_RUN_COUNT_MASK = 0x3f

# Flag bit / count mask of a packed-point-number run header byte.
POINTS_ARE_WORDS = 0x80
POINT_RUN_COUNT_MASK = 0x7f

# Flag bit / masks of the per-glyph 'tupleVariationCount' field.
TUPLES_SHARE_POINT_NUMBERS = 0x8000
TUPLE_COUNT_MASK = 0x0fff
TUPLE_INDEX_MASK = 0x0fff

log = logging.getLogger(__name__)
30
31
class TupleVariation(object):
	"""One variation tuple of a 'gvar' or 'cvar' table.

	Pairs a region of the design space (``axes``) with the deltas
	(``coordinates``) that apply inside that region.

	https://www.microsoft.com/typography/otspec/otvarcommonformats.htm
	"""

	def __init__(self, axes, coordinates):
		# axes: {axisTag: (minValue, peakValue, maxValue)}; copied so the
		# caller's dict is not shared.
		# coordinates: list of deltas -- (x, y) tuples for 'gvar', plain
		# numbers for 'cvar', or None where no explicit delta is given.
		self.axes = axes.copy()
		self.coordinates = coordinates[:]

	def __repr__(self):
		axes = ",".join(sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()]))
		return "<TupleVariation %s %s>" % (axes, self.coordinates)

	def __eq__(self, other):
		# NOTE(review): no isinstance check -- comparing against an object
		# without 'coordinates'/'axes' raises AttributeError instead of
		# returning NotImplemented.
		return self.coordinates == other.coordinates and self.axes == other.axes

	def getUsedPoints(self):
		"""Return the set of point indices that carry an explicit delta."""
		result = set()
		for i, point in enumerate(self.coordinates):
			if point is not None:
				result.add(i)
		return result

	def hasImpact(self):
		"""Returns True if this TupleVariation has any visible impact.

		If the result is False, the TupleVariation can be omitted from the font
		without making any visible difference.
		"""
		return any(c is not None for c in self.coordinates)

	def toXML(self, writer, axisTags):
		"""Write this variation as a <tuple> element to a TTX XMLWriter."""
		writer.begintag("tuple")
		writer.newline()
		for axis in axisTags:
			value = self.axes.get(axis)
			if value is not None:
				minValue, value, maxValue = (float(v) for v in value)
				defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
				defaultMaxValue = max(value, 0.0)  # -0.3 -->  0.0; 0.7 --> 0.7
				if minValue == defaultMinValue and maxValue == defaultMaxValue:
					# Non-intermediate region: min/max are inferable from
					# the peak, so only the peak value is written.
					writer.simpletag("coord", axis=axis, value=value)
				else:
					attrs = [
						("axis", axis),
						("min", minValue),
						("value", value),
						("max", maxValue),
					]
					writer.simpletag("coord", attrs)
				writer.newline()
		wrote_any_deltas = False
		for i, delta in enumerate(self.coordinates):
			if type(delta) == tuple and len(delta) == 2:
				# 'gvar'-style delta: (x, y) movement of point i.
				writer.simpletag("delta", pt=i, x=delta[0], y=delta[1])
				writer.newline()
				wrote_any_deltas = True
			elif type(delta) == int:
				# 'cvar'-style delta: adjustment of CVT entry i.
				writer.simpletag("delta", cvt=i, value=delta)
				writer.newline()
				wrote_any_deltas = True
			elif delta is not None:
				log.error("bad delta format")
				writer.comment("bad delta #%d" % i)
				writer.newline()
				wrote_any_deltas = True
		if not wrote_any_deltas:
			writer.comment("no deltas")
			writer.newline()
		writer.endtag("tuple")
		writer.newline()

	def fromXML(self, name, attrs, _content):
		"""Populate self from one TTX <coord> or <delta> element.

		Assumes self.coordinates is already sized to the glyph's point
		count (or the CVT count), since deltas are assigned by index.
		"""
		if name == "coord":
			axis = attrs["axis"]
			value = float(attrs["value"])
			defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
			defaultMaxValue = max(value, 0.0)  # -0.3 -->  0.0; 0.7 --> 0.7
			minValue = float(attrs.get("min", defaultMinValue))
			maxValue = float(attrs.get("max", defaultMaxValue))
			self.axes[axis] = (minValue, value, maxValue)
		elif name == "delta":
			if "pt" in attrs:
				point = safeEval(attrs["pt"])
				x = safeEval(attrs["x"])
				y = safeEval(attrs["y"])
				self.coordinates[point] = (x, y)
			elif "cvt" in attrs:
				cvt = safeEval(attrs["cvt"])
				value = safeEval(attrs["value"])
				self.coordinates[cvt] = value
			else:
				log.warning("bad delta format: %s" %
				            ", ".join(sorted(attrs.keys())))

	def compile(self, axisTags, sharedCoordIndices, sharedPoints):
		"""Compile to binary: (tupleData, auxData, usesSharedPoints).

		tupleData is the TupleVariationHeader (size, flags, embedded
		coords); auxData is the serialized point numbers plus deltas.
		"""
		tupleData = []

		assert all(tag in axisTags for tag in self.axes.keys()), ("Unknown axis tag found.", self.axes.keys(), axisTags)

		coord = self.compileCoord(axisTags)
		if coord in sharedCoordIndices:
			# Refer to the shared peak tuple by index instead of
			# embedding it.
			flags = sharedCoordIndices[coord]
		else:
			flags = EMBEDDED_PEAK_TUPLE
			tupleData.append(coord)

		intermediateCoord = self.compileIntermediateCoord(axisTags)
		if intermediateCoord is not None:
			flags |= INTERMEDIATE_REGION
			tupleData.append(intermediateCoord)

		points = self.getUsedPoints()
		if sharedPoints == points:
			# Only use the shared points if they are identical to the actually used points
			auxData = self.compileDeltas(sharedPoints)
			usesSharedPoints = True
		else:
			flags |= PRIVATE_POINT_NUMBERS
			numPointsInGlyph = len(self.coordinates)
			auxData = self.compilePoints(points, numPointsInGlyph) + self.compileDeltas(points)
			usesSharedPoints = False

		# Header: uint16 variationDataSize, then uint16 tupleIndex/flags.
		tupleData = struct.pack('>HH', len(auxData), flags) + bytesjoin(tupleData)
		return (tupleData, auxData, usesSharedPoints)

	def compileCoord(self, axisTags):
		"""Serialize the peak tuple as 2.14 fixed values, one per axis."""
		result = []
		for axis in axisTags:
			_minValue, value, _maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			result.append(struct.pack(">h", floatToFixed(value, 14)))
		return bytesjoin(result)

	def compileIntermediateCoord(self, axisTags):
		"""Serialize start+end tuples, or None if they can be inferred."""
		needed = False
		for axis in axisTags:
			minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
			defaultMaxValue = max(value, 0.0)  # -0.3 -->  0.0; 0.7 --> 0.7
			if (minValue != defaultMinValue) or (maxValue != defaultMaxValue):
				needed = True
				break
		if not needed:
			return None
		minCoords = []
		maxCoords = []
		for axis in axisTags:
			minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			minCoords.append(struct.pack(">h", floatToFixed(minValue, 14)))
			maxCoords.append(struct.pack(">h", floatToFixed(maxValue, 14)))
		return bytesjoin(minCoords + maxCoords)

	@staticmethod
	def decompileCoord_(axisTags, data, offset):
		"""Parse one coordinate tuple; returns ({axisTag: float}, newOffset)."""
		coord = {}
		pos = offset
		for axis in axisTags:
			coord[axis] = fixedToFloat(struct.unpack(">h", data[pos:pos+2])[0], 14)
			pos += 2
		return coord, pos

	@staticmethod
	def compilePoints(points, numPointsInGlyph):
		"""Serialize a set of point numbers into the packed-point format."""
		# If the set consists of all points in the glyph, it gets encoded with
		# a special encoding: a single zero byte.
		if len(points) == numPointsInGlyph:
			return b"\0"

		# In the 'gvar' table, the packing of point numbers is a little surprising.
		# It consists of multiple runs, each being a delta-encoded list of integers.
		# For example, the point set {17, 18, 19, 20, 21, 22, 23} gets encoded as
		# [6, 17, 1, 1, 1, 1, 1, 1]. The first value (6) is the run length minus 1.
		# There are two types of runs, with values being either 8 or 16 bit unsigned
		# integers.
		points = list(points)
		points.sort()
		numPoints = len(points)

		# The binary representation starts with the total number of points in the set,
		# encoded into one or two bytes depending on the value.
		if numPoints < 0x80:
			result = [bytechr(numPoints)]
		else:
			result = [bytechr((numPoints >> 8) | 0x80) + bytechr(numPoints & 0xff)]

		MAX_RUN_LENGTH = 127
		pos = 0
		lastValue = 0
		while pos < numPoints:
			run = io.BytesIO()
			runLength = 0
			useByteEncoding = None
			while pos < numPoints and runLength <= MAX_RUN_LENGTH:
				curValue = points[pos]
				delta = curValue - lastValue
				if useByteEncoding is None:
					# The first delta of the run decides the encoding.
					useByteEncoding = 0 <= delta <= 0xff
				if useByteEncoding and (delta > 0xff or delta < 0):
					# we need to start a new run (which will not use byte encoding)
					break
				# TODO This never switches back to a byte-encoding from a short-encoding.
				# That's suboptimal.
				if useByteEncoding:
					run.write(bytechr(delta))
				else:
					run.write(bytechr(delta >> 8))
					run.write(bytechr(delta & 0xff))
				lastValue = curValue
				pos += 1
				runLength += 1
			if useByteEncoding:
				runHeader = bytechr(runLength - 1)
			else:
				runHeader = bytechr((runLength - 1) | POINTS_ARE_WORDS)
			result.append(runHeader)
			result.append(run.getvalue())

		return bytesjoin(result)

	@staticmethod
	def decompilePoints_(numPoints, data, offset, tableTag):
		"""(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
		assert tableTag in ('cvar', 'gvar')
		pos = offset
		numPointsInData = byteord(data[pos])
		pos += 1
		if (numPointsInData & POINTS_ARE_WORDS) != 0:
			numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | byteord(data[pos])
			pos += 1
		if numPointsInData == 0:
			# Special encoding: a zero count means "all points in the glyph".
			return (range(numPoints), pos)

		result = []
		while len(result) < numPointsInData:
			runHeader = byteord(data[pos])
			pos += 1
			numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
			point = 0
			if (runHeader & POINTS_ARE_WORDS) != 0:
				points = array.array("H")
				pointsSize = numPointsInRun * 2
			else:
				points = array.array("B")
				pointsSize = numPointsInRun
			# fromstring is the py2-compatible spelling of frombytes
			# (this module still supports Python 2 via fontTools.misc.py23).
			points.fromstring(data[pos:pos+pointsSize])
			# Packed values are big-endian; swap on little-endian hosts.
			if sys.byteorder != "big": points.byteswap()

			assert len(points) == numPointsInRun
			pos += pointsSize

			result.extend(points)

		# Convert relative to absolute
		absolute = []
		current = 0
		for delta in result:
			current += delta
			absolute.append(current)
		result = absolute
		del absolute

		badPoints = {str(p) for p in result if p < 0 or p >= numPoints}
		if badPoints:
			log.warning("point %s out of range in '%s' table" %
			            (",".join(sorted(badPoints)), tableTag))
		return (result, pos)

	def compileDeltas(self, points):
		"""Serialize the deltas of the given points, in ascending point order.

		For 'gvar' (tuple deltas) this emits all x deltas followed by all
		y deltas; for 'cvar' (int deltas) only the one stream.
		"""
		deltaX = []
		deltaY = []
		for p in sorted(list(points)):
			c = self.coordinates[p]
			if type(c) is tuple and len(c) == 2:
				deltaX.append(c[0])
				deltaY.append(c[1])
			elif type(c) is int:
				deltaX.append(c)
			elif c is not None:
				raise TypeError("invalid type of delta: %s" % type(c))
		return self.compileDeltaValues_(deltaX) + self.compileDeltaValues_(deltaY)

	@staticmethod
	def compileDeltaValues_(deltas):
		"""[value1, value2, value3, ...] --> bytestring

		Emits a sequence of runs. Each run starts with a
		byte-sized header whose 6 least significant bits
		(header & 0x3F) indicate how many values are encoded
		in this run. The stored length is the actual length
		minus one; run lengths are thus in the range [1..64].
		If the header byte has its most significant bit (0x80)
		set, all values in this run are zero, and no data
		follows. Otherwise, the header byte is followed by
		((header & 0x3F) + 1) signed values.  If (header &
		0x40) is clear, the delta values are stored as signed
		bytes; if (header & 0x40) is set, the delta values are
		signed 16-bit integers.
		"""  # Explaining the format because the 'gvar' spec is hard to understand.
		stream = io.BytesIO()
		pos = 0
		while pos < len(deltas):
			value = deltas[pos]
			if value == 0:
				pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, stream)
			elif value >= -128 and value <= 127:
				pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, stream)
			else:
				pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, stream)
		return stream.getvalue()

	@staticmethod
	def encodeDeltaRunAsZeroes_(deltas, offset, stream):
		"""Emit one run of zero deltas starting at offset; returns new offset."""
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64 and deltas[pos] == 0:
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(DELTAS_ARE_ZERO | (runLength - 1)))
		return pos

	@staticmethod
	def encodeDeltaRunAsBytes_(deltas, offset, stream):
		"""Emit one run of byte-sized deltas starting at offset; returns new offset."""
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64:
			value = deltas[pos]
			if value < -128 or value > 127:
				break
			# Within a byte-encoded run of deltas, a single zero
			# is best stored literally as 0x00 value. However,
			# if are two or more zeroes in a sequence, it is
			# better to start a new run. For example, the sequence
			# of deltas [15, 15, 0, 15, 15] becomes 6 bytes
			# (04 0F 0F 00 0F 0F) when storing the zero value
			# literally, but 7 bytes (01 0F 0F 80 01 0F 0F)
			# when starting a new run.
			if value == 0 and pos+1 < numDeltas and deltas[pos+1] == 0:
				break
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(runLength - 1))
		for i in range(offset, pos):
			stream.write(struct.pack('b', otRound(deltas[i])))
		return pos

	@staticmethod
	def encodeDeltaRunAsWords_(deltas, offset, stream):
		"""Emit one run of word-sized deltas starting at offset; returns new offset."""
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64:
			value = deltas[pos]
			# Within a word-encoded run of deltas, it is easiest
			# to start a new run (with a different encoding)
			# whenever we encounter a zero value. For example,
			# the sequence [0x6666, 0, 0x7777] needs 7 bytes when
			# storing the zero literally (42 66 66 00 00 77 77),
			# and equally 7 bytes when starting a new run
			# (40 66 66 80 40 77 77).
			if value == 0:
				break

			# Within a word-encoded run of deltas, a single value
			# in the range (-128..127) should be encoded literally
			# because it is more compact. For example, the sequence
			# [0x6666, 2, 0x7777] becomes 7 bytes when storing
			# the value literally (42 66 66 00 02 77 77), but 8 bytes
			# when starting a new run (40 66 66 00 02 40 77 77).
			isByteEncodable = lambda value: value >= -128 and value <= 127
			if isByteEncodable(value) and pos+1 < numDeltas and isByteEncodable(deltas[pos+1]):
				break
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(DELTAS_ARE_WORDS | (runLength - 1)))
		for i in range(offset, pos):
			stream.write(struct.pack('>h', otRound(deltas[i])))
		return pos

	@staticmethod
	def decompileDeltas_(numDeltas, data, offset):
		"""(numDeltas, data, offset) --> ([delta, delta, ...], newOffset)"""
		result = []
		pos = offset
		while len(result) < numDeltas:
			runHeader = byteord(data[pos])
			pos += 1
			numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
			if (runHeader & DELTAS_ARE_ZERO) != 0:
				result.extend([0] * numDeltasInRun)
			else:
				if (runHeader & DELTAS_ARE_WORDS) != 0:
					deltas = array.array("h")
					deltasSize = numDeltasInRun * 2
				else:
					deltas = array.array("b")
					deltasSize = numDeltasInRun
				# fromstring is the py2-compatible spelling of frombytes.
				deltas.fromstring(data[pos:pos+deltasSize])
				if sys.byteorder != "big": deltas.byteswap()
				assert len(deltas) == numDeltasInRun
				pos += deltasSize
				result.extend(deltas)
		assert len(result) == numDeltas
		return (result, pos)

	@staticmethod
	def getTupleSize_(flags, axisCount):
		"""Return the byte size of a TupleVariationHeader with these flags."""
		size = 4  # uint16 variationDataSize + uint16 tupleIndex/flags
		if (flags & EMBEDDED_PEAK_TUPLE) != 0:
			size += axisCount * 2
		if (flags & INTERMEDIATE_REGION) != 0:
			size += axisCount * 4
		return size

	def getCoordWidth(self):
		""" Return 2 if coordinates are (x, y) as in gvar, 1 if single values
		as in cvar, or 0 if empty.
		"""
		firstDelta = next((c for c in self.coordinates if c is not None), None)
		if firstDelta is None:
			return 0  # empty or has no impact
		if type(firstDelta) in (int, float):
			return 1
		if type(firstDelta) is tuple and len(firstDelta) == 2:
			return 2
		raise TypeError(
			"invalid type of delta; expected (int or float) number, or "
			"Tuple[number, number]: %r" % firstDelta
		)

	def scaleDeltas(self, scalar):
		"""Multiply all deltas by scalar, in place; None deltas stay None."""
		if scalar == 1.0:
			return  # no change
		coordWidth = self.getCoordWidth()
		self.coordinates = [
			None
			if d is None
			else d * scalar
			if coordWidth == 1
			else (d[0] * scalar, d[1] * scalar)
			for d in self.coordinates
		]

	def roundDeltas(self):
		"""Round all deltas to integers (otRound), in place."""
		coordWidth = self.getCoordWidth()
		self.coordinates = [
			None
			if d is None
			else otRound(d)
			if coordWidth == 1
			else (otRound(d[0]), otRound(d[1]))
			for d in self.coordinates
		]

	def calcInferredDeltas(self, origCoords, endPts):
		"""Replace None ('gvar') deltas with IUP-interpolated values, in place."""
		from fontTools.varLib.iup import iup_delta

		if self.getCoordWidth() == 1:
			raise TypeError(
				"Only 'gvar' TupleVariation can have inferred deltas"
			)
		if None in self.coordinates:
			if len(self.coordinates) != len(origCoords):
				raise ValueError(
					"Expected len(origCoords) == %d; found %d"
					% (len(self.coordinates), len(origCoords))
				)
			self.coordinates = iup_delta(self.coordinates, origCoords, endPts)

	def optimize(self, origCoords, endPts, tolerance=0.5, isComposite=False):
		"""Drop deltas that IUP can infer (within tolerance), if that is smaller."""
		from fontTools.varLib.iup import iup_delta_optimize

		if None in self.coordinates:
			return  # already optimized

		deltaOpt = iup_delta_optimize(
		    self.coordinates, origCoords, endPts, tolerance=tolerance
		)
		if None in deltaOpt:
			if isComposite and all(d is None for d in deltaOpt):
				# Fix for macOS composites
				# https://github.com/fonttools/fonttools/issues/1381
				deltaOpt = [(0, 0)] + [None] * (len(deltaOpt) - 1)
			# Use "optimized" version only if smaller...
			varOpt = TupleVariation(self.axes, deltaOpt)

			# Shouldn't matter that this is different from fvar...?
			axisTags = sorted(self.axes.keys())
			tupleData, auxData, _ = self.compile(axisTags, [], None)
			unoptimizedLength = len(tupleData) + len(auxData)
			tupleData, auxData, _ = varOpt.compile(axisTags, [], None)
			optimizedLength = len(tupleData) + len(auxData)

			if optimizedLength < unoptimizedLength:
				self.coordinates = varOpt.coordinates

	def __iadd__(self, other):
		"""Add another TupleVariation's deltas to this one, element-wise."""
		if not isinstance(other, TupleVariation):
			return NotImplemented
		deltas1 = self.coordinates
		length = len(deltas1)
		deltas2 = other.coordinates
		if len(deltas2) != length:
			raise ValueError(
				"cannot sum TupleVariation deltas with different lengths"
			)
		# 'None' values have different meanings in gvar vs cvar TupleVariations:
		# within the gvar, when deltas are not provided explicitly for some points,
		# they need to be inferred; whereas for the 'cvar' table, if deltas are not
		# provided for some CVT values, then no adjustments are made (i.e. None == 0).
		# Thus, we cannot sum deltas for gvar TupleVariations if they contain
		# inferred deltas (the latter need to be computed first using
		# 'calcInferredDeltas' method), but we can treat 'None' values in cvar
		# deltas as if they are zeros.
		if self.getCoordWidth() == 2:
			for i, d2 in zip(range(length), deltas2):
				d1 = deltas1[i]
				try:
					deltas1[i] = (d1[0] + d2[0], d1[1] + d2[1])
				except TypeError:
					raise ValueError(
						"cannot sum gvar deltas with inferred points"
					)
		else:
			for i, d2 in zip(range(length), deltas2):
				d1 = deltas1[i]
				if d1 is not None and d2 is not None:
					deltas1[i] = d1 + d2
				elif d1 is None and d2 is not None:
					deltas1[i] = d2
				# elif d2 is None do nothing
		return self
565
566
def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
	"""Parse the shared tuple array: returns a list of sharedTupleCount
	coordinate dicts ({axisTag: float}), read consecutively from offset."""
	tuples = []
	pos = offset
	while len(tuples) < sharedTupleCount:
		coord, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
		tuples.append(coord)
	return tuples
573
574
def compileSharedTuples(axisTags, variations):
	"""Pick the peak tuples worth sharing across variations.

	A compiled peak coordinate is shared when more than one variation
	uses it; candidates are ordered by descending use count and capped
	at the maximum number of indexable shared tuples.
	"""
	frequencies = {}
	for variation in variations:
		key = variation.compileCoord(axisTags)
		frequencies[key] = frequencies.get(key, 0) + 1
	candidates = [
		(count, coord) for (coord, count) in frequencies.items() if count > 1
	]
	candidates.sort(reverse=True)
	limit = TUPLE_INDEX_MASK + 1  # largest index encodable in the header
	return [coord for (_count, coord) in candidates[:limit]]
586
587
def compileTupleVariationStore(variations, pointCount,
                               axisTags, sharedTupleIndices,
                               useSharedPoints=True):
	"""Compile a list of TupleVariations into store form.

	Returns (tupleVariationCount, tuples, data) where 'tuples' is the
	concatenated TupleVariationHeaders and 'data' the serialized
	point/delta payload.  Variations without impact are dropped.
	"""
	variations = [v for v in variations if v.hasImpact()]
	if len(variations) == 0:
		return (0, b"", b"")

	# Each glyph variation tuples modifies a set of control points. To
	# indicate which exact points are getting modified, a single tuple
	# can either refer to a shared set of points, or the tuple can
	# supply its private point numbers.  Because the impact of sharing
	# can be positive (no need for a private point list) or negative
	# (need to supply 0,0 deltas for unused points), it is not obvious
	# how to determine which tuples should take their points from the
	# shared pool versus have their own. Perhaps we should resort to
	# brute force, and try all combinations? However, if a glyph has n
	# variation tuples, we would need to try 2^n combinations (because
	# each tuple may or may not be part of the shared set). How many
	# variations tuples do glyphs have?
	#
	#   Skia.ttf: {3: 1, 5: 11, 6: 41, 7: 62, 8: 387, 13: 1, 14: 3}
	#   JamRegular.ttf: {3: 13, 4: 122, 5: 1, 7: 4, 8: 1, 9: 1, 10: 1}
	#   BuffaloGalRegular.ttf: {1: 16, 2: 13, 4: 2, 5: 4, 6: 19, 7: 1, 8: 3, 9: 8}
	#   (Reading example: In Skia.ttf, 41 glyphs have 6 variation tuples).
	#

	# Is this even worth optimizing? If we never use a shared point
	# list, the private lists will consume 112K for Skia, 5K for
	# BuffaloGalRegular, and 15K for JamRegular. If we always use a
	# shared point list, the shared lists will consume 16K for Skia,
	# 3K for BuffaloGalRegular, and 10K for JamRegular. However, in
	# the latter case the delta arrays will become larger, but I
	# haven't yet measured by how much. From gut feeling (which may be
	# wrong), the optimum is to share some but not all points;
	# however, then we would need to try all combinations.
	#
	# For the time being, we try two variants and then pick the better one:
	# (a) each tuple supplies its own private set of points;
	# (b) all tuples refer to a shared set of points, which consists of
	#     "every control point in the glyph that has explicit deltas".
	usedPoints = set()
	for v in variations:
		usedPoints |= v.getUsedPoints()
	tuples = []
	data = []
	someTuplesSharePoints = False
	sharedPointVariation = None # To keep track of a variation that uses shared points
	for v in variations:
		# Compile both variants and keep whichever is smaller.
		privateTuple, privateData, _ = v.compile(
			axisTags, sharedTupleIndices, sharedPoints=None)
		sharedTuple, sharedData, usesSharedPoints = v.compile(
			axisTags, sharedTupleIndices, sharedPoints=usedPoints)
		if useSharedPoints and (len(sharedTuple) + len(sharedData)) < (len(privateTuple) + len(privateData)):
			tuples.append(sharedTuple)
			data.append(sharedData)
			someTuplesSharePoints |= usesSharedPoints
			sharedPointVariation = v
		else:
			tuples.append(privateTuple)
			data.append(privateData)
	if someTuplesSharePoints:
		# Use the last of the variations that share points for compiling the packed point data
		data = sharedPointVariation.compilePoints(usedPoints, len(sharedPointVariation.coordinates)) + bytesjoin(data)
		tupleVariationCount = TUPLES_SHARE_POINT_NUMBERS | len(tuples)
	else:
		data = bytesjoin(data)
		tupleVariationCount = len(tuples)
	tuples = bytesjoin(tuples)
	return tupleVariationCount, tuples, data
657
658
def decompileTupleVariationStore(tableTag, axisTags,
                                 tupleVariationCount, pointCount, sharedTuples,
                                 data, pos, dataPos):
	"""Parse a tuple variation store into a list of TupleVariations.

	'pos' points at the first TupleVariationHeader, 'dataPos' at the
	serialized point/delta payload; both regions live inside 'data'.
	"""
	numAxes = len(axisTags)
	result = []
	if (tupleVariationCount & TUPLES_SHARE_POINT_NUMBERS) != 0:
		# Shared point numbers precede the per-tuple data.
		sharedPoints, dataPos = TupleVariation.decompilePoints_(
			pointCount, data, dataPos, tableTag)
	else:
		sharedPoints = []
	for _ in range(tupleVariationCount & TUPLE_COUNT_MASK):
		dataSize, flags = struct.unpack(">HH", data[pos:pos+4])
		tupleSize = TupleVariation.getTupleSize_(flags, numAxes)
		tupleData = data[pos : pos + tupleSize]
		pointDeltaData = data[dataPos : dataPos + dataSize]
		result.append(decompileTupleVariation_(
			pointCount, sharedTuples, sharedPoints,
			tableTag, axisTags, tupleData, pointDeltaData))
		pos += tupleSize
		dataPos += dataSize
	return result
680
681
def decompileTupleVariation_(pointCount, sharedTuples, sharedPoints,
							 tableTag, axisTags, data, tupleData):
	"""Parse one variation tuple into a TupleVariation.

	Note the parameter roles: 'data' is the serialized
	TupleVariationHeader (size, flags, embedded coords), while
	'tupleData' is that tuple's serialized point numbers and deltas.
	"""
	assert tableTag in ("cvar", "gvar"), tableTag
	# Bytes 0-1 are the data size (already consumed by the caller);
	# bytes 2-3 hold the flags / shared tuple index.
	flags = struct.unpack(">H", data[2:4])[0]
	pos = 4
	if (flags & EMBEDDED_PEAK_TUPLE) == 0:
		peak = sharedTuples[flags & TUPLE_INDEX_MASK]
	else:
		peak, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
	if (flags & INTERMEDIATE_REGION) != 0:
		start, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
		end, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
	else:
		start, end = inferRegion_(peak)
	axes = {}
	for axis in axisTags:
		region = start[axis], peak[axis], end[axis]
		if region != (0.0, 0.0, 0.0):
			axes[axis] = region
	# Switch to reading from the point/delta payload.
	pos = 0
	if (flags & PRIVATE_POINT_NUMBERS) != 0:
		points, pos = TupleVariation.decompilePoints_(
			pointCount, tupleData, pos, tableTag)
	else:
		points = sharedPoints

	deltas = [None] * pointCount

	if tableTag == "cvar":
		deltas_cvt, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		for p, delta in zip(points, deltas_cvt):
			# Silently skip out-of-range point numbers (already warned
			# about in decompilePoints_).
			if 0 <= p < pointCount:
				deltas[p] = delta

	elif tableTag == "gvar":
		# 'gvar' stores all x deltas, then all y deltas.
		deltas_x, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		deltas_y, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		for p, x, y in zip(points, deltas_x, deltas_y):
			if 0 <= p < pointCount:
				deltas[p] = (x, y)

	return TupleVariation(axes, deltas)
727
728
def inferRegion_(peak):
	"""Infer start and end for a (non-intermediate) region

	This helper function computes the applicability region for
	variation tuples whose INTERMEDIATE_REGION flag is not set in the
	TupleVariationHeader structure.  Variation tuples apply only to
	certain regions of the variation space; outside that region, the
	tuple has no effect.  To make the binary encoding more compact,
	TupleVariationHeaders can omit the intermediateStartTuple and
	intermediateEndTuple fields.
	"""
	# The region spans from the peak to the default (0.0) on each axis:
	# a negative peak gives (peak, 0.0), a positive one gives (0.0, peak).
	start = {axis: min(value, 0.0) for axis, value in peak.items()}
	end = {axis: max(value, 0.0) for axis, value in peak.items()}
	return (start, end)
745