• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
from __future__ import print_function, division, absolute_import

import array
import io
import logging
import struct
import sys

from fontTools.misc.py23 import *
from fontTools.misc.fixedTools import fixedToFloat, floatToFixed, otRound
from fontTools.misc.textTools import safeEval
10
11
# Flag and mask constants for the OpenType tuple variation common formats
# ('gvar' and 'cvar' tables):
# https://www.microsoft.com/typography/otspec/otvarcommonformats.htm

# TupleVariationHeader.tupleIndex flags
EMBEDDED_PEAK_TUPLE = 0x8000
INTERMEDIATE_REGION = 0x4000
PRIVATE_POINT_NUMBERS = 0x2000

# Packed-delta run header flags (high bits) and run-count mask (low 6 bits)
DELTAS_ARE_ZERO = 0x80
DELTAS_ARE_WORDS = 0x40
DELTA_RUN_COUNT_MASK = 0x3f

# Packed-point-number run header flag (high bit) and run-count mask (low 7 bits)
POINTS_ARE_WORDS = 0x80
POINT_RUN_COUNT_MASK = 0x7f

# tupleVariationCount field: flag plus masks for the count / shared-tuple index
TUPLES_SHARE_POINT_NUMBERS = 0x8000
TUPLE_COUNT_MASK = 0x0fff
TUPLE_INDEX_MASK = 0x0fff

log = logging.getLogger(__name__)
30
31
class TupleVariation(object):
	"""One variation tuple: a design-space region plus per-point deltas.

	``axes`` maps axis tags to ``(minValue, value, maxValue)`` triples that
	delimit the region of the design space where this variation applies;
	``value`` is the peak.  ``coordinates`` has one entry per point: an
	``(x, y)`` pair for 'gvar', a plain int for 'cvar', or ``None`` for
	points that carry no explicit delta.
	"""

	def __init__(self, axes, coordinates):
		# Copy both arguments so later mutation of the caller's objects
		# cannot affect this variation (and vice versa).
		self.axes = axes.copy()
		self.coordinates = coordinates[:]

	def __repr__(self):
		axes = ",".join(sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()]))
		return "<TupleVariation %s %s>" % (axes, self.coordinates)

	def __eq__(self, other):
		return self.coordinates == other.coordinates and self.axes == other.axes

	def getUsedPoints(self):
		"""Return the set of point indices that have an explicit delta."""
		result = set()
		for i, point in enumerate(self.coordinates):
			if point is not None:
				result.add(i)
		return result

	def hasImpact(self):
		"""Returns True if this TupleVariation has any visible impact.

		If the result is False, the TupleVariation can be omitted from the font
		without making any visible difference.
		"""
		for c in self.coordinates:
			if c is not None:
				return True
		return False

	def toXML(self, writer, axisTags):
		"""Write this variation as a <tuple> element via an XMLWriter.

		``axisTags`` determines the order of the emitted <coord> children.
		"""
		writer.begintag("tuple")
		writer.newline()
		for axis in axisTags:
			value = self.axes.get(axis)
			if value is not None:
				minValue, value, maxValue = (float(v) for v in value)
				defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
				defaultMaxValue = max(value, 0.0)  # -0.3 -->  0.0; 0.7 --> 0.7
				if minValue == defaultMinValue and maxValue == defaultMaxValue:
					# min/max can be inferred from the peak; omit them.
					writer.simpletag("coord", axis=axis, value=value)
				else:
					attrs = [
						("axis", axis),
						("min", minValue),
						("value", value),
						("max", maxValue),
					]
					writer.simpletag("coord", attrs)
				writer.newline()
		wrote_any_deltas = False
		for i, delta in enumerate(self.coordinates):
			if type(delta) == tuple and len(delta) == 2:
				# 'gvar' style delta: (x, y) movement of glyph point i.
				writer.simpletag("delta", pt=i, x=delta[0], y=delta[1])
				writer.newline()
				wrote_any_deltas = True
			elif type(delta) == int:
				# 'cvar' style delta: adjustment of CVT entry i.
				writer.simpletag("delta", cvt=i, value=delta)
				writer.newline()
				wrote_any_deltas = True
			elif delta is not None:
				log.error("bad delta format")
				writer.comment("bad delta #%d" % i)
				writer.newline()
				wrote_any_deltas = True
		if not wrote_any_deltas:
			writer.comment("no deltas")
			writer.newline()
		writer.endtag("tuple")
		writer.newline()

	def fromXML(self, name, attrs, _content):
		"""Restore state from one parsed <coord> or <delta> XML element."""
		if name == "coord":
			axis = attrs["axis"]
			value = float(attrs["value"])
			defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
			defaultMaxValue = max(value, 0.0)  # -0.3 -->  0.0; 0.7 --> 0.7
			minValue = float(attrs.get("min", defaultMinValue))
			maxValue = float(attrs.get("max", defaultMaxValue))
			self.axes[axis] = (minValue, value, maxValue)
		elif name == "delta":
			if "pt" in attrs:
				point = safeEval(attrs["pt"])
				x = safeEval(attrs["x"])
				y = safeEval(attrs["y"])
				self.coordinates[point] = (x, y)
			elif "cvt" in attrs:
				cvt = safeEval(attrs["cvt"])
				value = safeEval(attrs["value"])
				self.coordinates[cvt] = value
			else:
				log.warning("bad delta format: %s" %
				            ", ".join(sorted(attrs.keys())))

	def compile(self, axisTags, sharedCoordIndices, sharedPoints):
		"""Compile to binary; returns (tupleData, auxData, usesSharedPoints).

		``tupleData`` is the TupleVariationHeader (length, flags, embedded
		tuples); ``auxData`` is the serialized private point numbers (if any)
		followed by the packed deltas; ``usesSharedPoints`` tells the caller
		whether this tuple relies on the table-level shared point list.
		"""
		tupleData = []

		assert all(tag in axisTags for tag in self.axes.keys()), ("Unknown axis tag found.", self.axes.keys(), axisTags)

		coord = self.compileCoord(axisTags)
		if coord in sharedCoordIndices:
			# Refer to the shared tuple by index instead of embedding it.
			flags = sharedCoordIndices[coord]
		else:
			flags = EMBEDDED_PEAK_TUPLE
			tupleData.append(coord)

		intermediateCoord = self.compileIntermediateCoord(axisTags)
		if intermediateCoord is not None:
			flags |= INTERMEDIATE_REGION
			tupleData.append(intermediateCoord)

		points = self.getUsedPoints()
		if sharedPoints == points:
			# Only use the shared points if they are identical to the actually used points
			auxData = self.compileDeltas(sharedPoints)
			usesSharedPoints = True
		else:
			flags |= PRIVATE_POINT_NUMBERS
			numPointsInGlyph = len(self.coordinates)
			auxData = self.compilePoints(points, numPointsInGlyph) + self.compileDeltas(points)
			usesSharedPoints = False

		tupleData = struct.pack('>HH', len(auxData), flags) + bytesjoin(tupleData)
		return (tupleData, auxData, usesSharedPoints)

	def compileCoord(self, axisTags):
		"""Pack the peak coordinate, one 2.14 fixed-point value per axis."""
		result = []
		for axis in axisTags:
			# Axes not present in self.axes contribute a zero peak.
			_minValue, value, _maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			result.append(struct.pack(">h", floatToFixed(value, 14)))
		return bytesjoin(result)

	def compileIntermediateCoord(self, axisTags):
		"""Pack start+end tuples, or return None when they are inferable.

		If every axis's min/max equals the default derived from its peak,
		the intermediate region can be omitted from the binary encoding.
		"""
		needed = False
		for axis in axisTags:
			minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
			defaultMaxValue = max(value, 0.0)  # -0.3 -->  0.0; 0.7 --> 0.7
			if (minValue != defaultMinValue) or (maxValue != defaultMaxValue):
				needed = True
				break
		if not needed:
			return None
		minCoords = []
		maxCoords = []
		for axis in axisTags:
			minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			minCoords.append(struct.pack(">h", floatToFixed(minValue, 14)))
			maxCoords.append(struct.pack(">h", floatToFixed(maxValue, 14)))
		return bytesjoin(minCoords + maxCoords)

	@staticmethod
	def decompileCoord_(axisTags, data, offset):
		"""Read one coordinate tuple; returns ({axisTag: float}, newOffset)."""
		coord = {}
		pos = offset
		for axis in axisTags:
			coord[axis] = fixedToFloat(struct.unpack(">h", data[pos:pos+2])[0], 14)
			pos += 2
		return coord, pos

	@staticmethod
	def compilePoints(points, numPointsInGlyph):
		"""Serialize a set of point indices into the packed 'gvar' encoding."""
		# If the set consists of all points in the glyph, it gets encoded with
		# a special encoding: a single zero byte.
		if len(points) == numPointsInGlyph:
			return b"\0"

		# In the 'gvar' table, the packing of point numbers is a little surprising.
		# It consists of multiple runs, each being a delta-encoded list of integers.
		# For example, the point set {17, 18, 19, 20, 21, 22, 23} gets encoded as
		# [6, 17, 1, 1, 1, 1, 1, 1]. The first value (6) is the run length minus 1.
		# There are two types of runs, with values being either 8 or 16 bit unsigned
		# integers.
		points = list(points)
		points.sort()
		numPoints = len(points)

		# The binary representation starts with the total number of points in the set,
		# encoded into one or two bytes depending on the value.
		if numPoints < 0x80:
			result = [bytechr(numPoints)]
		else:
			result = [bytechr((numPoints >> 8) | 0x80) + bytechr(numPoints & 0xff)]

		MAX_RUN_LENGTH = 127
		pos = 0
		lastValue = 0
		while pos < numPoints:
			run = io.BytesIO()
			runLength = 0
			useByteEncoding = None
			while pos < numPoints and runLength <= MAX_RUN_LENGTH:
				curValue = points[pos]
				delta = curValue - lastValue
				if useByteEncoding is None:
					# The first delta of a run decides the run's encoding width.
					useByteEncoding = 0 <= delta <= 0xff
				if useByteEncoding and (delta > 0xff or delta < 0):
					# we need to start a new run (which will not use byte encoding)
					break
				# TODO This never switches back to a byte-encoding from a short-encoding.
				# That's suboptimal.
				if useByteEncoding:
					run.write(bytechr(delta))
				else:
					run.write(bytechr(delta >> 8))
					run.write(bytechr(delta & 0xff))
				lastValue = curValue
				pos += 1
				runLength += 1
			if useByteEncoding:
				runHeader = bytechr(runLength - 1)
			else:
				runHeader = bytechr((runLength - 1) | POINTS_ARE_WORDS)
			result.append(runHeader)
			result.append(run.getvalue())

		return bytesjoin(result)

	@staticmethod
	def decompilePoints_(numPoints, data, offset, tableTag):
		"""(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
		assert tableTag in ('cvar', 'gvar')
		pos = offset
		numPointsInData = byteord(data[pos])
		pos += 1
		if (numPointsInData & POINTS_ARE_WORDS) != 0:
			# High bit set: the count is stored in two bytes.
			numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | byteord(data[pos])
			pos += 1
		if numPointsInData == 0:
			# Special encoding: a zero count means "all points in the glyph".
			return (range(numPoints), pos)

		result = []
		while len(result) < numPointsInData:
			runHeader = byteord(data[pos])
			pos += 1
			numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
			if (runHeader & POINTS_ARE_WORDS) != 0:
				points = array.array("H")
				pointsSize = numPointsInRun * 2
			else:
				points = array.array("B")
				pointsSize = numPointsInRun
			# NOTE(review): array.fromstring was removed in Python 3.9; this
			# should become frombytes once Python 2 support is dropped.
			points.fromstring(data[pos:pos+pointsSize])
			# The packed values are big-endian; swap on little-endian hosts.
			if sys.byteorder != "big": points.byteswap()

			assert len(points) == numPointsInRun
			pos += pointsSize

			result.extend(points)

		# Convert relative to absolute
		absolute = []
		current = 0
		for delta in result:
			current += delta
			absolute.append(current)
		result = absolute
		del absolute

		badPoints = {str(p) for p in result if p < 0 or p >= numPoints}
		if badPoints:
			log.warning("point %s out of range in '%s' table" %
			            (",".join(sorted(badPoints)), tableTag))
		return (result, pos)

	def compileDeltas(self, points):
		"""Pack the deltas of the given points ('gvar': X run then Y run)."""
		deltaX = []
		deltaY = []
		for p in sorted(list(points)):
			c = self.coordinates[p]
			if type(c) is tuple and len(c) == 2:
				deltaX.append(c[0])
				deltaY.append(c[1])
			elif type(c) is int:
				# 'cvar' deltas are single ints; deltaY stays empty.
				deltaX.append(c)
			elif c is not None:
				raise ValueError("invalid type of delta: %s" % type(c))
		return self.compileDeltaValues_(deltaX) + self.compileDeltaValues_(deltaY)

	@staticmethod
	def compileDeltaValues_(deltas):
		"""[value1, value2, value3, ...] --> bytestring

		Emits a sequence of runs. Each run starts with a
		byte-sized header whose 6 least significant bits
		(header & 0x3F) indicate how many values are encoded
		in this run. The stored length is the actual length
		minus one; run lengths are thus in the range [1..64].
		If the header byte has its most significant bit (0x80)
		set, all values in this run are zero, and no data
		follows. Otherwise, the header byte is followed by
		((header & 0x3F) + 1) signed values.  If (header &
		0x40) is clear, the delta values are stored as signed
		bytes; if (header & 0x40) is set, the delta values are
		signed 16-bit integers.
		"""  # Explaining the format because the 'gvar' spec is hard to understand.
		stream = io.BytesIO()
		pos = 0
		while pos < len(deltas):
			value = deltas[pos]
			if value == 0:
				pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, stream)
			elif value >= -128 and value <= 127:
				pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, stream)
			else:
				pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, stream)
		return stream.getvalue()

	@staticmethod
	def encodeDeltaRunAsZeroes_(deltas, offset, stream):
		"""Emit one zero-run header; returns the position after the run."""
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64 and deltas[pos] == 0:
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(DELTAS_ARE_ZERO | (runLength - 1)))
		return pos

	@staticmethod
	def encodeDeltaRunAsBytes_(deltas, offset, stream):
		"""Emit one run of byte-sized deltas; returns the next position."""
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64:
			value = deltas[pos]
			if value < -128 or value > 127:
				break
			# Within a byte-encoded run of deltas, a single zero
			# is best stored literally as 0x00 value. However,
			# if are two or more zeroes in a sequence, it is
			# better to start a new run. For example, the sequence
			# of deltas [15, 15, 0, 15, 15] becomes 6 bytes
			# (04 0F 0F 00 0F 0F) when storing the zero value
			# literally, but 7 bytes (01 0F 0F 80 01 0F 0F)
			# when starting a new run.
			if value == 0 and pos+1 < numDeltas and deltas[pos+1] == 0:
				break
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(runLength - 1))
		for i in range(offset, pos):
			stream.write(struct.pack('b', otRound(deltas[i])))
		return pos

	@staticmethod
	def encodeDeltaRunAsWords_(deltas, offset, stream):
		"""Emit one run of 16-bit deltas; returns the next position."""
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64:
			value = deltas[pos]
			# Within a word-encoded run of deltas, it is easiest
			# to start a new run (with a different encoding)
			# whenever we encounter a zero value. For example,
			# the sequence [0x6666, 0, 0x7777] needs 7 bytes when
			# storing the zero literally (42 66 66 00 00 77 77),
			# and equally 7 bytes when starting a new run
			# (40 66 66 80 40 77 77).
			if value == 0:
				break

			# Within a word-encoded run of deltas, a single value
			# in the range (-128..127) should be encoded literally
			# because it is more compact. For example, the sequence
			# [0x6666, 2, 0x7777] becomes 7 bytes when storing
			# the value literally (42 66 66 00 02 77 77), but 8 bytes
			# when starting a new run (40 66 66 00 02 40 77 77).
			isByteEncodable = lambda value: value >= -128 and value <= 127
			if isByteEncodable(value) and pos+1 < numDeltas and isByteEncodable(deltas[pos+1]):
				break
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(DELTAS_ARE_WORDS | (runLength - 1)))
		for i in range(offset, pos):
			stream.write(struct.pack('>h', otRound(deltas[i])))
		return pos

	@staticmethod
	def decompileDeltas_(numDeltas, data, offset):
		"""(numDeltas, data, offset) --> ([delta, delta, ...], newOffset)"""
		result = []
		pos = offset
		while len(result) < numDeltas:
			runHeader = byteord(data[pos])
			pos += 1
			numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
			if (runHeader & DELTAS_ARE_ZERO) != 0:
				# Zero-run: no payload bytes follow the header.
				result.extend([0] * numDeltasInRun)
			else:
				if (runHeader & DELTAS_ARE_WORDS) != 0:
					deltas = array.array("h")
					deltasSize = numDeltasInRun * 2
				else:
					deltas = array.array("b")
					deltasSize = numDeltasInRun
				# NOTE(review): array.fromstring was removed in Python 3.9;
				# switch to frombytes once Python 2 support is dropped.
				deltas.fromstring(data[pos:pos+deltasSize])
				if sys.byteorder != "big": deltas.byteswap()
				assert len(deltas) == numDeltasInRun
				pos += deltasSize
				result.extend(deltas)
		assert len(result) == numDeltas
		return (result, pos)

	@staticmethod
	def getTupleSize_(flags, axisCount):
		"""Return the byte size of a TupleVariationHeader with these flags."""
		size = 4  # dataSize (uint16) + tupleIndex/flags (uint16)
		if (flags & EMBEDDED_PEAK_TUPLE) != 0:
			size += axisCount * 2  # one F2Dot14 per axis
		if (flags & INTERMEDIATE_REGION) != 0:
			size += axisCount * 4  # start and end F2Dot14 per axis
		return size
448
449
def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
	"""Parse the shared tuple array; returns a list of coordinate dicts."""
	sharedTuples = []
	pos = offset
	while len(sharedTuples) < sharedTupleCount:
		coord, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
		sharedTuples.append(coord)
	return sharedTuples
456
457
def compileSharedTuples(axisTags, variations):
	"""Pick peak tuples worth sharing (used more than once), most used first."""
	frequency = {}
	for variation in variations:
		compiled = variation.compileCoord(axisTags)
		frequency[compiled] = frequency.get(compiled, 0) + 1
	# Keep only tuples that appear at least twice; sort by descending count
	# (ties broken by the compiled bytes, also descending).
	candidates = [(count, coord)
				  for (coord, count) in frequency.items() if count > 1]
	candidates.sort(reverse=True)
	MAX_NUM_SHARED_COORDS = TUPLE_INDEX_MASK + 1
	return [coord for (_count, coord) in candidates[:MAX_NUM_SHARED_COORDS]]
469
470
def compileTupleVariationStore(variations, pointCount,
                               axisTags, sharedTupleIndices,
                               useSharedPoints=True):
	"""Compile a tuple variation store.

	Returns (tupleVariationCount, tuplesData, pointDeltaData).  Variations
	without any visible impact are dropped up front.
	"""
	variations = [v for v in variations if v.hasImpact()]
	if not variations:
		return (0, b"", b"")

	# Each glyph variation tuples modifies a set of control points. To
	# indicate which exact points are getting modified, a single tuple
	# can either refer to a shared set of points, or the tuple can
	# supply its private point numbers.  Because the impact of sharing
	# can be positive (no need for a private point list) or negative
	# (need to supply 0,0 deltas for unused points), it is not obvious
	# how to determine which tuples should take their points from the
	# shared pool versus have their own. Perhaps we should resort to
	# brute force, and try all combinations? However, if a glyph has n
	# variation tuples, we would need to try 2^n combinations (because
	# each tuple may or may not be part of the shared set). How many
	# variations tuples do glyphs have?
	#
	#   Skia.ttf: {3: 1, 5: 11, 6: 41, 7: 62, 8: 387, 13: 1, 14: 3}
	#   JamRegular.ttf: {3: 13, 4: 122, 5: 1, 7: 4, 8: 1, 9: 1, 10: 1}
	#   BuffaloGalRegular.ttf: {1: 16, 2: 13, 4: 2, 5: 4, 6: 19, 7: 1, 8: 3, 9: 8}
	#   (Reading example: In Skia.ttf, 41 glyphs have 6 variation tuples).
	#
	# For the time being, we try two variants per tuple and pick the smaller:
	# (a) the tuple supplies its own private set of points;
	# (b) the tuple refers to a shared set of points, which consists of
	#     "every control point in the glyph that has explicit deltas".
	allUsedPoints = set()
	for variation in variations:
		allUsedPoints |= variation.getUsedPoints()

	tuples = []
	data = []
	someTuplesSharePoints = False
	sharedPointVariation = None  # last variation that chose the shared points
	for variation in variations:
		privateTuple, privateData, _ = variation.compile(
			axisTags, sharedTupleIndices, sharedPoints=None)
		sharedTuple, sharedData, usesSharedPoints = variation.compile(
			axisTags, sharedTupleIndices, sharedPoints=allUsedPoints)
		sharedSize = len(sharedTuple) + len(sharedData)
		privateSize = len(privateTuple) + len(privateData)
		if useSharedPoints and sharedSize < privateSize:
			tuples.append(sharedTuple)
			data.append(sharedData)
			someTuplesSharePoints |= usesSharedPoints
			sharedPointVariation = variation
		else:
			tuples.append(privateTuple)
			data.append(privateData)

	if someTuplesSharePoints:
		# Use the last of the variations that share points for compiling
		# the packed point data, which precedes all per-tuple data.
		packedPoints = sharedPointVariation.compilePoints(
			allUsedPoints, len(sharedPointVariation.coordinates))
		data = packedPoints + bytesjoin(data)
		tupleVariationCount = TUPLES_SHARE_POINT_NUMBERS | len(tuples)
	else:
		data = bytesjoin(data)
		tupleVariationCount = len(tuples)
	return tupleVariationCount, bytesjoin(tuples), data
540
541
def decompileTupleVariationStore(tableTag, axisTags,
                                 tupleVariationCount, pointCount, sharedTuples,
                                 data, pos, dataPos):
	"""Parse a tuple variation store; returns a list of TupleVariation.

	``pos`` indexes the TupleVariationHeader array, ``dataPos`` the
	serialized point/delta data that follows the headers.
	"""
	axisCount = len(axisTags)
	if (tupleVariationCount & TUPLES_SHARE_POINT_NUMBERS) != 0:
		sharedPoints, dataPos = TupleVariation.decompilePoints_(
			pointCount, data, dataPos, tableTag)
	else:
		sharedPoints = []
	variations = []
	for _ in range(tupleVariationCount & TUPLE_COUNT_MASK):
		dataSize, flags = struct.unpack(">HH", data[pos:pos+4])
		headerSize = TupleVariation.getTupleSize_(flags, axisCount)
		variations.append(decompileTupleVariation_(
			pointCount, sharedTuples, sharedPoints, tableTag, axisTags,
			data[pos:pos+headerSize], data[dataPos:dataPos+dataSize]))
		pos += headerSize
		dataPos += dataSize
	return variations
563
564
def decompileTupleVariation_(pointCount, sharedTuples, sharedPoints,
							 tableTag, axisTags, data, tupleData):
	"""Parse one TupleVariationHeader plus its serialized data.

	``data`` holds the header (with any embedded peak/intermediate tuples);
	``tupleData`` holds this tuple's point numbers and deltas.  Returns a
	TupleVariation.
	"""
	assert tableTag in ("cvar", "gvar"), tableTag
	flags = struct.unpack(">H", data[2:4])[0]
	pos = 4
	if flags & EMBEDDED_PEAK_TUPLE:
		peak, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
	else:
		peak = sharedTuples[flags & TUPLE_INDEX_MASK]
	if flags & INTERMEDIATE_REGION:
		start, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
		end, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
	else:
		start, end = inferRegion_(peak)
	# Drop axes whose region is (0, 0, 0) — they have no effect.
	axes = {}
	for axis in axisTags:
		region = start[axis], peak[axis], end[axis]
		if region != (0.0, 0.0, 0.0):
			axes[axis] = region
	pos = 0
	if flags & PRIVATE_POINT_NUMBERS:
		points, pos = TupleVariation.decompilePoints_(
			pointCount, tupleData, pos, tableTag)
	else:
		points = sharedPoints

	deltas = [None] * pointCount

	if tableTag == "cvar":
		# One scalar delta per point.
		cvtDeltas, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		for p, delta in zip(points, cvtDeltas):
			if 0 <= p < pointCount:
				deltas[p] = delta
	elif tableTag == "gvar":
		# All X deltas, then all Y deltas.
		xDeltas, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		yDeltas, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		for p, x, y in zip(points, xDeltas, yDeltas):
			if 0 <= p < pointCount:
				deltas[p] = (x, y)

	return TupleVariation(axes, deltas)
610
611
def inferRegion_(peak):
	"""Infer start and end for a (non-intermediate) region

	This helper function computes the applicability region for
	variation tuples whose INTERMEDIATE_REGION flag is not set in the
	TupleVariationHeader structure.  Variation tuples apply only to
	certain regions of the variation space; outside that region, the
	tuple has no effect.  To make the binary encoding more compact,
	TupleVariationHeaders can omit the intermediateStartTuple and
	intermediateEndTuple fields.
	"""
	# Per axis: start is min(peak, 0), end is max(peak, 0).
	# e.g. peak -0.3 --> (-0.3, 0.0); peak 0.7 --> (0.0, 0.7)
	start = {axis: min(value, 0.0) for (axis, value) in peak.items()}
	end = {axis: max(value, 0.0) for (axis, value) in peak.items()}
	return (start, end)
628