from fontTools.misc.py23 import bytechr, byteord, bytesjoin
from fontTools.misc.fixedTools import (
	fixedToFloat as fi2fl,
	floatToFixed as fl2fi,
	floatToFixedToStr as fl2str,
	strToFixedToFloat as str2fl,
	otRound,
)
from fontTools.misc.textTools import safeEval
import array
import io
import logging
import struct
import sys


# https://www.microsoft.com/typography/otspec/otvarcommonformats.htm

EMBEDDED_PEAK_TUPLE = 0x8000
INTERMEDIATE_REGION = 0x4000
PRIVATE_POINT_NUMBERS = 0x2000

DELTAS_ARE_ZERO = 0x80
DELTAS_ARE_WORDS = 0x40
DELTA_RUN_COUNT_MASK = 0x3f

POINTS_ARE_WORDS = 0x80
POINT_RUN_COUNT_MASK = 0x7f

TUPLES_SHARE_POINT_NUMBERS = 0x8000
TUPLE_COUNT_MASK = 0x0fff
TUPLE_INDEX_MASK = 0x0fff

log = logging.getLogger(__name__)


class TupleVariation(object):

	def __init__(self, axes, coordinates):
		self.axes = axes.copy()
		self.coordinates = coordinates[:]

	def __repr__(self):
		axes = ",".join(sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()]))
		return "<TupleVariation %s %s>" % (axes, self.coordinates)

	def __eq__(self, other):
		return self.coordinates == other.coordinates and self.axes == other.axes

	def getUsedPoints(self):
		result = set()
		for i, point in enumerate(self.coordinates):
			if point is not None:
				result.add(i)
		return result

	def hasImpact(self):
		"""Returns True if this TupleVariation has any visible impact.

		If the result is False, the TupleVariation can be omitted from the font
		without making any visible difference.
		"""
		return any(c is not None for c in self.coordinates)

	def toXML(self, writer, axisTags):
		writer.begintag("tuple")
		writer.newline()
		for axis in axisTags:
			value = self.axes.get(axis)
			if value is not None:
				minValue, value, maxValue = value
				defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
				defaultMaxValue = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
				if minValue == defaultMinValue and maxValue == defaultMaxValue:
					writer.simpletag("coord", axis=axis, value=fl2str(value, 14))
				else:
					attrs = [
						("axis", axis),
						("min", fl2str(minValue, 14)),
						("value", fl2str(value, 14)),
						("max", fl2str(maxValue, 14)),
					]
					writer.simpletag("coord", attrs)
				writer.newline()
		wrote_any_deltas = False
		for i, delta in enumerate(self.coordinates):
			if type(delta) == tuple and len(delta) == 2:
				writer.simpletag("delta", pt=i, x=delta[0], y=delta[1])
				writer.newline()
				wrote_any_deltas = True
			elif type(delta) == int:
				writer.simpletag("delta", cvt=i, value=delta)
				writer.newline()
				wrote_any_deltas = True
			elif delta is not None:
				log.error("bad delta format")
				writer.comment("bad delta #%d" % i)
				writer.newline()
				wrote_any_deltas = True
		if not wrote_any_deltas:
			writer.comment("no deltas")
			writer.newline()
		writer.endtag("tuple")
		writer.newline()

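	# Illustrative sketch of the XML emitted by toXML() (and read back by
	# fromXML()); axis names and values here are made up, not taken from a
	# real font:
	#
	#   <tuple>
	#     <coord axis="wght" value="1.0"/>
	#     <delta pt="0" x="10" y="0"/>
	#     <delta pt="1" x="12" y="-3"/>
	#   </tuple>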
	def fromXML(self, name, attrs, _content):
		if name == "coord":
			axis = attrs["axis"]
			value = str2fl(attrs["value"], 14)
			defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
			defaultMaxValue = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
			minValue = str2fl(attrs.get("min", defaultMinValue), 14)
			maxValue = str2fl(attrs.get("max", defaultMaxValue), 14)
			self.axes[axis] = (minValue, value, maxValue)
		elif name == "delta":
			if "pt" in attrs:
				point = safeEval(attrs["pt"])
				x = safeEval(attrs["x"])
				y = safeEval(attrs["y"])
				self.coordinates[point] = (x, y)
			elif "cvt" in attrs:
				cvt = safeEval(attrs["cvt"])
				value = safeEval(attrs["value"])
				self.coordinates[cvt] = value
			else:
				log.warning("bad delta format: %s" %
					", ".join(sorted(attrs.keys())))

	def compile(self, axisTags, sharedCoordIndices, sharedPoints):
		tupleData = []

		assert all(tag in axisTags for tag in self.axes.keys()), ("Unknown axis tag found.", self.axes.keys(), axisTags)

		coord = self.compileCoord(axisTags)
		if coord in sharedCoordIndices:
			flags = sharedCoordIndices[coord]
		else:
			flags = EMBEDDED_PEAK_TUPLE
			tupleData.append(coord)

		intermediateCoord = self.compileIntermediateCoord(axisTags)
		if intermediateCoord is not None:
			flags |= INTERMEDIATE_REGION
			tupleData.append(intermediateCoord)

		points = self.getUsedPoints()
		if sharedPoints == points:
			# Only use the shared points if they are identical to the actually used points
			auxData = self.compileDeltas(sharedPoints)
			usesSharedPoints = True
		else:
			flags |= PRIVATE_POINT_NUMBERS
			numPointsInGlyph = len(self.coordinates)
			auxData = self.compilePoints(points, numPointsInGlyph) + self.compileDeltas(points)
			usesSharedPoints = False

		tupleData = struct.pack('>HH', len(auxData), flags) + bytesjoin(tupleData)
		return (tupleData, auxData, usesSharedPoints)

	def compileCoord(self, axisTags):
		result = []
		for axis in axisTags:
			_minValue, value, _maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			result.append(struct.pack(">h", fl2fi(value, 14)))
		return bytesjoin(result)

	def compileIntermediateCoord(self, axisTags):
		needed = False
		for axis in axisTags:
			minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
			defaultMaxValue = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
			if (minValue != defaultMinValue) or (maxValue != defaultMaxValue):
				needed = True
				break
		if not needed:
			return None
		minCoords = []
		maxCoords = []
		for axis in axisTags:
			minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
			minCoords.append(struct.pack(">h", fl2fi(minValue, 14)))
			maxCoords.append(struct.pack(">h", fl2fi(maxValue, 14)))
		return bytesjoin(minCoords + maxCoords)

	@staticmethod
	def decompileCoord_(axisTags, data, offset):
		coord = {}
		pos = offset
		for axis in axisTags:
			coord[axis] = fi2fl(struct.unpack(">h", data[pos:pos+2])[0], 14)
			pos += 2
		return coord, pos

	@staticmethod
	def compilePoints(points, numPointsInGlyph):
		# If the set consists of all points in the glyph, it gets encoded with
		# a special encoding: a single zero byte.
		if len(points) == numPointsInGlyph:
			return b"\0"

		# In the 'gvar' table, the packing of point numbers is a little surprising.
		# It consists of multiple runs, each being a delta-encoded list of integers.
		# For example, the point set {17, 18, 19, 20, 21, 22, 23} gets encoded as
		# [6, 17, 1, 1, 1, 1, 1, 1]. The first value (6) is the run length minus 1.
		# There are two types of runs, with values being either 8 or 16 bit unsigned
		# integers.
		points = list(points)
		points.sort()
		numPoints = len(points)

		# The binary representation starts with the total number of points in the set,
		# encoded into one or two bytes depending on the value.
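		# An illustrative worked example of the packing produced below: the
		# sparse point set {2, 5, 261} becomes 03 01 02 03 80 01 00, that is,
		# a one-byte count (03), a byte-sized run of two deltas (header 01,
		# deltas 02 03), and a word-sized run of one delta (header 80,
		# delta 0100 = 256).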
		if numPoints < 0x80:
			result = [bytechr(numPoints)]
		else:
			result = [bytechr((numPoints >> 8) | 0x80) + bytechr(numPoints & 0xff)]

		MAX_RUN_LENGTH = 127
		pos = 0
		lastValue = 0
		while pos < numPoints:
			run = io.BytesIO()
			runLength = 0
			useByteEncoding = None
			while pos < numPoints and runLength <= MAX_RUN_LENGTH:
				curValue = points[pos]
				delta = curValue - lastValue
				if useByteEncoding is None:
					useByteEncoding = 0 <= delta <= 0xff
				if useByteEncoding and (delta > 0xff or delta < 0):
					# we need to start a new run (which will not use byte encoding)
					break
				# TODO This never switches back to a byte-encoding from a short-encoding.
				# That's suboptimal.
				if useByteEncoding:
					run.write(bytechr(delta))
				else:
					run.write(bytechr(delta >> 8))
					run.write(bytechr(delta & 0xff))
				lastValue = curValue
				pos += 1
				runLength += 1
			if useByteEncoding:
				runHeader = bytechr(runLength - 1)
			else:
				runHeader = bytechr((runLength - 1) | POINTS_ARE_WORDS)
			result.append(runHeader)
			result.append(run.getvalue())

		return bytesjoin(result)

	@staticmethod
	def decompilePoints_(numPoints, data, offset, tableTag):
		"""(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
		assert tableTag in ('cvar', 'gvar')
		pos = offset
		numPointsInData = byteord(data[pos])
		pos += 1
		if (numPointsInData & POINTS_ARE_WORDS) != 0:
			numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | byteord(data[pos])
			pos += 1
		if numPointsInData == 0:
			return (range(numPoints), pos)

		result = []
		while len(result) < numPointsInData:
			runHeader = byteord(data[pos])
			pos += 1
			numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
			point = 0
			if (runHeader & POINTS_ARE_WORDS) != 0:
				points = array.array("H")
				pointsSize = numPointsInRun * 2
			else:
				points = array.array("B")
				pointsSize = numPointsInRun
			points.frombytes(data[pos:pos+pointsSize])
			if sys.byteorder != "big": points.byteswap()

			assert len(points) == numPointsInRun
			pos += pointsSize

			result.extend(points)

		# Convert relative to absolute
		absolute = []
		current = 0
		for delta in result:
			current += delta
			absolute.append(current)
		result = absolute
		del absolute

		badPoints = {str(p) for p in result if p < 0 or p >= numPoints}
		if badPoints:
			log.warning("point %s out of range in '%s' table" %
				(",".join(sorted(badPoints)), tableTag))
		return (result, pos)

	def compileDeltas(self, points):
		deltaX = []
		deltaY = []
		for p in sorted(list(points)):
			c = self.coordinates[p]
			if type(c) is tuple and len(c) == 2:
				deltaX.append(c[0])
				deltaY.append(c[1])
			elif type(c) is int:
				deltaX.append(c)
			elif c is not None:
				raise TypeError("invalid type of delta: %s" % type(c))
		return self.compileDeltaValues_(deltaX) + self.compileDeltaValues_(deltaY)

	@staticmethod
	def compileDeltaValues_(deltas):
		"""[value1, value2, value3, ...] --> bytestring

		Emits a sequence of runs. Each run starts with a
		byte-sized header whose 6 least significant bits
		(header & 0x3F) indicate how many values are encoded
		in this run. The stored length is the actual length
		minus one; run lengths are thus in the range [1..64].
		If the header byte has its most significant bit (0x80)
		set, all values in this run are zero, and no data
		follows. Otherwise, the header byte is followed by
		((header & 0x3F) + 1) signed values. If (header &
		0x40) is clear, the delta values are stored as signed
		bytes; if (header & 0x40) is set, the delta values are
		signed 16-bit integers.
		"""  # Explaining the format because the 'gvar' spec is hard to understand.
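		# An illustrative worked example of this encoding: the deltas
		# [0, 0, 0, 258, -127, 0, 10] become 82 40 01 02 02 81 00 0A, i.e.
		# a run of three zeroes (header 82), a one-value word run (header 40,
		# value 0102), and a three-value byte run (header 02, values 81 00 0A).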
		stream = io.BytesIO()
		pos = 0
		while pos < len(deltas):
			value = deltas[pos]
			if value == 0:
				pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, stream)
			elif value >= -128 and value <= 127:
				pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, stream)
			else:
				pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, stream)
		return stream.getvalue()

	@staticmethod
	def encodeDeltaRunAsZeroes_(deltas, offset, stream):
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64 and deltas[pos] == 0:
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(DELTAS_ARE_ZERO | (runLength - 1)))
		return pos

	@staticmethod
	def encodeDeltaRunAsBytes_(deltas, offset, stream):
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64:
			value = deltas[pos]
			if value < -128 or value > 127:
				break
			# Within a byte-encoded run of deltas, a single zero is best
			# stored literally as the value 0x00. However, if there are
			# two or more zeroes in a sequence, it is better to start a
			# new run. For example, the sequence of deltas [15, 15, 0, 15, 15]
			# becomes 6 bytes (04 0F 0F 00 0F 0F) when storing the zero value
			# literally, but 7 bytes (01 0F 0F 80 01 0F 0F)
			# when starting a new run.
			if value == 0 and pos+1 < numDeltas and deltas[pos+1] == 0:
				break
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(runLength - 1))
		for i in range(offset, pos):
			stream.write(struct.pack('b', otRound(deltas[i])))
		return pos

	@staticmethod
	def encodeDeltaRunAsWords_(deltas, offset, stream):
		runLength = 0
		pos = offset
		numDeltas = len(deltas)
		while pos < numDeltas and runLength < 64:
			value = deltas[pos]
			# Within a word-encoded run of deltas, it is easiest
			# to start a new run (with a different encoding)
			# whenever we encounter a zero value. For example,
			# the sequence [0x6666, 0, 0x7777] needs 7 bytes when
			# storing the zero literally (42 66 66 00 00 77 77),
			# and equally 7 bytes when starting a new run
			# (40 66 66 80 40 77 77).
			if value == 0:
				break

			# Within a word-encoded run of deltas, a single value
			# in the range (-128..127) should be encoded literally
			# because it is more compact. For example, the sequence
			# [0x6666, 2, 0x7777] becomes 7 bytes when storing
			# the value literally (42 66 66 00 02 77 77), but 8 bytes
			# when starting a new run (40 66 66 00 02 40 77 77).
			isByteEncodable = lambda value: value >= -128 and value <= 127
			if isByteEncodable(value) and pos+1 < numDeltas and isByteEncodable(deltas[pos+1]):
				break
			pos += 1
			runLength += 1
		assert runLength >= 1 and runLength <= 64
		stream.write(bytechr(DELTAS_ARE_WORDS | (runLength - 1)))
		for i in range(offset, pos):
			stream.write(struct.pack('>h', otRound(deltas[i])))
		return pos

	@staticmethod
	def decompileDeltas_(numDeltas, data, offset):
		"""(numDeltas, data, offset) --> ([delta, delta, ...], newOffset)"""
		result = []
		pos = offset
		while len(result) < numDeltas:
			runHeader = byteord(data[pos])
			pos += 1
			numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
			if (runHeader & DELTAS_ARE_ZERO) != 0:
				result.extend([0] * numDeltasInRun)
			else:
				if (runHeader & DELTAS_ARE_WORDS) != 0:
					deltas = array.array("h")
					deltasSize = numDeltasInRun * 2
				else:
					deltas = array.array("b")
					deltasSize = numDeltasInRun
				deltas.frombytes(data[pos:pos+deltasSize])
				if sys.byteorder != "big": deltas.byteswap()
				assert len(deltas) == numDeltasInRun
				pos += deltasSize
				result.extend(deltas)
		assert len(result) == numDeltas
		return (result, pos)

	@staticmethod
	def getTupleSize_(flags, axisCount):
		size = 4
		if (flags & EMBEDDED_PEAK_TUPLE) != 0:
			size += axisCount * 2
		if (flags & INTERMEDIATE_REGION) != 0:
			size += axisCount * 4
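		# For example, with both EMBEDDED_PEAK_TUPLE and INTERMEDIATE_REGION
		# set and axisCount == 3, the tuple header takes 4 + 3*2 + 3*4 = 22 bytes.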
		return size

	def getCoordWidth(self):
		"""Return 2 if coordinates are (x, y) as in gvar, 1 if single values
		as in cvar, or 0 if empty.
		"""
		firstDelta = next((c for c in self.coordinates if c is not None), None)
		if firstDelta is None:
			return 0  # empty or has no impact
		if type(firstDelta) in (int, float):
			return 1
		if type(firstDelta) is tuple and len(firstDelta) == 2:
			return 2
		raise TypeError(
			"invalid type of delta; expected (int or float) number, or "
			"Tuple[number, number]: %r" % firstDelta
		)

	def scaleDeltas(self, scalar):
		if scalar == 1.0:
			return  # no change
		coordWidth = self.getCoordWidth()
		self.coordinates = [
			None
			if d is None
			else d * scalar
			if coordWidth == 1
			else (d[0] * scalar, d[1] * scalar)
			for d in self.coordinates
		]

	def roundDeltas(self):
		coordWidth = self.getCoordWidth()
		self.coordinates = [
			None
			if d is None
			else otRound(d)
			if coordWidth == 1
			else (otRound(d[0]), otRound(d[1]))
			for d in self.coordinates
		]

	def calcInferredDeltas(self, origCoords, endPts):
		from fontTools.varLib.iup import iup_delta

		if self.getCoordWidth() == 1:
			raise TypeError(
				"Only 'gvar' TupleVariation can have inferred deltas"
			)
		if None in self.coordinates:
			if len(self.coordinates) != len(origCoords):
				raise ValueError(
					"Expected len(origCoords) == %d; found %d"
					% (len(self.coordinates), len(origCoords))
				)
			self.coordinates = iup_delta(self.coordinates, origCoords, endPts)

	def optimize(self, origCoords, endPts, tolerance=0.5, isComposite=False):
		from fontTools.varLib.iup import iup_delta_optimize

		if None in self.coordinates:
			return  # already optimized

		deltaOpt = iup_delta_optimize(
			self.coordinates, origCoords, endPts, tolerance=tolerance
		)
		if None in deltaOpt:
			if isComposite and all(d is None for d in deltaOpt):
				# Fix for macOS composites
				# https://github.com/fonttools/fonttools/issues/1381
				deltaOpt = [(0, 0)] + [None] * (len(deltaOpt) - 1)
			# Use "optimized" version only if smaller...
			varOpt = TupleVariation(self.axes, deltaOpt)

			# Shouldn't matter that this is different from fvar...?
			axisTags = sorted(self.axes.keys())
			tupleData, auxData, _ = self.compile(axisTags, [], None)
			unoptimizedLength = len(tupleData) + len(auxData)
			tupleData, auxData, _ = varOpt.compile(axisTags, [], None)
			optimizedLength = len(tupleData) + len(auxData)

			if optimizedLength < unoptimizedLength:
				self.coordinates = varOpt.coordinates

	def __iadd__(self, other):
		if not isinstance(other, TupleVariation):
			return NotImplemented
		deltas1 = self.coordinates
		length = len(deltas1)
		deltas2 = other.coordinates
		if len(deltas2) != length:
			raise ValueError(
				"cannot sum TupleVariation deltas with different lengths"
			)
		# 'None' values have different meanings in gvar vs cvar TupleVariations:
		# within the gvar, when deltas are not provided explicitly for some points,
		# they need to be inferred; whereas for the 'cvar' table, if deltas are not
		# provided for some CVT values, then no adjustments are made (i.e. None == 0).
		# Thus, we cannot sum deltas for gvar TupleVariations if they contain
		# inferred deltas (the latter need to be computed first using the
		# 'calcInferredDeltas' method), but we can treat 'None' values in cvar
		# deltas as if they are zeros.
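		# For example (illustrative values), summing two cvar-style variations
		# treats None as "no adjustment":
		#     a = TupleVariation(axes, [1, 2, None])
		#     b = TupleVariation(axes, [None, 3, 4])
		#     a += b    # a.coordinates is now [1, 5, 4]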
		if self.getCoordWidth() == 2:
			for i, d2 in zip(range(length), deltas2):
				d1 = deltas1[i]
				try:
					deltas1[i] = (d1[0] + d2[0], d1[1] + d2[1])
				except TypeError:
					raise ValueError(
						"cannot sum gvar deltas with inferred points"
					)
		else:
			for i, d2 in zip(range(length), deltas2):
				d1 = deltas1[i]
				if d1 is not None and d2 is not None:
					deltas1[i] = d1 + d2
				elif d1 is None and d2 is not None:
					deltas1[i] = d2
				# elif d2 is None do nothing
		return self


def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
	result = []
	for _ in range(sharedTupleCount):
		t, offset = TupleVariation.decompileCoord_(axisTags, data, offset)
		result.append(t)
	return result


def compileSharedTuples(axisTags, variations):
	coordCount = {}
	for var in variations:
		coord = var.compileCoord(axisTags)
		coordCount[coord] = coordCount.get(coord, 0) + 1
	sharedCoords = [(count, coord)
		for (coord, count) in coordCount.items() if count > 1]
	sharedCoords.sort(reverse=True)
	MAX_NUM_SHARED_COORDS = TUPLE_INDEX_MASK + 1
	sharedCoords = sharedCoords[:MAX_NUM_SHARED_COORDS]
	return [c[1] for c in sharedCoords]  # Strip off counts.


def compileTupleVariationStore(variations, pointCount,
		axisTags, sharedTupleIndices,
		useSharedPoints=True):
	variations = [v for v in variations if v.hasImpact()]
	if len(variations) == 0:
		return (0, b"", b"")

	# Each glyph variation tuple modifies a set of control points. To
	# indicate which exact points are getting modified, a single tuple
	# can either refer to a shared set of points, or the tuple can
	# supply its private point numbers. Because the impact of sharing
	# can be positive (no need for a private point list) or negative
	# (need to supply 0,0 deltas for unused points), it is not obvious
	# how to determine which tuples should take their points from the
	# shared pool versus have their own. Perhaps we should resort to
	# brute force, and try all combinations? However, if a glyph has n
	# variation tuples, we would need to try 2^n combinations (because
	# each tuple may or may not be part of the shared set). How many
	# variation tuples do glyphs have?
	#
	#   Skia.ttf: {3: 1, 5: 11, 6: 41, 7: 62, 8: 387, 13: 1, 14: 3}
	#   JamRegular.ttf: {3: 13, 4: 122, 5: 1, 7: 4, 8: 1, 9: 1, 10: 1}
	#   BuffaloGalRegular.ttf: {1: 16, 2: 13, 4: 2, 5: 4, 6: 19, 7: 1, 8: 3, 9: 8}
	#
	# (Reading example: In Skia.ttf, 41 glyphs have 6 variation tuples).
	#
	# Is this even worth optimizing? If we never use a shared point
	# list, the private lists will consume 112K for Skia, 5K for
	# BuffaloGalRegular, and 15K for JamRegular. If we always use a
	# shared point list, the shared lists will consume 16K for Skia,
	# 3K for BuffaloGalRegular, and 10K for JamRegular. However, in
	# the latter case the delta arrays will become larger, but I
	# haven't yet measured by how much. From gut feeling (which may be
	# wrong), the optimum is to share some but not all points;
	# however, then we would need to try all combinations.
	#
	# For the time being, we try two variants and then pick the better one:
	# (a) each tuple supplies its own private set of points;
	# (b) all tuples refer to a shared set of points, which consists of
	#     "every control point in the glyph that has explicit deltas".
	usedPoints = set()
	for v in variations:
		usedPoints |= v.getUsedPoints()
	tuples = []
	data = []
	someTuplesSharePoints = False
	sharedPointVariation = None  # To keep track of a variation that uses shared points
	for v in variations:
		privateTuple, privateData, _ = v.compile(
			axisTags, sharedTupleIndices, sharedPoints=None)
		sharedTuple, sharedData, usesSharedPoints = v.compile(
			axisTags, sharedTupleIndices, sharedPoints=usedPoints)
		if useSharedPoints and (len(sharedTuple) + len(sharedData)) < (len(privateTuple) + len(privateData)):
			tuples.append(sharedTuple)
			data.append(sharedData)
			someTuplesSharePoints |= usesSharedPoints
			sharedPointVariation = v
		else:
			tuples.append(privateTuple)
			data.append(privateData)
	if someTuplesSharePoints:
		# Use the last of the variations that share points for compiling the packed point data
		data = sharedPointVariation.compilePoints(usedPoints, len(sharedPointVariation.coordinates)) + bytesjoin(data)
		tupleVariationCount = TUPLES_SHARE_POINT_NUMBERS | len(tuples)
	else:
		data = bytesjoin(data)
		tupleVariationCount = len(tuples)
	tuples = bytesjoin(tuples)
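	# The returned triple corresponds to the tupleVariationCount field (with
	# TUPLES_SHARE_POINT_NUMBERS folded in when shared points are used), the
	# concatenated tuple headers, and the serialized data block (shared point
	# numbers, if any, followed by each tuple's private point numbers and
	# deltas).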
	return tupleVariationCount, tuples, data


def decompileTupleVariationStore(tableTag, axisTags,
		tupleVariationCount, pointCount, sharedTuples,
		data, pos, dataPos):
	numAxes = len(axisTags)
	result = []
	if (tupleVariationCount & TUPLES_SHARE_POINT_NUMBERS) != 0:
		sharedPoints, dataPos = TupleVariation.decompilePoints_(
			pointCount, data, dataPos, tableTag)
	else:
		sharedPoints = []
	for _ in range(tupleVariationCount & TUPLE_COUNT_MASK):
		dataSize, flags = struct.unpack(">HH", data[pos:pos+4])
		tupleSize = TupleVariation.getTupleSize_(flags, numAxes)
		tupleData = data[pos : pos + tupleSize]
		pointDeltaData = data[dataPos : dataPos + dataSize]
		result.append(decompileTupleVariation_(
			pointCount, sharedTuples, sharedPoints,
			tableTag, axisTags, tupleData, pointDeltaData))
		pos += tupleSize
		dataPos += dataSize
	return result


def decompileTupleVariation_(pointCount, sharedTuples, sharedPoints,
		tableTag, axisTags, data, tupleData):
	assert tableTag in ("cvar", "gvar"), tableTag
	flags = struct.unpack(">H", data[2:4])[0]
	pos = 4
	if (flags & EMBEDDED_PEAK_TUPLE) == 0:
		peak = sharedTuples[flags & TUPLE_INDEX_MASK]
	else:
		peak, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
	if (flags & INTERMEDIATE_REGION) != 0:
		start, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
		end, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
	else:
		start, end = inferRegion_(peak)
	axes = {}
	for axis in axisTags:
		region = start[axis], peak[axis], end[axis]
		if region != (0.0, 0.0, 0.0):
			axes[axis] = region
	pos = 0
	if (flags & PRIVATE_POINT_NUMBERS) != 0:
		points, pos = TupleVariation.decompilePoints_(
			pointCount, tupleData, pos, tableTag)
	else:
		points = sharedPoints

	deltas = [None] * pointCount

	if tableTag == "cvar":
		deltas_cvt, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		for p, delta in zip(points, deltas_cvt):
			if 0 <= p < pointCount:
				deltas[p] = delta

	elif tableTag == "gvar":
		deltas_x, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		deltas_y, pos = TupleVariation.decompileDeltas_(
			len(points), tupleData, pos)
		for p, x, y in zip(points, deltas_x, deltas_y):
			if 0 <= p < pointCount:
				deltas[p] = (x, y)

	return TupleVariation(axes, deltas)


def inferRegion_(peak):
	"""Infer start and end for a (non-intermediate) region

	This helper function computes the applicability region for
	variation tuples whose INTERMEDIATE_REGION flag is not set in the
	TupleVariationHeader structure. Variation tuples apply only to
	certain regions of the variation space; outside that region, the
	tuple has no effect. To make the binary encoding more compact,
	TupleVariationHeaders can omit the intermediateStartTuple and
	intermediateEndTuple fields.
	"""
	start, end = {}, {}
	for (axis, value) in peak.items():
		start[axis] = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
		end[axis] = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
	return (start, end)