from fontTools.misc.fixedTools import (
    fixedToFloat as fi2fl,
    floatToFixed as fl2fi,
    floatToFixedToStr as fl2str,
    strToFixedToFloat as str2fl,
    otRound,
)
from fontTools.misc.textTools import safeEval
import array
from collections import Counter, defaultdict
import io
import logging
import struct
import sys


# https://www.microsoft.com/typography/otspec/otvarcommonformats.htm

EMBEDDED_PEAK_TUPLE = 0x8000
INTERMEDIATE_REGION = 0x4000
PRIVATE_POINT_NUMBERS = 0x2000

DELTAS_ARE_ZERO = 0x80
DELTAS_ARE_WORDS = 0x40
DELTA_RUN_COUNT_MASK = 0x3f

POINTS_ARE_WORDS = 0x80
POINT_RUN_COUNT_MASK = 0x7f

TUPLES_SHARE_POINT_NUMBERS = 0x8000
TUPLE_COUNT_MASK = 0x0fff
TUPLE_INDEX_MASK = 0x0fff

log = logging.getLogger(__name__)


class TupleVariation(object):

    def __init__(self, axes, coordinates):
        self.axes = axes.copy()
        self.coordinates = list(coordinates)

    def __repr__(self):
        axes = ",".join(sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()]))
        return "<TupleVariation %s %s>" % (axes, self.coordinates)

    def __eq__(self, other):
        return self.coordinates == other.coordinates and self.axes == other.axes

    def getUsedPoints(self):
        # Empty set means "all points used".
        if None not in self.coordinates:
            return frozenset()
        used = frozenset([i for i, p in enumerate(self.coordinates) if p is not None])
        # Return None if no points used.
        return used if used else None

    def hasImpact(self):
        """Returns True if this TupleVariation has any visible impact.

        If the result is False, the TupleVariation can be omitted from the font
        without making any visible difference.
        """
        return any(c is not None for c in self.coordinates)

    def toXML(self, writer, axisTags):
        writer.begintag("tuple")
        writer.newline()
        for axis in axisTags:
            value = self.axes.get(axis)
            if value is not None:
                minValue, value, maxValue = value
                defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
                defaultMaxValue = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
                if minValue == defaultMinValue and maxValue == defaultMaxValue:
                    writer.simpletag("coord", axis=axis, value=fl2str(value, 14))
                else:
                    attrs = [
                        ("axis", axis),
                        ("min", fl2str(minValue, 14)),
                        ("value", fl2str(value, 14)),
                        ("max", fl2str(maxValue, 14)),
                    ]
                    writer.simpletag("coord", attrs)
                writer.newline()
        wrote_any_deltas = False
        for i, delta in enumerate(self.coordinates):
            if type(delta) == tuple and len(delta) == 2:
                writer.simpletag("delta", pt=i, x=delta[0], y=delta[1])
                writer.newline()
                wrote_any_deltas = True
            elif type(delta) == int:
                writer.simpletag("delta", cvt=i, value=delta)
                writer.newline()
                wrote_any_deltas = True
            elif delta is not None:
                log.error("bad delta format")
                writer.comment("bad delta #%d" % i)
                writer.newline()
                wrote_any_deltas = True
        if not wrote_any_deltas:
            writer.comment("no deltas")
            writer.newline()
        writer.endtag("tuple")
        writer.newline()

    def fromXML(self, name, attrs, _content):
        if name == "coord":
            axis = attrs["axis"]
            value = str2fl(attrs["value"], 14)
            defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
            defaultMaxValue = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
            minValue = str2fl(attrs.get("min", defaultMinValue), 14)
            maxValue = str2fl(attrs.get("max", defaultMaxValue), 14)
            self.axes[axis] = (minValue, value, maxValue)
        elif name == "delta":
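            # Two delta encodings are accepted here, mirroring toXML() above:
            # 'gvar'-style point deltas carry "pt", "x" and "y" attributes,
            # while 'cvar'-style CVT deltas carry "cvt" and "value".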
            if "pt" in attrs:
                point = safeEval(attrs["pt"])
                x = safeEval(attrs["x"])
                y = safeEval(attrs["y"])
                self.coordinates[point] = (x, y)
            elif "cvt" in attrs:
                cvt = safeEval(attrs["cvt"])
                value = safeEval(attrs["value"])
                self.coordinates[cvt] = value
            else:
                log.warning("bad delta format: %s" %
                            ", ".join(sorted(attrs.keys())))

    def compile(self, axisTags, sharedCoordIndices={}, pointData=None):
        assert set(self.axes.keys()) <= set(axisTags), ("Unknown axis tag found.", self.axes.keys(), axisTags)

        tupleData = []
        auxData = []

        if pointData is None:
            usedPoints = self.getUsedPoints()
            if usedPoints is None:  # Nothing to encode
                return b'', b''
            pointData = self.compilePoints(usedPoints)

        coord = self.compileCoord(axisTags)
        flags = sharedCoordIndices.get(coord)
        if flags is None:
            flags = EMBEDDED_PEAK_TUPLE
            tupleData.append(coord)

        intermediateCoord = self.compileIntermediateCoord(axisTags)
        if intermediateCoord is not None:
            flags |= INTERMEDIATE_REGION
            tupleData.append(intermediateCoord)

        # pointData of b'' implies "use shared points".
        if pointData:
            flags |= PRIVATE_POINT_NUMBERS
            auxData.append(pointData)

        auxData.append(self.compileDeltas())
        auxData = b''.join(auxData)

        tupleData.insert(0, struct.pack('>HH', len(auxData), flags))
        return b''.join(tupleData), auxData

    def compileCoord(self, axisTags):
        result = bytearray()
        axes = self.axes
        for axis in axisTags:
            triple = axes.get(axis)
            if triple is None:
                result.extend(b'\0\0')
            else:
                result.extend(struct.pack(">h", fl2fi(triple[1], 14)))
        return bytes(result)

    def compileIntermediateCoord(self, axisTags):
        needed = False
        for axis in axisTags:
            minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
            defaultMinValue = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
            defaultMaxValue = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
            if (minValue != defaultMinValue) or (maxValue != defaultMaxValue):
                needed = True
                break
        if not needed:
            return None
        minCoords = bytearray()
        maxCoords = bytearray()
        for axis in axisTags:
            minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0))
            minCoords.extend(struct.pack(">h", fl2fi(minValue, 14)))
            maxCoords.extend(struct.pack(">h", fl2fi(maxValue, 14)))
        return minCoords + maxCoords

    @staticmethod
    def decompileCoord_(axisTags, data, offset):
        coord = {}
        pos = offset
        for axis in axisTags:
            coord[axis] = fi2fl(struct.unpack(">h", data[pos:pos+2])[0], 14)
            pos += 2
        return coord, pos

    @staticmethod
    def compilePoints(points):
        # If the set consists of all points in the glyph, it gets encoded with
        # a special encoding: a single zero byte.
        #
        # To use this optimization, the points passed in must be an empty set.
        # The following two lines are not strictly necessary, as the main code
        # below would emit the same bytes, but this case is the most common
        # and handling it here is faster.
        if not points:
            return b'\0'

        # In the 'gvar' table, the packing of point numbers is a little surprising.
        # It consists of multiple runs, each being a delta-encoded list of integers.
        # For example, the point set {17, 18, 19, 20, 21, 22, 23} gets encoded as
        # [6, 17, 1, 1, 1, 1, 1, 1]. The first value (6) is the run length minus 1.
        # There are two types of runs, with values being either 8 or 16 bit unsigned
        # integers.
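        # For instance, the seven-point set above compiles to the byte string
        # b'\x07\x06\x11\x01\x01\x01\x01\x01\x01': a count byte (7 points),
        # a run header (run length 7, byte-sized values), the first point
        # number (17), and six deltas of 1.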
        points = list(points)
        points.sort()
        numPoints = len(points)

        result = bytearray()
        # The binary representation starts with the total number of points in the set,
        # encoded into one or two bytes depending on the value.
        if numPoints < 0x80:
            result.append(numPoints)
        else:
            result.append((numPoints >> 8) | 0x80)
            result.append(numPoints & 0xff)

        MAX_RUN_LENGTH = 127
        pos = 0
        lastValue = 0
        while pos < numPoints:
            runLength = 0

            headerPos = len(result)
            result.append(0)

            useByteEncoding = None
            while pos < numPoints and runLength <= MAX_RUN_LENGTH:
                curValue = points[pos]
                delta = curValue - lastValue
                if useByteEncoding is None:
                    useByteEncoding = 0 <= delta <= 0xff
                if useByteEncoding and (delta > 0xff or delta < 0):
                    # we need to start a new run (which will not use byte encoding)
                    break
                # TODO This never switches back to a byte-encoding from a word-encoding.
                # That's suboptimal.
                if useByteEncoding:
                    result.append(delta)
                else:
                    result.append(delta >> 8)
                    result.append(delta & 0xff)
                lastValue = curValue
                pos += 1
                runLength += 1
            if useByteEncoding:
                result[headerPos] = runLength - 1
            else:
                result[headerPos] = (runLength - 1) | POINTS_ARE_WORDS

        return result

    @staticmethod
    def decompilePoints_(numPoints, data, offset, tableTag):
        """(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
        assert tableTag in ('cvar', 'gvar')
        pos = offset
        numPointsInData = data[pos]
        pos += 1
        if (numPointsInData & POINTS_ARE_WORDS) != 0:
            numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | data[pos]
            pos += 1
        if numPointsInData == 0:
            return (range(numPoints), pos)

        result = []
        while len(result) < numPointsInData:
            runHeader = data[pos]
            pos += 1
            numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
            point = 0
            if (runHeader & POINTS_ARE_WORDS) != 0:
                points = array.array("H")
                pointsSize = numPointsInRun * 2
            else:
                points = array.array("B")
                pointsSize = numPointsInRun
            points.frombytes(data[pos:pos+pointsSize])
            if sys.byteorder != "big": points.byteswap()

            assert len(points) == numPointsInRun
            pos += pointsSize

            result.extend(points)

        # Convert relative to absolute
        absolute = []
        current = 0
        for delta in result:
            current += delta
            absolute.append(current)
        result = absolute
        del absolute

        badPoints = {str(p) for p in result if p < 0 or p >= numPoints}
        if badPoints:
            log.warning("point %s out of range in '%s' table" %
                        (",".join(sorted(badPoints)), tableTag))
        return (result, pos)

    def compileDeltas(self):
        deltaX = []
        deltaY = []
        if self.getCoordWidth() == 2:
            for c in self.coordinates:
                if c is None:
                    continue
                deltaX.append(c[0])
                deltaY.append(c[1])
        else:
            for c in self.coordinates:
                if c is None:
                    continue
                deltaX.append(c)
        bytearr = bytearray()
        self.compileDeltaValues_(deltaX, bytearr)
        self.compileDeltaValues_(deltaY, bytearr)
        return bytearr

    @staticmethod
    def compileDeltaValues_(deltas, bytearr=None):
        """[value1, value2, value3, ...] --> bytearray

        Emits a sequence of runs. Each run starts with a
        byte-sized header whose 6 least significant bits
        (header & 0x3F) indicate how many values are encoded
        in this run.
        The stored length is the actual length
        minus one; run lengths are thus in the range [1..64].
        If the header byte has its most significant bit (0x80)
        set, all values in this run are zero, and no data
        follows. Otherwise, the header byte is followed by
        ((header & 0x3F) + 1) signed values. If (header &
        0x40) is clear, the delta values are stored as signed
        bytes; if (header & 0x40) is set, the delta values are
        signed 16-bit integers.
        """  # Explaining the format because the 'gvar' spec is hard to understand.
        if bytearr is None:
            bytearr = bytearray()
        pos = 0
        numDeltas = len(deltas)
        while pos < numDeltas:
            value = deltas[pos]
            if value == 0:
                pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, bytearr)
            elif -128 <= value <= 127:
                pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, bytearr)
            else:
                pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, bytearr)
        return bytearr

    @staticmethod
    def encodeDeltaRunAsZeroes_(deltas, offset, bytearr):
        pos = offset
        numDeltas = len(deltas)
        while pos < numDeltas and deltas[pos] == 0:
            pos += 1
        runLength = pos - offset
        while runLength >= 64:
            bytearr.append(DELTAS_ARE_ZERO | 63)
            runLength -= 64
        if runLength:
            bytearr.append(DELTAS_ARE_ZERO | (runLength - 1))
        return pos

    @staticmethod
    def encodeDeltaRunAsBytes_(deltas, offset, bytearr):
        pos = offset
        numDeltas = len(deltas)
        while pos < numDeltas:
            value = deltas[pos]
            if not (-128 <= value <= 127):
                break
            # Within a byte-encoded run of deltas, a single zero
            # is best stored literally as 0x00 value. However,
            # if there are two or more zeroes in a sequence, it is
            # better to start a new run. For example, the sequence
            # of deltas [15, 15, 0, 15, 15] becomes 6 bytes
            # (04 0F 0F 00 0F 0F) when storing the zero value
            # literally, but 7 bytes (01 0F 0F 80 01 0F 0F)
            # when starting a new run.
            if value == 0 and pos + 1 < numDeltas and deltas[pos + 1] == 0:
                break
            pos += 1
        runLength = pos - offset
        while runLength >= 64:
            bytearr.append(63)
            bytearr.extend(array.array('b', deltas[offset:offset + 64]))
            offset += 64
            runLength -= 64
        if runLength:
            bytearr.append(runLength - 1)
            bytearr.extend(array.array('b', deltas[offset:pos]))
        return pos

    @staticmethod
    def encodeDeltaRunAsWords_(deltas, offset, bytearr):
        pos = offset
        numDeltas = len(deltas)
        while pos < numDeltas:
            value = deltas[pos]
            # Within a word-encoded run of deltas, it is easiest
            # to start a new run (with a different encoding)
            # whenever we encounter a zero value. For example,
            # the sequence [0x6666, 0, 0x7777] needs 7 bytes when
            # storing the zero literally (42 66 66 00 00 77 77),
            # and equally 7 bytes when starting a new run
            # (40 66 66 80 40 77 77).
            if value == 0:
                break

            # Within a word-encoded run of deltas, a single value
            # in the range (-128..127) should be encoded literally
            # because it is more compact. For example, the sequence
            # [0x6666, 2, 0x7777] becomes 7 bytes when storing
            # the value literally (42 66 66 00 02 77 77), but 8 bytes
            # when starting a new run (40 66 66 00 02 40 77 77).
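            # Two or more byte-sized values in a row, however, are never more
            # expensive (and often cheaper) to store as a separate
            # byte-encoded run, which is why the check below also looks at
            # the following delta.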
            if (-128 <= value <= 127) and pos + 1 < numDeltas and (-128 <= deltas[pos + 1] <= 127):
                break
            pos += 1
        runLength = pos - offset
        while runLength >= 64:
            bytearr.append(DELTAS_ARE_WORDS | 63)
            a = array.array('h', deltas[offset:offset + 64])
            if sys.byteorder != "big": a.byteswap()
            bytearr.extend(a)
            offset += 64
            runLength -= 64
        if runLength:
            bytearr.append(DELTAS_ARE_WORDS | (runLength - 1))
            a = array.array('h', deltas[offset:pos])
            if sys.byteorder != "big": a.byteswap()
            bytearr.extend(a)
        return pos

    @staticmethod
    def decompileDeltas_(numDeltas, data, offset):
        """(numDeltas, data, offset) --> ([delta, delta, ...], newOffset)"""
        result = []
        pos = offset
        while len(result) < numDeltas:
            runHeader = data[pos]
            pos += 1
            numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
            if (runHeader & DELTAS_ARE_ZERO) != 0:
                result.extend([0] * numDeltasInRun)
            else:
                if (runHeader & DELTAS_ARE_WORDS) != 0:
                    deltas = array.array("h")
                    deltasSize = numDeltasInRun * 2
                else:
                    deltas = array.array("b")
                    deltasSize = numDeltasInRun
                deltas.frombytes(data[pos:pos+deltasSize])
                if sys.byteorder != "big": deltas.byteswap()
                assert len(deltas) == numDeltasInRun
                pos += deltasSize
                result.extend(deltas)
        assert len(result) == numDeltas
        return (result, pos)

    @staticmethod
    def getTupleSize_(flags, axisCount):
        size = 4
        if (flags & EMBEDDED_PEAK_TUPLE) != 0:
            size += axisCount * 2
        if (flags & INTERMEDIATE_REGION) != 0:
            size += axisCount * 4
        return size

    def getCoordWidth(self):
        """Return 2 if coordinates are (x, y) as in gvar, 1 if single values
        as in cvar, or 0 if empty.
        """
        firstDelta = next((c for c in self.coordinates if c is not None), None)
        if firstDelta is None:
            return 0  # empty or has no impact
        if type(firstDelta) in (int, float):
            return 1
        if type(firstDelta) is tuple and len(firstDelta) == 2:
            return 2
        raise TypeError(
            "invalid type of delta; expected (int or float) number, or "
            "Tuple[number, number]: %r" % firstDelta
        )

    def scaleDeltas(self, scalar):
        if scalar == 1.0:
            return  # no change
        coordWidth = self.getCoordWidth()
        self.coordinates = [
            None
            if d is None
            else d * scalar
            if coordWidth == 1
            else (d[0] * scalar, d[1] * scalar)
            for d in self.coordinates
        ]

    def roundDeltas(self):
        coordWidth = self.getCoordWidth()
        self.coordinates = [
            None
            if d is None
            else otRound(d)
            if coordWidth == 1
            else (otRound(d[0]), otRound(d[1]))
            for d in self.coordinates
        ]

    def calcInferredDeltas(self, origCoords, endPts):
        from fontTools.varLib.iup import iup_delta

        if self.getCoordWidth() == 1:
            raise TypeError(
                "Only 'gvar' TupleVariation can have inferred deltas"
            )
        if None in self.coordinates:
            if len(self.coordinates) != len(origCoords):
                raise ValueError(
                    "Expected len(origCoords) == %d; found %d"
                    % (len(self.coordinates), len(origCoords))
                )
            self.coordinates = iup_delta(self.coordinates, origCoords, endPts)

    def optimize(self, origCoords, endPts, tolerance=0.5, isComposite=False):
        from fontTools.varLib.iup import iup_delta_optimize

        if None in self.coordinates:
            return  # already optimized

        deltaOpt = iup_delta_optimize(
            self.coordinates, origCoords, endPts, tolerance=tolerance
        )
        if None in deltaOpt:
            if isComposite and all(d is None for d in deltaOpt):
                # Fix for macOS composites
                # https://github.com/fonttools/fonttools/issues/1381
                deltaOpt = [(0, 0)] + [None] * (len(deltaOpt) - 1)
            # Use "optimized" version only if smaller...
            varOpt = TupleVariation(self.axes, deltaOpt)

            # Shouldn't matter that this is different from fvar...?
            axisTags = sorted(self.axes.keys())
            tupleData, auxData = self.compile(axisTags)
            unoptimizedLength = len(tupleData) + len(auxData)
            tupleData, auxData = varOpt.compile(axisTags)
            optimizedLength = len(tupleData) + len(auxData)

            if optimizedLength < unoptimizedLength:
                self.coordinates = varOpt.coordinates

    def __iadd__(self, other):
        if not isinstance(other, TupleVariation):
            return NotImplemented
        deltas1 = self.coordinates
        length = len(deltas1)
        deltas2 = other.coordinates
        if len(deltas2) != length:
            raise ValueError(
                "cannot sum TupleVariation deltas with different lengths"
            )
        # 'None' values have different meanings in gvar vs cvar TupleVariations:
        # within the gvar, when deltas are not provided explicitly for some points,
        # they need to be inferred; whereas for the 'cvar' table, if deltas are not
        # provided for some CVT values, then no adjustments are made (i.e. None == 0).
        # Thus, we cannot sum deltas for gvar TupleVariations if they contain
        # inferred deltas (these need to be computed first using the
        # 'calcInferredDeltas' method), but we can treat 'None' values in cvar
        # deltas as if they are zeros.
        if self.getCoordWidth() == 2:
            for i, d2 in zip(range(length), deltas2):
                d1 = deltas1[i]
                try:
                    deltas1[i] = (d1[0] + d2[0], d1[1] + d2[1])
                except TypeError:
                    raise ValueError(
                        "cannot sum gvar deltas with inferred points"
                    )
        else:
            for i, d2 in zip(range(length), deltas2):
                d1 = deltas1[i]
                if d1 is not None and d2 is not None:
                    deltas1[i] = d1 + d2
                elif d1 is None and d2 is not None:
                    deltas1[i] = d2
                # elif d2 is None: do nothing
        return self


def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
    result = []
    for _ in range(sharedTupleCount):
        t, offset = TupleVariation.decompileCoord_(axisTags, data, offset)
        result.append(t)
    return result


def compileSharedTuples(axisTags, variations,
                        MAX_NUM_SHARED_COORDS=TUPLE_INDEX_MASK + 1):
    coordCount = Counter()
    for var in variations:
        coord = var.compileCoord(axisTags)
        coordCount[coord] += 1
    # In python < 3.7, most_common() ordering is non-deterministic
    # so apply a sort to make sure the ordering is consistent.
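    # The key sorts by descending use count, breaking ties by the compiled
    # coordinate bytes.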
    sharedCoords = sorted(
        coordCount.most_common(MAX_NUM_SHARED_COORDS),
        key=lambda item: (-item[1], item[0]),
    )
    return [c[0] for c in sharedCoords if c[1] > 1]


def compileTupleVariationStore(variations, pointCount,
                               axisTags, sharedTupleIndices,
                               useSharedPoints=True):
    newVariations = []
    pointDatas = []
    # Compile all points and figure out sharing if desired
    sharedPoints = None

    # Collect, count, and compile point-sets for all variation sets
    pointSetCount = defaultdict(int)
    for v in variations:
        points = v.getUsedPoints()
        if points is None:  # Empty variations
            continue
        pointSetCount[points] += 1
        newVariations.append(v)
        pointDatas.append(points)
    variations = newVariations
    del newVariations

    if not variations:
        return (0, b"", b"")

    n = len(variations[0].coordinates)
    assert all(len(v.coordinates) == n for v in variations), "Variation sets have different sizes"

    compiledPoints = {pointSet: TupleVariation.compilePoints(pointSet)
                      for pointSet in pointSetCount}

    tupleVariationCount = len(variations)
    tuples = []
    data = []

    if useSharedPoints:
        # Find point-set which saves most bytes.
        def key(pn):
            pointSet = pn[0]
            count = pn[1]
            return len(compiledPoints[pointSet]) * (count - 1)
        sharedPoints = max(pointSetCount.items(), key=key)[0]

        data.append(compiledPoints[sharedPoints])
        tupleVariationCount |= TUPLES_SHARE_POINT_NUMBERS

        # b'' implies "use shared points"
        pointDatas = [compiledPoints[points] if points != sharedPoints else b''
                      for points in pointDatas]

    for v, p in zip(variations, pointDatas):
        thisTuple, thisData = v.compile(axisTags, sharedTupleIndices, pointData=p)

        tuples.append(thisTuple)
        data.append(thisData)

    tuples = b''.join(tuples)
    data = b''.join(data)
    return tupleVariationCount, tuples, data


def decompileTupleVariationStore(tableTag, axisTags,
                                 tupleVariationCount, pointCount, sharedTuples,
                                 data, pos, dataPos):
    numAxes = len(axisTags)
    result = []
    if (tupleVariationCount & TUPLES_SHARE_POINT_NUMBERS) != 0:
        sharedPoints, dataPos = TupleVariation.decompilePoints_(
            pointCount, data, dataPos, tableTag)
    else:
        sharedPoints = []
    for _ in range(tupleVariationCount & TUPLE_COUNT_MASK):
        dataSize, flags = struct.unpack(">HH", data[pos:pos+4])
        tupleSize = TupleVariation.getTupleSize_(flags, numAxes)
        tupleData = data[pos:pos + tupleSize]
        pointDeltaData = data[dataPos:dataPos + dataSize]
        result.append(decompileTupleVariation_(
            pointCount, sharedTuples, sharedPoints,
            tableTag, axisTags, tupleData, pointDeltaData))
        pos += tupleSize
        dataPos += dataSize
    return result


def decompileTupleVariation_(pointCount, sharedTuples, sharedPoints,
                             tableTag, axisTags, data, tupleData):
    assert tableTag in ("cvar", "gvar"), tableTag
    flags = struct.unpack(">H", data[2:4])[0]
    pos = 4
    if (flags & EMBEDDED_PEAK_TUPLE) == 0:
        peak = sharedTuples[flags & TUPLE_INDEX_MASK]
    else:
        peak, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
    if (flags & INTERMEDIATE_REGION) != 0:
        start, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
        end, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
    else:
        start, end = inferRegion_(peak)
    axes = {}
    for axis in axisTags:
        region = start[axis], peak[axis], end[axis]
        if region != (0.0, 0.0, 0.0):
            axes[axis] = region
    pos = 0
    if (flags & PRIVATE_POINT_NUMBERS) != 0:
        points, pos = TupleVariation.decompilePoints_(
            pointCount, tupleData, pos, tableTag)
    else:
        points = sharedPoints

    deltas = [None] * pointCount

    if tableTag == "cvar":
        deltas_cvt, pos = TupleVariation.decompileDeltas_(
            len(points), tupleData, pos)
        for p, delta in zip(points, deltas_cvt):
            if 0 <= p < pointCount:
                deltas[p] = delta

    elif tableTag == "gvar":
        deltas_x, pos = TupleVariation.decompileDeltas_(
            len(points), tupleData, pos)
        deltas_y, pos = TupleVariation.decompileDeltas_(
            len(points), tupleData, pos)
        for p, x, y in zip(points, deltas_x, deltas_y):
            if 0 <= p < pointCount:
                deltas[p] = (x, y)

    return TupleVariation(axes, deltas)


def inferRegion_(peak):
    """Infer start and end for a (non-intermediate) region

    This helper function computes the applicability region for
    variation tuples whose INTERMEDIATE_REGION flag is not set in the
    TupleVariationHeader structure. Variation tuples apply only to
    certain regions of the variation space; outside that region, the
    tuple has no effect. To make the binary encoding more compact,
    TupleVariationHeaders can omit the intermediateStartTuple and
    intermediateEndTuple fields.
    """
    start, end = {}, {}
    for (axis, value) in peak.items():
        start[axis] = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
        end[axis] = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
    return (start, end)
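

if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module: round-trip one
    # made-up 'gvar'-style variation through compile() and
    # decompileTupleVariation_() to show how the pieces fit together. The
    # axis tags, region, and delta values below are illustrative only.
    exampleAxisTags = ["wght", "wdth"]
    exampleVar = TupleVariation(
        {"wght": (0.0, 1.0, 1.0)},           # peak at wght=1.0, default min/max
        [(10, 20), (0, 0), (30, 40), None],  # per-point (x, y) deltas; None = inferred
    )
    header, body = exampleVar.compile(exampleAxisTags)
    decoded = decompileTupleVariation_(
        pointCount=4, sharedTuples=[], sharedPoints=[],
        tableTag="gvar", axisTags=exampleAxisTags,
        data=header, tupleData=body)
    assert decoded == exampleVar
    print(decoded)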