from fontTools.misc.roundTools import noRound, otRound
from fontTools.ttLib.tables import otTables as ot
from fontTools.varLib.models import supportScalar
from fontTools.varLib.builder import (buildVarRegionList, buildVarStore,
				      buildVarRegion, buildVarData)
from functools import partial
from collections import defaultdict


NO_VARIATION_INDEX = ot.NO_VARIATION_INDEX
ot.VarStore.NO_VARIATION_INDEX = NO_VARIATION_INDEX


def _getLocationKey(loc):
	return tuple(sorted(loc.items(), key=lambda kv: kv[0]))


class OnlineVarStoreBuilder(object):

	def __init__(self, axisTags):
		self._axisTags = axisTags
		self._regionMap = {}
		self._regionList = buildVarRegionList([], axisTags)
		self._store = buildVarStore(self._regionList, [])
		self._data = None
		self._model = None
		self._supports = None
		self._varDataIndices = {}
		self._varDataCaches = {}
		self._cache = {}

	def setModel(self, model):
		self.setSupports(model.supports)
		self._model = model

	def setSupports(self, supports):
		self._model = None
		self._supports = list(supports)
		if not self._supports[0]:
			del self._supports[0]  # Drop base master support
		self._cache = {}
		self._data = None

	def finish(self, optimize=True):
		self._regionList.RegionCount = len(self._regionList.Region)
		self._store.VarDataCount = len(self._store.VarData)
		for data in self._store.VarData:
			data.ItemCount = len(data.Item)
			data.calculateNumShorts(optimize=optimize)
		return self._store

	def _add_VarData(self):
		regionMap = self._regionMap
		regionList = self._regionList

		regions = self._supports
		regionIndices = []
		for region in regions:
			key = _getLocationKey(region)
			idx = regionMap.get(key)
			if idx is None:
				varRegion = buildVarRegion(region, self._axisTags)
				idx = regionMap[key] = len(regionList.Region)
				regionList.Region.append(varRegion)
			regionIndices.append(idx)

		# Check if we have one already...
		key = tuple(regionIndices)
		varDataIdx = self._varDataIndices.get(key)
		if varDataIdx is not None:
			self._outer = varDataIdx
			self._data = self._store.VarData[varDataIdx]
			self._cache = self._varDataCaches[key]
			if len(self._data.Item) == 0xFFFF:
				# This is full.  Need new one.
				varDataIdx = None

		if varDataIdx is None:
			self._data = buildVarData(regionIndices, [], optimize=False)
			self._outer = len(self._store.VarData)
			self._store.VarData.append(self._data)
			self._varDataIndices[key] = self._outer
			if key not in self._varDataCaches:
				self._varDataCaches[key] = {}
			self._cache = self._varDataCaches[key]

	def storeMasters(self, master_values):
		deltas = self._model.getDeltas(master_values, round=round)
		base = deltas.pop(0)
		return base, self.storeDeltas(deltas, round=noRound)

	def storeDeltas(self, deltas, *, round=round):
		deltas = [round(d) for d in deltas]
		if len(deltas) == len(self._supports) + 1:
			deltas = tuple(deltas[1:])
		else:
			assert len(deltas) == len(self._supports)
			deltas = tuple(deltas)

		varIdx = self._cache.get(deltas)
		if varIdx is not None:
			return varIdx

		if not self._data:
			self._add_VarData()
		inner = len(self._data.Item)
		if inner == 0xFFFF:
			# Full array. Start new one; deltas are already rounded.
			self._add_VarData()
			return self.storeDeltas(deltas, round=noRound)
		self._data.addItem(deltas, round=noRound)

		varIdx = (self._outer << 16) + inner
		self._cache[deltas] = varIdx
		return varIdx

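# Illustrative usage sketch for the builder above (the two-master wght setup
# is an assumption for the example, not part of this module):
#
#	>>> from fontTools.varLib.models import VariationModel
#	>>> builder = OnlineVarStoreBuilder(["wght"])
#	>>> builder.setModel(VariationModel([{}, {"wght": 1.0}]))
#	>>> base, varIdx = builder.storeMasters([100, 180])
#	>>> base  # value at the default location; the delta of 80 went to the store
#	100
#	>>> store = builder.finish()
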
def VarData_addItem(self, deltas, *, round=round):
	deltas = [round(d) for d in deltas]

	countUs = self.VarRegionCount
	countThem = len(deltas)
	if countUs + 1 == countThem:
		deltas = tuple(deltas[1:])
	else:
		assert countUs == countThem, (countUs, countThem)
		deltas = tuple(deltas)
	self.Item.append(list(deltas))
	self.ItemCount = len(self.Item)

ot.VarData.addItem = VarData_addItem

def VarRegion_get_support(self, fvar_axes):
	return {
		fvar_axes[i].axisTag: (reg.StartCoord, reg.PeakCoord, reg.EndCoord)
		for i, reg in enumerate(self.VarRegionAxis)
		if reg.PeakCoord != 0
	}

ot.VarRegion.get_support = VarRegion_get_support

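# For example (a sketch; an fvar axis order of ['wght', 'wdth'] is assumed),
# a region spanning (0, 1.0, 1.0) on wght with a wdth peak of 0 yields
# {'wght': (0, 1.0, 1.0)}: axes whose peak is 0 do not constrain the region
# and are omitted from the support dict.
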
def VarStore___bool__(self):
	return bool(self.VarData)

ot.VarStore.__bool__ = VarStore___bool__

class VarStoreInstancer(object):

	def __init__(self, varstore, fvar_axes, location={}):
		self.fvar_axes = fvar_axes
		assert varstore is None or varstore.Format == 1
		self._varData = varstore.VarData if varstore else []
		self._regions = varstore.VarRegionList.Region if varstore else []
		self.setLocation(location)

	def setLocation(self, location):
		self.location = dict(location)
		self._clearCaches()

	def _clearCaches(self):
		self._scalars = {}

	def _getScalar(self, regionIdx):
		scalar = self._scalars.get(regionIdx)
		if scalar is None:
			support = self._regions[regionIdx].get_support(self.fvar_axes)
			scalar = supportScalar(self.location, support)
			self._scalars[regionIdx] = scalar
		return scalar

	@staticmethod
	def interpolateFromDeltasAndScalars(deltas, scalars):
		delta = 0.
		for d, s in zip(deltas, scalars):
			if not s:
				continue
			delta += d * s
		return delta

	def __getitem__(self, varidx):
		if varidx == NO_VARIATION_INDEX:
			return 0.
		major, minor = varidx >> 16, varidx & 0xFFFF
		varData = self._varData
		scalars = [self._getScalar(ri) for ri in varData[major].VarRegionIndex]
		deltas = varData[major].Item[minor]
		return self.interpolateFromDeltasAndScalars(deltas, scalars)

	def interpolateFromDeltas(self, varDataIndex, deltas):
		varData = self._varData
		scalars = [self._getScalar(ri) for ri in
					varData[varDataIndex].VarRegionIndex]
		return self.interpolateFromDeltasAndScalars(deltas, scalars)

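# Illustrative sketch (assumes `store`, `fvar_axes`, and `varIdx` obtained
# from a compiled font or from the builder above):
#
#	>>> instancer = VarStoreInstancer(store, fvar_axes, {"wght": 0.5})
#	>>> delta = instancer[varIdx]  # delta interpolated at wght=0.5
#
# setLocation() may be called repeatedly to move to a new location; it clears
# the per-region scalar cache each time.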

#
# Optimizations
#
# retainFirstMap - If true, mappings in the first (major==0) VarData are
# 	retained; deltas for unused indices are zeroed out.
# advIdxes - Set of major==0 indices for advance deltas, to be listed first.
# 	Other major==0 indices follow.

def VarStore_subset_varidxes(self, varIdxes, optimize=True, retainFirstMap=False, advIdxes=set()):

	# Sort out used varIdxes by major/minor.
	used = {}
	for varIdx in varIdxes:
		if varIdx == NO_VARIATION_INDEX:
			continue
		major = varIdx >> 16
		minor = varIdx & 0xFFFF
		d = used.get(major)
		if d is None:
			d = used[major] = set()
		d.add(minor)
	del varIdxes

	#
	# Subset VarData
	#

	varData = self.VarData
	newVarData = []
	varDataMap = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
	for major, data in enumerate(varData):
		usedMinors = used.get(major)
		if usedMinors is None:
			continue
		newMajor = len(newVarData)
		newVarData.append(data)

		items = data.Item
		newItems = []
		if major == 0 and retainFirstMap:
			for minor in range(len(items)):
				newItems.append(items[minor] if minor in usedMinors
						else [0] * len(items[minor]))
				varDataMap[minor] = minor
		else:
			if major == 0:
				minors = sorted(advIdxes) + sorted(usedMinors - advIdxes)
			else:
				minors = sorted(usedMinors)
			for minor in minors:
				newMinor = len(newItems)
				newItems.append(items[minor])
				varDataMap[(major << 16) + minor] = (newMajor << 16) + newMinor

		data.Item = newItems
		data.ItemCount = len(data.Item)

		data.calculateNumShorts(optimize=optimize)

	self.VarData = newVarData
	self.VarDataCount = len(self.VarData)

	self.prune_regions()

	return varDataMap

ot.VarStore.subset_varidxes = VarStore_subset_varidxes

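# Typical call (a sketch; the VarIdx values are made up for illustration):
#
#	>>> varDataMap = store.subset_varidxes({0x00000000, 0x00010002})
#
# The returned map translates each retained old VarIdx to its new value (and
# NO_VARIATION_INDEX to itself); callers are expected to remap all references,
# e.g. with remap_device_varidxes() below.
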
def VarStore_prune_regions(self):
	"""Remove unused VarRegions."""
	#
	# Subset VarRegionList
	#

	# Collect.
	usedRegions = set()
	for data in self.VarData:
		usedRegions.update(data.VarRegionIndex)
	# Subset.
	regionList = self.VarRegionList
	regions = regionList.Region
	newRegions = []
	regionMap = {}
	for i in sorted(usedRegions):
		regionMap[i] = len(newRegions)
		newRegions.append(regions[i])
	regionList.Region = newRegions
	regionList.RegionCount = len(regionList.Region)
	# Map.
	for data in self.VarData:
		data.VarRegionIndex = [regionMap[i] for i in data.VarRegionIndex]

ot.VarStore.prune_regions = VarStore_prune_regions


def _visit(self, func):
	"""Recurse down from self; if an object's type is ot.Device,
	call func() on it.  Works on otData-style classes."""

	if type(self) == ot.Device:
		func(self)

	elif isinstance(self, list):
		for that in self:
			_visit(that, func)

	elif hasattr(self, 'getConverters') and not hasattr(self, 'postRead'):
		for conv in self.getConverters():
			that = getattr(self, conv.name, None)
			if that is not None:
				_visit(that, func)

	elif isinstance(self, ot.ValueRecord):
		for that in self.__dict__.values():
			_visit(that, func)

def _Device_recordVarIdx(self, s):
	"""Add VarIdx in this Device table (if any) to the set s."""
	if self.DeltaFormat == 0x8000:
		s.add((self.StartSize << 16) + self.EndSize)

def Object_collect_device_varidxes(self, varidxes):
	adder = partial(_Device_recordVarIdx, s=varidxes)
	_visit(self, adder)

ot.GDEF.collect_device_varidxes = Object_collect_device_varidxes
ot.GPOS.collect_device_varidxes = Object_collect_device_varidxes

def _Device_mapVarIdx(self, mapping, done):
	"""Map VarIdx in this Device table (if any) through mapping."""
	if id(self) in done:
		return
	done.add(id(self))
	if self.DeltaFormat == 0x8000:
		varIdx = mapping[(self.StartSize << 16) + self.EndSize]
		self.StartSize = varIdx >> 16
		self.EndSize = varIdx & 0xFFFF

def Object_remap_device_varidxes(self, varidxes_map):
	mapper = partial(_Device_mapVarIdx, mapping=varidxes_map, done=set())
	_visit(self, mapper)

ot.GDEF.remap_device_varidxes = Object_remap_device_varidxes
ot.GPOS.remap_device_varidxes = Object_remap_device_varidxes

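# Typical round trip (a sketch; `font` is assumed to be a TTFont with GDEF
# and GPOS tables, and `store` its GDEF VarStore):
#
#	>>> varidxes = set()
#	>>> font["GPOS"].table.collect_device_varidxes(varidxes)
#	>>> varidx_map = store.subset_varidxes(varidxes)
#	>>> font["GPOS"].table.remap_device_varidxes(varidx_map)
#
# main() below does the same dance with VarStore.optimize() instead of
# subset_varidxes().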

class _Encoding(object):

	def __init__(self, chars):
		self.chars = chars
		self.width = self._popcount(chars)
		self.overhead = self._characteristic_overhead(chars)
		self.items = set()

	def append(self, row):
		self.items.add(row)

	def extend(self, lst):
		self.items.update(lst)

	def get_room(self):
		"""Maximum number of bytes that can be added to the characteristic
		while it remains beneficial to merge it into another one."""
		count = len(self.items)
		return max(0, (self.overhead - 1) // count - self.width)
	room = property(get_room)

	@property
	def gain(self):
		"""Maximum possible byte gain from merging this into another
		characteristic."""
		count = len(self.items)
		return max(0, self.overhead - count * (self.width + 1))

	def sort_key(self):
		return self.width, self.chars

	def __len__(self):
		return len(self.items)

	def can_encode(self, chars):
		return not (chars & ~self.chars)

	def __sub__(self, other):
		return self._popcount(self.chars & ~other.chars)

	@staticmethod
	def _popcount(n):
		# Apparently this is the fastest native way to do it...
		# https://stackoverflow.com/a/9831671
		return bin(n).count('1')

	@staticmethod
	def _characteristic_overhead(chars):
		"""Returns the overhead in bytes of encoding this characteristic
		as a VarData."""
		c = 6
		while chars:
			if chars & 0b1111:
				c += 2
			chars >>= 4
		return c

	def _find_yourself_best_new_encoding(self, done_by_width):
		self.best_new_encoding = None
		for new_width in range(self.width + 1, self.width + self.room + 1):
			for new_encoding in done_by_width[new_width]:
				if new_encoding.can_encode(self.chars):
					break
			else:
				new_encoding = None
			self.best_new_encoding = new_encoding


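# Worked example for the cost model above (a sketch): an encoding with
# overhead == 10 bytes, width == 2 and three rows has
# gain == max(0, 10 - 3 * (2 + 1)) == 1 byte, and
# room == max(0, (10 - 1) // 3 - 2) == 1, i.e. each row may grow by one byte
# in a wider encoding before merging stops paying for itself.
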
class _EncodingDict(dict):

	def __missing__(self, chars):
		r = self[chars] = _Encoding(chars)
		return r

	def add_row(self, row):
		chars = self._row_characteristics(row)
		self[chars].append(row)

	@staticmethod
	def _row_characteristics(row):
		"""Returns the encoding characteristics for a row."""
		longWords = False

		chars = 0
		i = 1
		for v in row:
			if v:
				chars += i
			if not (-128 <= v <= 127):
				chars += i * 0b0010
			if not (-32768 <= v <= 32767):
				longWords = True
				break
			i <<= 4

		if longWords:
			# Redo; only allow 2-byte/4-byte encoding
			chars = 0
			i = 1
			for v in row:
				if v:
					chars += i * 0b0011
				if not (-32768 <= v <= 32767):
					chars += i * 0b1100
				i <<= 4

		return chars

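# Worked example (a sketch): for the row (0, 1, 300) the characteristic packs
# one nibble per column: column 0 is all-zero (0b0000), column 1 fits in a
# byte (0b0001), and column 2 needs a short (0b0011), so chars == 0x310.
# _popcount(0x310) == 3 gives the width (bytes per row), and
# _characteristic_overhead(0x310) == 6 + 2 + 2 == 10 bytes, two bytes per
# non-empty region column.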

def VarStore_optimize(self, use_NO_VARIATION_INDEX=True):
	"""Optimize storage. Returns a mapping from old VarIdxes to new ones."""

	# TODO
	# Check that no two VarRegions are the same; if they are, fold them.

	n = len(self.VarRegionList.Region)  # Number of columns
	zeroes = [0] * n

	front_mapping = {}  # Map from old VarIdxes to full row tuples

	encodings = _EncodingDict()

	# Collect all items into a set of full rows (with lots of zeroes).
	for major, data in enumerate(self.VarData):
		regionIndices = data.VarRegionIndex

		for minor, item in enumerate(data.Item):

			row = list(zeroes)
			for regionIdx, v in zip(regionIndices, item):
				row[regionIdx] += v
			row = tuple(row)

			if use_NO_VARIATION_INDEX and not any(row):
				front_mapping[(major << 16) + minor] = None
				continue

			encodings.add_row(row)
			front_mapping[(major << 16) + minor] = row

	# Separate encodings that have no gain (are decided) from those with
	# possible gain (which may be merged into others).
	encodings = sorted(encodings.values(), key=_Encoding.__len__, reverse=True)
	done_by_width = defaultdict(list)
	todo = []
	for encoding in encodings:
		if not encoding.gain:
			done_by_width[encoding.width].append(encoding)
		else:
			todo.append(encoding)

	# For each encoding that may be merged, find the best match among the
	# decided encodings, and record it.
	todo.sort(key=_Encoding.get_room)
	for encoding in todo:
		encoding._find_yourself_best_new_encoding(done_by_width)

	# Walk through the todo encodings; for each, see if merging it with
	# another todo encoding gains more than each of them merging with
	# their best decided encoding. If yes, merge them and add the resulting
	# encoding back to the todo queue.  If not, move the encoding to the
	# decided list.  Repeat till done.
	while todo:
		encoding = todo.pop()
		best_idx = None
		best_gain = 0
		for i, other_encoding in enumerate(todo):
			combined_chars = other_encoding.chars | encoding.chars
			combined_width = _Encoding._popcount(combined_chars)
			combined_overhead = _Encoding._characteristic_overhead(combined_chars)
			combined_gain = (
					+ encoding.overhead
					+ other_encoding.overhead
					- combined_overhead
					- (combined_width - encoding.width) * len(encoding)
					- (combined_width - other_encoding.width) * len(other_encoding)
					)
			this_gain = 0 if encoding.best_new_encoding is None else (
						+ encoding.overhead
						- (encoding.best_new_encoding.width - encoding.width) * len(encoding)
					)
			other_gain = 0 if other_encoding.best_new_encoding is None else (
						+ other_encoding.overhead
						- (other_encoding.best_new_encoding.width - other_encoding.width) * len(other_encoding)
					)
			separate_gain = this_gain + other_gain

			# Track the best candidate, not merely the last improving one.
			if combined_gain - separate_gain > best_gain:
				best_idx = i
				best_gain = combined_gain - separate_gain

		if best_idx is None:
			# Encoding is decided as is
			done_by_width[encoding.width].append(encoding)
		else:
			other_encoding = todo[best_idx]
			combined_chars = other_encoding.chars | encoding.chars
			combined_encoding = _Encoding(combined_chars)
			combined_encoding.extend(encoding.items)
			combined_encoding.extend(other_encoding.items)
			combined_encoding._find_yourself_best_new_encoding(done_by_width)
			del todo[best_idx]
			todo.append(combined_encoding)

	# Assemble the final store.
	back_mapping = {}  # Mapping from full rows to new VarIdxes
	encodings = sum(done_by_width.values(), [])
	encodings.sort(key=_Encoding.sort_key)
	self.VarData = []
	for major, encoding in enumerate(encodings):
		data = ot.VarData()
		self.VarData.append(data)
		data.VarRegionIndex = range(n)
		data.VarRegionCount = len(data.VarRegionIndex)
		data.Item = sorted(encoding.items)
		for minor, item in enumerate(data.Item):
			back_mapping[item] = (major << 16) + minor

	# Compile the final mapping.
	varidx_map = {NO_VARIATION_INDEX: NO_VARIATION_INDEX}
	for k, v in front_mapping.items():
		varidx_map[k] = back_mapping[v] if v is not None else NO_VARIATION_INDEX

	# Remove unused regions.
	self.prune_regions()

	# Recalculate things and go home.
	self.VarRegionList.RegionCount = len(self.VarRegionList.Region)
	self.VarDataCount = len(self.VarData)
	for data in self.VarData:
		data.ItemCount = len(data.Item)
		data.optimize()

	return varidx_map

ot.VarStore.optimize = VarStore_optimize

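# Sketch of the intended calling sequence (main() below is the complete,
# runnable version operating on a real font):
#
#	>>> varidx_map = store.optimize()
#	>>> font["GDEF"].table.remap_device_varidxes(varidx_map)
#	>>> font["GPOS"].table.remap_device_varidxes(varidx_map)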

def main(args=None):
	"""Optimize a font's GDEF variation store"""
	from argparse import ArgumentParser
	from fontTools import configLogger
	from fontTools.ttLib import TTFont
	from fontTools.ttLib.tables.otBase import OTTableWriter

	parser = ArgumentParser(prog='varLib.varStore', description=main.__doc__)
	parser.add_argument('fontfile')
	parser.add_argument('outfile', nargs='?')
	options = parser.parse_args(args)

	# TODO: allow user to configure logging via command-line options
	configLogger(level="INFO")

	fontfile = options.fontfile
	outfile = options.outfile

	font = TTFont(fontfile)
	gdef = font['GDEF']
	store = gdef.table.VarStore

	writer = OTTableWriter()
	store.compile(writer, font)
	size = len(writer.getAllData())
	print("Before: %7d bytes" % size)

	varidx_map = store.optimize()

	gdef.table.remap_device_varidxes(varidx_map)
	if 'GPOS' in font:
		font['GPOS'].table.remap_device_varidxes(varidx_map)

	writer = OTTableWriter()
	store.compile(writer, font)
	size = len(writer.getAllData())
	print("After:  %7d bytes" % size)

	if outfile is not None:
		font.save(outfile)


if __name__ == "__main__":
	import sys
	if len(sys.argv) > 1:
		sys.exit(main())
	import doctest
	sys.exit(doctest.testmod().failed)