#!/usr/bin/env python3
#
# Parse and report coverage info from .info files generated by lcov
#
import os
import sys
import glob
import csv
import re
import collections as co
import bisect as b


INFO_PATHS = ['tests/*.toml.info']

def collect(paths, **args):
    """Parse lcov .info files into per-function coverage results.

    Reads SF: (source file), FN: (function start line) and DA: (line
    execution counts) records, attributes each instrumented line to the
    function whose start line most recently precedes it, and reduces to
    per-function totals.

    Returns a list of (file, function_name, hits, count) tuples where
    hits is the number of lines executed at least once and count is the
    total number of instrumented lines in the function.
    """
    file = None
    funcs = []
    lines = co.defaultdict(lambda: 0)
    # one alternation per lcov record type we care about
    pattern = re.compile(
        r'^(?P<file>SF:/?(?P<file_name>.*))$'
        r'|^(?P<func>FN:(?P<func_lineno>[0-9]*),(?P<func_name>.*))$'
        r'|^(?P<line>DA:(?P<line_lineno>[0-9]*),(?P<line_hits>[0-9]*))$')
    for path in paths:
        with open(path) as f:
            for line in f:
                m = pattern.match(line)
                if m and m.group('file'):
                    file = m.group('file_name')
                elif m and file and m.group('func'):
                    funcs.append((file, int(m.group('func_lineno')),
                        m.group('func_name')))
                elif m and file and m.group('line'):
                    # hits for the same line may appear in multiple .info
                    # files, accumulate them
                    lines[(file, int(m.group('line_lineno')))] += (
                        int(m.group('line_hits')))

    # map line numbers to functions
    funcs.sort()
    # parallel list of (file, lineno) keys so a line that sits exactly on
    # a function's start line is attributed to that function (comparing
    # a 2-tuple against the 3-tuple entries would sort it *before* the
    # function and misattribute it to the previous one)
    func_keys = [(file, lineno) for file, lineno, _ in funcs]
    def func_from_lineno(file, lineno):
        i = b.bisect_right(func_keys, (file, lineno))
        if i and funcs[i-1][0] == file:
            return funcs[i-1][2]
        else:
            return None

    # reduce to function info
    reduced_funcs = co.defaultdict(lambda: (0, 0))
    for (file, line_lineno), line_hits in lines.items():
        func = func_from_lineno(file, line_lineno)
        if not func:
            continue
        hits, count = reduced_funcs[(file, func)]
        reduced_funcs[(file, func)] = (hits + (line_hits > 0), count + 1)

    results = []
    for (file, func), (hits, count) in reduced_funcs.items():
        # discard internal/testing functions (test_* injected with
        # internal testing)
        if not args.get('everything'):
            if func.startswith('__') or func.startswith('test_'):
                continue
        # discard .8449 suffixes created by optimizer
        func = re.sub(r'\.[0-9]+', '', func)
        results.append((file, func, hits, count))

    return results


def main(**args):
    """Collect and report coverage, per the command-line flags in args.

    args mirrors the argparse options declared in the __main__ block:
    info_paths, use, diff, merge, output, everything, all, files,
    summary, quiet, coverage_sort, reverse_coverage_sort, ...
    """
    def openio(path, mode='r'):
        # '-' means stdin/stdout; dup the fd so the with-statement close
        # doesn't close the real stream
        if path == '-':
            if 'r' in mode:
                return os.fdopen(os.dup(sys.stdin.fileno()), 'r')
            else:
                return os.fdopen(os.dup(sys.stdout.fileno()), 'w')
        else:
            return open(path, mode)

    # find coverage
    if not args.get('use'):
        # find *.info files
        paths = []
        for path in args['info_paths']:
            if os.path.isdir(path):
                # expand a directory into the .info files it contains
                # (this script parses lcov .info output, not .gcov)
                path = path + '/*.info'

            for path in glob.glob(path):
                paths.append(path)

        if not paths:
            print('no .info files found in %r?' % args['info_paths'])
            sys.exit(-1)

        results = collect(paths, **args)
    else:
        # reuse previously-written CSV results instead of re-collecting
        with openio(args['use']) as f:
            r = csv.DictReader(f)
            results = [
                (   result['file'],
                    result['name'],
                    int(result['coverage_hits']),
                    int(result['coverage_count']))
                for result in r
                if result.get('coverage_hits') not in {None, ''}
                if result.get('coverage_count') not in {None, ''}]

    total_hits, total_count = 0, 0
    for _, _, hits, count in results:
        total_hits += hits
        total_count += count

    # find previous results?
    if args.get('diff'):
        try:
            with openio(args['diff']) as f:
                r = csv.DictReader(f)
                prev_results = [
                    (   result['file'],
                        result['name'],
                        int(result['coverage_hits']),
                        int(result['coverage_count']))
                    for result in r
                    if result.get('coverage_hits') not in {None, ''}
                    if result.get('coverage_count') not in {None, ''}]
        except FileNotFoundError:
            prev_results = []

        prev_total_hits, prev_total_count = 0, 0
        for _, _, hits, count in prev_results:
            prev_total_hits += hits
            prev_total_count += count

    # write results to CSV
    if args.get('output'):
        merged_results = co.defaultdict(lambda: {})
        other_fields = []

        # merge?
        if args.get('merge'):
            try:
                with openio(args['merge']) as f:
                    r = csv.DictReader(f)
                    for result in r:
                        file = result.pop('file', '')
                        func = result.pop('name', '')
                        # our coverage columns win over stale merged ones
                        result.pop('coverage_hits', None)
                        result.pop('coverage_count', None)
                        merged_results[(file, func)] = result
                        other_fields = result.keys()
            except FileNotFoundError:
                pass

        for file, func, hits, count in results:
            merged_results[(file, func)]['coverage_hits'] = hits
            merged_results[(file, func)]['coverage_count'] = count

        with openio(args['output'], 'w') as f:
            w = csv.DictWriter(f, ['file', 'name',
                *other_fields, 'coverage_hits', 'coverage_count'])
            w.writeheader()
            for (file, func), result in sorted(merged_results.items()):
                w.writerow({'file': file, 'name': func, **result})

    # print results
    def dedup_entries(results, by='name'):
        # sum hits/counts grouped by function name or by file
        entries = co.defaultdict(lambda: (0, 0))
        for file, func, hits, count in results:
            entry = (file if by == 'file' else func)
            entry_hits, entry_count = entries[entry]
            entries[entry] = (entry_hits + hits, entry_count + count)
        return entries

    def diff_entries(olds, news):
        # pair old/new entries; missing entries default to 0/0 so added
        # and removed functions still show up
        diff = co.defaultdict(lambda: (0, 0, 0, 0, 0, 0, 0))
        for name, (new_hits, new_count) in news.items():
            diff[name] = (
                0, 0,
                new_hits, new_count,
                new_hits, new_count,
                (new_hits/new_count if new_count else 1.0) - 1.0)
        for name, (old_hits, old_count) in olds.items():
            _, _, new_hits, new_count, _, _, _ = diff[name]
            diff[name] = (
                old_hits, old_count,
                new_hits, new_count,
                new_hits-old_hits, new_count-old_count,
                ((new_hits/new_count if new_count else 1.0)
                    - (old_hits/old_count if old_count else 1.0)))
        return diff

    def sorted_entries(entries):
        if args.get('coverage_sort'):
            return sorted(entries, key=lambda x: (
                -(x[1][0]/x[1][1] if x[1][1] else -1), x))
        elif args.get('reverse_coverage_sort'):
            return sorted(entries, key=lambda x: (
                +(x[1][0]/x[1][1] if x[1][1] else -1), x))
        else:
            return sorted(entries)

    def sorted_diff_entries(entries):
        if args.get('coverage_sort'):
            return sorted(entries, key=lambda x: (
                -(x[1][2]/x[1][3] if x[1][3] else -1), x))
        elif args.get('reverse_coverage_sort'):
            return sorted(entries, key=lambda x: (
                +(x[1][2]/x[1][3] if x[1][3] else -1), x))
        else:
            # default diff order: largest coverage-ratio change first
            return sorted(entries, key=lambda x: (-x[1][6], x))

    def print_header(by=''):
        if not args.get('diff'):
            print('%-36s %19s' % (by, 'hits/line'))
        else:
            print('%-36s %19s %19s %11s' % (by, 'old', 'new', 'diff'))

    def print_entry(name, hits, count):
        print("%-36s %11s %7s" % (name,
            '%d/%d' % (hits, count)
                if count else '-',
            '%.1f%%' % (100*hits/count)
                if count else '-'))

    def print_diff_entry(name,
            old_hits, old_count,
            new_hits, new_count,
            diff_hits, diff_count,
            ratio):
        print("%-36s %11s %7s %11s %7s %11s%s" % (name,
            '%d/%d' % (old_hits, old_count)
                if old_count else '-',
            '%.1f%%' % (100*old_hits/old_count)
                if old_count else '-',
            '%d/%d' % (new_hits, new_count)
                if new_count else '-',
            '%.1f%%' % (100*new_hits/new_count)
                if new_count else '-',
            '%+d/%+d' % (diff_hits, diff_count),
            ' (%+.1f%%)' % (100*ratio) if ratio else ''))

    def print_entries(by='name'):
        entries = dedup_entries(results, by=by)

        if not args.get('diff'):
            print_header(by=by)
            for name, (hits, count) in sorted_entries(entries.items()):
                print_entry(name, hits, count)
        else:
            prev_entries = dedup_entries(prev_results, by=by)
            diff = diff_entries(prev_entries, entries)
            print_header(by='%s (%d added, %d removed)' % (by,
                sum(1 for _, old, _, _, _, _, _ in diff.values() if not old),
                sum(1 for _, _, _, new, _, _, _ in diff.values() if not new)))
            for name, (
                    old_hits, old_count,
                    new_hits, new_count,
                    diff_hits, diff_count, ratio) in sorted_diff_entries(
                        diff.items()):
                if ratio or args.get('all'):
                    print_diff_entry(name,
                        old_hits, old_count,
                        new_hits, new_count,
                        diff_hits, diff_count,
                        ratio)

    def print_totals():
        if not args.get('diff'):
            print_entry('TOTAL', total_hits, total_count)
        else:
            ratio = ((total_hits/total_count
                    if total_count else 1.0)
                - (prev_total_hits/prev_total_count
                    if prev_total_count else 1.0))
            print_diff_entry('TOTAL',
                prev_total_hits, prev_total_count,
                total_hits, total_count,
                total_hits-prev_total_hits, total_count-prev_total_count,
                ratio)

    if args.get('quiet'):
        pass
    elif args.get('summary'):
        print_header()
        print_totals()
    elif args.get('files'):
        print_entries(by='file')
        print_totals()
    else:
        print_entries(by='name')
        print_totals()

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(
        description="Parse and report coverage info from .info files \
            generated by lcov")
    parser.add_argument('info_paths', nargs='*', default=INFO_PATHS,
        help="Description of where to find *.info files. May be a directory \
            or list of paths. *.info files will be merged to show the total \
            coverage. Defaults to %r." % INFO_PATHS)
    parser.add_argument('-v', '--verbose', action='store_true',
        help="Output commands that run behind the scenes.")
    parser.add_argument('-o', '--output',
        help="Specify CSV file to store results.")
    parser.add_argument('-u', '--use',
        help="Don't do any work, instead use this CSV file.")
    parser.add_argument('-d', '--diff',
        help="Specify CSV file to diff code size against.")
    parser.add_argument('-m', '--merge',
        help="Merge with an existing CSV file when writing to output.")
    parser.add_argument('-a', '--all', action='store_true',
        help="Show all functions, not just the ones that changed.")
    parser.add_argument('-A', '--everything', action='store_true',
        help="Include builtin and libc specific symbols.")
    parser.add_argument('-s', '--coverage-sort', action='store_true',
        help="Sort by coverage.")
    parser.add_argument('-S', '--reverse-coverage-sort', action='store_true',
        help="Sort by coverage, but backwards.")
    parser.add_argument('-F', '--files', action='store_true',
        help="Show file-level coverage.")
    parser.add_argument('-Y', '--summary', action='store_true',
        help="Only show the total coverage.")
    parser.add_argument('-q', '--quiet', action='store_true',
        help="Don't show anything, useful with -o.")
    parser.add_argument('--build-dir',
        help="Specify the relative build directory. Used to map object files \
            to the correct source files.")
    sys.exit(main(**vars(parser.parse_args())))