#!/usr/bin/env python3
#
# Parse and report coverage info from .info files generated by lcov
#
import os
import glob
import csv
import re
import sys
import collections as co
import bisect as b


INFO_PATHS = ['tests/*.toml.info']

def collect(paths, **args):
    file = None
    funcs = []
    lines = co.defaultdict(lambda: 0)
    pattern = re.compile(
        '^(?P<file>SF:/?(?P<file_name>.*))$'
        '|^(?P<func>FN:(?P<func_lineno>[0-9]*),(?P<func_name>.*))$'
        '|^(?P<line>DA:(?P<line_lineno>[0-9]*),(?P<line_hits>[0-9]*))$')
    for path in paths:
        with open(path) as f:
            for line in f:
                m = pattern.match(line)
                if m and m.group('file'):
                    file = m.group('file_name')
                elif m and file and m.group('func'):
                    funcs.append((file, int(m.group('func_lineno')),
                        m.group('func_name')))
                elif m and file and m.group('line'):
                    lines[(file, int(m.group('line_lineno')))] += (
                        int(m.group('line_hits')))

    # map line numbers to functions
    funcs.sort()
    def func_from_lineno(file, lineno):
        i = b.bisect(funcs, (file, lineno))
        if i and funcs[i-1][0] == file:
            return funcs[i-1][2]
        else:
            return None

    # reduce to function info
    reduced_funcs = co.defaultdict(lambda: (0, 0))
    for (file, line_lineno), line_hits in lines.items():
        func = func_from_lineno(file, line_lineno)
        if not func:
            continue
        hits, count = reduced_funcs[(file, func)]
        reduced_funcs[(file, func)] = (hits + (line_hits > 0), count + 1)

    results = []
    for (file, func), (hits, count) in reduced_funcs.items():
        # discard internal/testing functions (test_* injected with
        # internal testing)
        if func.startswith('__') or func.startswith('test_'):
            continue
        # discard .8449 suffixes created by optimizer
        func = re.sub(r'\.[0-9]+', '', func)
        results.append((file, func, hits, count))

    return results
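

# For reference, the lcov tracefile records that collect() matches look like
# this (the values below are illustrative, not taken from a real run):
#
#   SF:lfs.c              the records that follow describe this source file
#   FN:412,lfs_bd_read    a function name and the line it starts on
#   DA:412,18             a line number and the number of times it was hit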


def main(**args):
    # find coverage
    if not args.get('use'):
        # find *.info files
        paths = []
        for path in args['info_paths']:
            if os.path.isdir(path):
                path = path + '/*.info'

            for path in glob.glob(path):
                paths.append(path)

        if not paths:
            print('no .info files found in %r?' % args['info_paths'])
            sys.exit(-1)

        results = collect(paths, **args)
    else:
        with open(args['use']) as f:
            r = csv.DictReader(f)
            results = [
                (result['file'],
                    result['function'],
                    int(result['hits']),
                    int(result['count']))
                for result in r]

    total_hits, total_count = 0, 0
    for _, _, hits, count in results:
        total_hits += hits
        total_count += count

    # find previous results?
    if args.get('diff'):
        with open(args['diff']) as f:
            r = csv.DictReader(f)
            prev_results = [
                (result['file'],
                    result['function'],
                    int(result['hits']),
                    int(result['count']))
                for result in r]

        prev_total_hits, prev_total_count = 0, 0
        for _, _, hits, count in prev_results:
            prev_total_hits += hits
            prev_total_count += count

    # write results to CSV
    if args.get('output'):
        with open(args['output'], 'w') as f:
            w = csv.writer(f)
            w.writerow(['file', 'function', 'hits', 'count'])
            for file, func, hits, count in sorted(results):
                w.writerow((file, func, hits, count))

    # print results
    def dedup_entries(results, by='function'):
        entries = co.defaultdict(lambda: (0, 0))
        for file, func, hits, count in results:
            entry = (file if by == 'file' else func)
            entry_hits, entry_count = entries[entry]
            entries[entry] = (entry_hits + hits, entry_count + count)
        return entries

    def diff_entries(olds, news):
        diff = co.defaultdict(lambda: (0, 0, 0, 0, 0, 0, 0))
        for name, (new_hits, new_count) in news.items():
            diff[name] = (
                0, 0,
                new_hits, new_count,
                new_hits, new_count,
                (new_hits/new_count if new_count else 1.0) - 1.0)
        for name, (old_hits, old_count) in olds.items():
            _, _, new_hits, new_count, _, _, _ = diff[name]
            diff[name] = (
                old_hits, old_count,
                new_hits, new_count,
                new_hits-old_hits, new_count-old_count,
                ((new_hits/new_count if new_count else 1.0)
                    - (old_hits/old_count if old_count else 1.0)))
        return diff

    def print_header(by=''):
        if not args.get('diff'):
            print('%-36s %19s' % (by, 'hits/line'))
        else:
            print('%-36s %19s %19s %11s' % (by, 'old', 'new', 'diff'))

    def print_entries(by='function'):
        entries = dedup_entries(results, by=by)

        if not args.get('diff'):
            print_header(by=by)
            for name, (hits, count) in sorted(entries.items()):
                print("%-36s %11s %7s" % (name,
                    '%d/%d' % (hits, count)
                        if count else '-',
                    '%.1f%%' % (100*hits/count)
                        if count else '-'))
        else:
            prev_entries = dedup_entries(prev_results, by=by)
            diff = diff_entries(prev_entries, entries)
            print_header(by='%s (%d added, %d removed)' % (by,
                sum(1 for _, old, _, _, _, _, _ in diff.values() if not old),
                sum(1 for _, _, _, new, _, _, _ in diff.values() if not new)))
            for name, (
                    old_hits, old_count,
                    new_hits, new_count,
                    diff_hits, diff_count, ratio) in sorted(diff.items(),
                        key=lambda x: (-x[1][6], x)):
                if ratio or args.get('all'):
                    print("%-36s %11s %7s %11s %7s %11s%s" % (name,
                        '%d/%d' % (old_hits, old_count)
                            if old_count else '-',
                        '%.1f%%' % (100*old_hits/old_count)
                            if old_count else '-',
                        '%d/%d' % (new_hits, new_count)
                            if new_count else '-',
                        '%.1f%%' % (100*new_hits/new_count)
                            if new_count else '-',
                        '%+d/%+d' % (diff_hits, diff_count),
                        ' (%+.1f%%)' % (100*ratio) if ratio else ''))

    def print_totals():
        if not args.get('diff'):
            print("%-36s %11s %7s" % ('TOTAL',
                '%d/%d' % (total_hits, total_count)
                    if total_count else '-',
                '%.1f%%' % (100*total_hits/total_count)
                    if total_count else '-'))
        else:
            ratio = ((total_hits/total_count
                    if total_count else 1.0)
                - (prev_total_hits/prev_total_count
                    if prev_total_count else 1.0))
            print("%-36s %11s %7s %11s %7s %11s%s" % ('TOTAL',
                '%d/%d' % (prev_total_hits, prev_total_count)
                    if prev_total_count else '-',
                '%.1f%%' % (100*prev_total_hits/prev_total_count)
                    if prev_total_count else '-',
                '%d/%d' % (total_hits, total_count)
                    if total_count else '-',
                '%.1f%%' % (100*total_hits/total_count)
                    if total_count else '-',
                '%+d/%+d' % (total_hits-prev_total_hits,
                    total_count-prev_total_count),
                ' (%+.1f%%)' % (100*ratio) if ratio else ''))

    if args.get('quiet'):
        pass
    elif args.get('summary'):
        print_header()
        print_totals()
    elif args.get('files'):
        print_entries(by='file')
        print_totals()
    else:
        print_entries(by='function')
        print_totals()

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(
        description="Parse and report coverage info from .info files \
            generated by lcov")
    parser.add_argument('info_paths', nargs='*', default=INFO_PATHS,
        help="Description of where to find *.info files. May be a directory \
            or list of paths. *.info files will be merged to show the total \
            coverage. Defaults to %r." % INFO_PATHS)
    parser.add_argument('-v', '--verbose', action='store_true',
        help="Output commands that run behind the scenes.")
    parser.add_argument('-o', '--output',
        help="Specify CSV file to store results.")
    parser.add_argument('-u', '--use',
        help="Don't do any work, instead use this CSV file.")
    parser.add_argument('-d', '--diff',
        help="Specify CSV file to diff coverage against.")
    parser.add_argument('-a', '--all', action='store_true',
        help="Show all functions, not just the ones that changed.")
    parser.add_argument('--files', action='store_true',
        help="Show file-level coverage.")
    parser.add_argument('-s', '--summary', action='store_true',
        help="Only show the total coverage.")
    parser.add_argument('-q', '--quiet', action='store_true',
        help="Don't show anything, useful with -o.")
    sys.exit(main(**vars(parser.parse_args())))
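
# Example invocations (script and CSV paths are illustrative):
#
#   ./coverage.py tests/*.toml.info -o coverage.csv
#   ./coverage.py -u coverage.csv -d coverage-prev.csv -a
#
# The first merges all matching .info files and writes per-function results
# to coverage.csv; the second reuses that CSV, diffs it against a previous
# run, and shows every function rather than only the ones that changed.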