#!/usr/bin/python2
"""Starts and runs coverage build on Google Cloud Builder.

Usage: build_and_run_coverage.py <project_dir>
"""

import datetime
import json
import os
import sys

import build_lib
import build_project

SANITIZER = 'coverage'
CONFIGURATION = ['FUZZING_ENGINE=libfuzzer', 'SANITIZER=%s' % SANITIZER]
PLATFORM = 'linux'

COVERAGE_BUILD_TAG = 'coverage'

# Bucket where code coverage reports are uploaded.
COVERAGE_BUCKET_NAME = 'oss-fuzz-coverage'

# Link to the code coverage report in HTML format.
HTML_REPORT_URL_FORMAT = (build_lib.GCS_URL_BASENAME + COVERAGE_BUCKET_NAME +
                          '/{project}/reports/{date}/{platform}/index.html')

# This is needed for ClusterFuzz to pick up the most recent report data.
LATEST_REPORT_INFO_URL = ('/' + COVERAGE_BUCKET_NAME +
                          '/latest_report_info/{project}.json')

# GCS URL to upload code coverage report files to.
UPLOAD_URL_FORMAT = 'gs://' + COVERAGE_BUCKET_NAME + '/{project}/{type}/{date}'

# Languages from project.yaml that have code coverage support.
LANGUAGES_WITH_COVERAGE_SUPPORT = ['c', 'cpp']


def skip_build(message):
  """Exits with 0 code so the coverage job is not marked as failed."""
  sys.stderr.write('%s\n' % message)

  # The script is expected to print a build_id, so print '0' as a special
  # value to indicate that the build was skipped.
  print '0'
  exit(0)


def usage():
  """Prints usage and exits with a non-zero code."""
  sys.stderr.write('Usage: ' + sys.argv[0] + ' <project_dir>\n')
  exit(1)


def get_build_steps(project_dir):
  """Returns build steps for the coverage build of the given project."""
  project_name = os.path.basename(project_dir)
  project_yaml = build_project.load_project_yaml(project_dir)
  if project_yaml['disabled']:
    skip_build('Project "%s" is disabled.' % project_name)

  if project_yaml['language'] not in LANGUAGES_WITH_COVERAGE_SUPPORT:
    skip_build(('Project "{project_name}" is written in "{language}", '
                'coverage is not supported yet.').format(
                    project_name=project_name,
                    language=project_yaml['language']))

  dockerfile_path = os.path.join(project_dir, 'Dockerfile')
  name = project_yaml['name']
  image = project_yaml['image']
  report_date = datetime.datetime.now().strftime('%Y%m%d')

  build_steps = [
      {
          'args': [
              'clone',
              'https://github.com/google/oss-fuzz.git',
          ],
          'name': 'gcr.io/cloud-builders/git',
      },
      {
          'name': 'gcr.io/cloud-builders/docker',
          'args': [
              'build',
              '-t',
              image,
              '.',
          ],
          'dir': 'oss-fuzz/projects/' + name,
      },
      {
          'name': image,
          'args': [
              'bash', '-c',
              'srcmap > /workspace/srcmap.json && cat /workspace/srcmap.json'
          ],
          'env': ['OSSFUZZ_REVISION=$REVISION_ID'],
      },
  ]

  env = CONFIGURATION[:]
  out = '/workspace/out/' + SANITIZER
  env.append('OUT=' + out)

  workdir = build_project.workdir_from_dockerfile(dockerfile_path)
  if not workdir:
    workdir = '/src'

  failure_msg = ('*' * 80 + '\nCoverage build failed.\nTo reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n' + '*' * 80).format(name=name)

  # Compilation step.
  build_steps.append({
      'name':
          image,
      'env':
          env,
      'args': [
          'bash',
          '-c',
          # Remove /out to make sure there are no leftover non-instrumented
          # binaries.
          # `cd /src && cd {workdir}` (where {workdir} is parsed from the
          # Dockerfile). Container Builder overrides our workdir, so we need
          # to add this step to set it back.
          ('rm -r /out && cd /src && cd {workdir} && mkdir -p {out} && '
           'compile || (echo "{failure_msg}" && false)'
          ).format(workdir=workdir, out=out, failure_msg=failure_msg),
      ],
  })

  download_corpora_step = build_lib.download_corpora_step(project_name)
  if not download_corpora_step:
    skip_build('Skipping code coverage build for %s.\n' % project_name)

  build_steps.append(download_corpora_step)

  failure_msg = ('*' * 80 + '\nCode coverage report generation failed.\n'
                 'To reproduce, run:\n'
                 'python infra/helper.py build_image {name}\n'
                 'python infra/helper.py build_fuzzers --sanitizer coverage '
                 '{name}\n'
                 'python infra/helper.py coverage {name}\n' +
                 '*' * 80).format(name=name)

  # Unpack the corpus and run the coverage script.
  build_steps.append({
      'name':
          'gcr.io/oss-fuzz-base/base-runner',
      'env':
          env + [
              'HTTP_PORT=',
              'COVERAGE_EXTRA_ARGS=%s' %
              project_yaml['coverage_extra_args'].strip()
          ],
      'args': [
          'bash', '-c',
          ('for f in /corpus/*.zip; do unzip -q $f -d ${f%%.*} || ('
           'echo "Failed to unpack the corpus for $(basename ${f%%.*}). '
           'This usually means that corpus backup for a particular fuzz '
           'target does not exist. If a fuzz target was added in the last '
           '24 hours, please wait one more day. Otherwise, something is '
           'wrong with the fuzz target or the infrastructure, and the '
           'corpus pruning task does not finish successfully." && exit 1'
           '); done && coverage || (echo "' + failure_msg + '" && false)')
      ],
      'volumes': [{
          'name': 'corpus',
          'path': '/corpus'
      }],
  })

  # Upload the report.
  upload_report_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='reports',
                                               date=report_date)
  build_steps.append({
      'name':
          'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'report'),
          upload_report_url,
      ],
  })

  # Upload the fuzzer stats.
  upload_fuzzer_stats_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                                     type='fuzzer_stats',
                                                     date=report_date)
  build_steps.append({
      'name':
          'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'fuzzer_stats'),
          upload_fuzzer_stats_url,
      ],
  })

  # Upload the fuzzer logs.
  build_steps.append({
      'name':
          'gcr.io/cloud-builders/gsutil',
      'args': [
          '-m',
          'cp',
          '-r',
          os.path.join(out, 'logs'),
          UPLOAD_URL_FORMAT.format(project=project_name,
                                   type='logs',
                                   date=report_date),
      ],
  })

  # Upload srcmap.
  srcmap_upload_url = UPLOAD_URL_FORMAT.format(project=project_name,
                                               type='srcmap',
                                               date=report_date)
  srcmap_upload_url = srcmap_upload_url.rstrip('/') + '.json'
  build_steps.append({
      'name': 'gcr.io/cloud-builders/gsutil',
      'args': [
          'cp',
          '/workspace/srcmap.json',
          srcmap_upload_url,
      ],
  })

  # Update the latest report information file for ClusterFuzz.
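  # For reference, the uploaded JSON roughly takes this shape (illustrative
  # placeholder values only; the real ones are built just below):
  #   {
  #     "fuzzer_stats_dir": "gs://.../<project>/fuzzer_stats/<date>",
  #     "html_report_url": ".../<project>/reports/<date>/linux/index.html",
  #     "report_date": "<date>",
  #     "report_summary_path": ".../reports/<date>/linux/summary.json"
  #   }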
  latest_report_info_url = build_lib.get_signed_url(
      LATEST_REPORT_INFO_URL.format(project=project_name),
      method='PUT',
      content_type='application/json')
  latest_report_info_body = json.dumps({
      'fuzzer_stats_dir':
          upload_fuzzer_stats_url,
      'html_report_url':
          HTML_REPORT_URL_FORMAT.format(project=project_name,
                                        date=report_date,
                                        platform=PLATFORM),
      'report_date':
          report_date,
      'report_summary_path':
          os.path.join(upload_report_url, PLATFORM, 'summary.json'),
  })

  build_steps.append({
      'name':
          'gcr.io/cloud-builders/curl',
      'args': [
          '-H',
          'Content-Type: application/json',
          '-X',
          'PUT',
          '-d',
          latest_report_info_body,
          latest_report_info_url,
      ],
  })
  return build_steps


def main():
  if len(sys.argv) != 2:
    usage()

  project_dir = sys.argv[1].rstrip(os.path.sep)
  project_name = os.path.basename(project_dir)
  steps = get_build_steps(project_dir)
  build_project.run_build(steps, project_name, COVERAGE_BUILD_TAG)


if __name__ == '__main__':
  main()