• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2020 Google Inc.
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14#
15################################################################################
16"""Cloud function to request builds."""
17import base64
18import concurrent.futures
19import json
20import sys
21
22import google.auth
23from googleapiclient.discovery import build
24from google.cloud import ndb
25from google.cloud import storage
26
27import build_and_run_coverage
28import build_project
29from datastore_entities import BuildsHistory
30from datastore_entities import LastSuccessfulBuild
31from datastore_entities import Project
32
# Directory in STATUS_BUCKET that holds the source badge images.
BADGE_DIR = 'badge_images'
# Supported badge image formats mapped to their HTTP content types.
BADGE_IMAGE_TYPES = {'svg': 'image/svg+xml', 'png': 'image/png'}
# Destination directory (within STATUS_BUCKET) for per-project badges.
DESTINATION_BADGE_DIR = 'badges'
# Maximum number of finished builds to keep in a project's history.
MAX_BUILD_LOGS = 7

# GCS bucket where build statuses, mirrored logs and badges are stored.
STATUS_BUCKET = 'oss-fuzz-build-logs'

# Status JSON files uploaded to STATUS_BUCKET.
FUZZING_STATUS_FILENAME = 'status.json'
COVERAGE_STATUS_FILENAME = 'status-coverage.json'

# pylint: disable=invalid-name
# Lazily created, module-wide storage client; see get_storage_client().
_client = None
45
46
class MissingBuildLogError(Exception):
  """Raised when a finished build has no log file in cloud storage."""
49
50
# pylint: disable=global-statement
def get_storage_client():
  """Return the shared storage client, creating it on first use."""
  global _client
  if _client:
    return _client
  _client = storage.Client()
  return _client
59
60
def is_build_successful(build_obj):
  """Return True iff the given build object finished with status SUCCESS."""
  status = build_obj['status']
  return status == 'SUCCESS'
64
65
def upload_status(data, status_filename):
  """Serialize |data| to JSON and upload it to the status bucket.

  The blob is marked no-cache so status pages always see fresh data.
  """
  blob = get_storage_client().get_bucket(STATUS_BUCKET).blob(status_filename)
  blob.cache_control = 'no-cache'
  blob.upload_from_string(json.dumps(data), content_type='application/json')
72
73
def sort_projects(projects):
  """Sort projects in order Failures, Successes, Not yet built."""

  def sort_key(project):
    history = project['history']
    if not history:
      return 2  # Projects without any builds go last.
    # Latest failure sorts first (0), latest success second (1).
    return 1 if history[0]['success'] else 0

  projects.sort(key=sort_key)
89
90
def get_build(cloudbuild, image_project, build_id):
  """Fetch a single build object via the Cloud Build API client."""
  request = cloudbuild.projects().builds().get(projectId=image_project,
                                               id=build_id)
  return request.execute()
95
96
def update_last_successful_build(project, build_tag):
  """Update last successful build.

  Syncs the datastore LastSuccessfulBuild entity for (project, build_tag)
  with the 'last_successful_build' field of the in-memory |project| dict:
  when the dict has fresh data it overwrites the stored entity, otherwise
  the stored entity (if any) is surfaced back into the dict.
  """
  last_successful_build = ndb.Key(LastSuccessfulBuild,
                                  project['name'] + '-' + build_tag).get()
  # Nothing stored and nothing new observed: no update needed.
  if not last_successful_build and 'last_successful_build' not in project:
    return

  if 'last_successful_build' not in project:
    # No successful build in this batch; reuse the stored record so the
    # status page still shows the most recent success.
    project['last_successful_build'] = {
        'build_id': last_successful_build.build_id,
        'finish_time': last_successful_build.finish_time
    }
  else:
    if last_successful_build:
      # Refresh the existing entity with the newly observed success.
      last_successful_build.build_id = project['last_successful_build'][
          'build_id']
      last_successful_build.finish_time = project['last_successful_build'][
          'finish_time']
    else:
      # First recorded success for this project/tag pair.
      last_successful_build = LastSuccessfulBuild(
          id=project['name'] + '-' + build_tag,
          project=project['name'],
          build_id=project['last_successful_build']['build_id'],
          finish_time=project['last_successful_build']['finish_time'])
    last_successful_build.put()
122
123
# pylint: disable=no-member
def get_build_history(build_ids):
  """Returns build object for the last finished build of project.

  Walks |build_ids| in reverse, collecting up to MAX_BUILD_LOGS finished
  builds and remembering the most recent successful one.

  Raises:
    MissingBuildLogError: if a finished build's log file is absent from GCS.
  """
  credentials, image_project = google.auth.default()
  cloudbuild = build('cloudbuild',
                     'v1',
                     credentials=credentials,
                     cache_discovery=False)

  history = []
  last_successful_build = None

  # Iterate newest-first (build_ids are presumably appended in
  # chronological order — confirm against the writer of BuildsHistory).
  for build_id in reversed(build_ids):
    project_build = get_build(cloudbuild, image_project, build_id)
    # Only consider finished builds; skip in-progress/cancelled ones.
    if project_build['status'] not in ('SUCCESS', 'FAILURE', 'TIMEOUT'):
      continue

    # The first success seen is the most recent one.
    if (not last_successful_build and is_build_successful(project_build)):
      last_successful_build = {
          'build_id': build_id,
          'finish_time': project_build['finishTime'],
      }

    # Mirror the build log into the status bucket; a finished build with
    # no log file is treated as an error.
    if not upload_log(build_id):
      log_name = 'log-{0}'.format(build_id)
      raise MissingBuildLogError('Missing build log file {0}'.format(log_name))

    history.append({
        'build_id': build_id,
        'finish_time': project_build['finishTime'],
        'success': is_build_successful(project_build)
    })

    if len(history) == MAX_BUILD_LOGS:
      break

  project = {'history': history}
  if last_successful_build:
    project['last_successful_build'] = last_successful_build
  return project
164
165
# pylint: disable=too-many-locals
def update_build_status(build_tag, status_filename):
  """Update build statuses.

  Fetches build histories for all projects with the given build tag in
  parallel, syncs last-successful-build records, and uploads the
  aggregated status JSON to |status_filename| in GCS.
  """
  projects = []

  def process_project(project_build):
    """Build the status dict for a single project (runs in a worker)."""
    project = get_build_history(project_build.build_ids)
    project['name'] = project_build.project
    print('Processing project', project['name'])
    return project

  with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
    futures = []
    for project_build in BuildsHistory.query(
        BuildsHistory.build_tag == build_tag).order('project'):
      futures.append(executor.submit(process_project, project_build))

    # update_last_successful_build uses ndb, so it runs here in the
    # caller's thread (inside the ndb context set up by update_status)
    # rather than in the worker threads.
    for future in concurrent.futures.as_completed(futures):
      project = future.result()
      update_last_successful_build(project, build_tag)
      projects.append(project)

  sort_projects(projects)
  data = {'projects': projects}
  upload_status(data, status_filename)
192
193
def update_build_badges(project, last_build_successful,
                        last_coverage_build_successful):
  """Upload badges of given project.

  Args:
    project: Project name.
    last_build_successful: Whether the last fuzzing build succeeded.
    last_coverage_build_successful: False if there was an unsuccessful
        coverage build and None if the target does not support coverage
        (e.g. Python or Java targets).
  """
  badge = 'building'
  if last_coverage_build_successful is False:
    badge = 'coverage_failing'
  # A failing fuzzing build takes precedence over a failing coverage build.
  if not last_build_successful:
    badge = 'failing'

  print("[badge] {}: {}".format(project, badge))

  # The bucket is invariant across badge formats; fetch it once instead of
  # doing a GCS round-trip per extension inside the loop.
  status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)

  for extension in BADGE_IMAGE_TYPES:
    badge_name = '{badge}.{extension}'.format(badge=badge, extension=extension)

    # Copy blob from badge_images/badge_name to badges/project/.
    blob_name = '{badge_dir}/{badge_name}'.format(badge_dir=BADGE_DIR,
                                                  badge_name=badge_name)

    destination_blob_name = '{badge_dir}/{project_name}.{extension}'.format(
        badge_dir=DESTINATION_BADGE_DIR,
        project_name=project,
        extension=extension)

    badge_blob = status_bucket.blob(blob_name)
    status_bucket.copy_blob(badge_blob,
                            status_bucket,
                            new_name=destination_blob_name)
225
226
def upload_log(build_id):
  """Copy the GCB build log for |build_id| into the status bucket.

  Returns True when the log exists in the destination (copied now or on a
  previous run); False when the source log is missing.
  """
  client = get_storage_client()
  status_bucket = client.get_bucket(STATUS_BUCKET)
  gcb_bucket = client.get_bucket(build_project.GCB_LOGS_BUCKET)

  log_name = 'log-{0}.txt'.format(build_id)
  source_log = gcb_bucket.blob(log_name)
  destination_log = status_bucket.blob(log_name)

  if not source_log.exists():
    print('Failed to find build log {0}'.format(log_name), file=sys.stderr)
    return False

  # Already mirrored by an earlier run; nothing to do.
  if destination_log.exists():
    return True

  gcb_bucket.copy_blob(source_log, status_bucket)
  return True
244
245
# pylint: disable=no-member
def update_status(event, context):
  """Entry point for cloud function to update build statuses and badges."""
  del context

  if 'data' not in event:
    raise RuntimeError('No data')
  status_type = base64.b64decode(event['data']).decode()

  if status_type == 'badges':
    update_badges()
    return

  if status_type == 'fuzzing':
    tag, status_filename = (build_project.FUZZING_BUILD_TAG,
                            FUZZING_STATUS_FILENAME)
  elif status_type == 'coverage':
    tag, status_filename = (build_and_run_coverage.COVERAGE_BUILD_TAG,
                            COVERAGE_STATUS_FILENAME)
  else:
    raise RuntimeError('Invalid build status type ' + status_type)

  with ndb.Client().context():
    update_build_status(tag, status_filename)
271
272
def load_status_from_gcs(filename):
  """Load statuses from bucket.

  Returns a mapping of project name to the success flag of its most
  recent build, skipping projects with no build history.
  """
  status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
  status = json.loads(status_bucket.blob(filename).download_as_string())
  return {
      project['name']: project['history'][0]['success']
      for project in status['projects']
      if project['history']
  }
284
285
def update_badges():
  """Update badges.

  Loads the latest fuzzing and coverage statuses from GCS, then uploads
  a badge for every datastore project that has a fuzzing status, fanning
  the uploads out over a thread pool.
  """
  project_build_statuses = load_status_from_gcs(FUZZING_STATUS_FILENAME)
  coverage_build_statuses = load_status_from_gcs(COVERAGE_STATUS_FILENAME)

  with concurrent.futures.ThreadPoolExecutor(max_workers=32) as executor:
    futures = []
    with ndb.Client().context():
      for project in Project.query():
        # Skip projects without a fuzzing build status yet.
        if project.name not in project_build_statuses:
          continue
        # Certain projects (e.g. JVM and Python) do not have any coverage
        # builds, but should still receive a badge.
        coverage_build_status = None
        if project.name in coverage_build_statuses:
          coverage_build_status = coverage_build_statuses[project.name]

        futures.append(
            executor.submit(update_build_badges, project.name,
                            project_build_statuses[project.name],
                            coverage_build_status))
    # Wait for all uploads before returning from the cloud function.
    concurrent.futures.wait(futures)
308