# Copyright 2020 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
"""Cloud function to request builds."""
import base64
import concurrent.futures
import json
import sys

import google.auth
from googleapiclient.discovery import build
from google.cloud import ndb
from google.cloud import storage

import build_and_run_coverage
import build_project
from datastore_entities import BuildsHistory
from datastore_entities import LastSuccessfulBuild
from datastore_entities import Project

BADGE_DIR = 'badge_images'
BADGE_IMAGE_TYPES = {'svg': 'image/svg+xml', 'png': 'image/png'}
DESTINATION_BADGE_DIR = 'badges'
MAX_BUILD_LOGS = 7

STATUS_BUCKET = 'oss-fuzz-build-logs'

FUZZING_STATUS_FILENAME = 'status.json'
COVERAGE_STATUS_FILENAME = 'status-coverage.json'

# pylint: disable=invalid-name
_client = None
46
class MissingBuildLogError(Exception):
  """Raised when a build's log file is absent from cloud storage."""
49
50
# pylint: disable=global-statement
def get_storage_client():
  """Return the process-wide storage client, creating it on first use."""
  global _client
  if _client:
    return _client
  _client = storage.Client()
  return _client
59
60
def is_build_successful(build_obj):
  """Return whether the given Cloud Build object finished successfully."""
  status = build_obj['status']
  return status == 'SUCCESS'
64
65
def upload_status(data, status_filename):
  """Serialize *data* to JSON and upload it to the status bucket."""
  target_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
  blob = target_bucket.blob(status_filename)
  # The status file is polled by the web UI; disable caching so
  # freshly-uploaded results are visible immediately.
  blob.cache_control = 'no-cache'
  blob.upload_from_string(json.dumps(data), content_type='application/json')
72
73
def sort_projects(projects):
  """Sort projects in place: failures first, successes next, unbuilt last."""

  def sort_key(project):
    history = project['history']
    if not history:
      # Projects that have never finished a build go to the end.
      return 2
    # Latest failed build sorts before latest successful build.
    return 1 if history[0]['success'] else 0

  projects.sort(key=sort_key)
89
90
def get_build(cloudbuild, image_project, build_id):
  """Fetch one build resource from the Cloud Build API."""
  request = cloudbuild.projects().builds().get(projectId=image_project,
                                               id=build_id)
  return request.execute()
95
96
def update_last_successful_build(project, build_tag):
  """Reconcile *project* with its LastSuccessfulBuild datastore entity.

  Either surfaces the stored record onto the project dict (when the
  current run produced no success) or writes the project's newest
  success back to datastore.
  """
  key_id = project['name'] + '-' + build_tag
  stored = ndb.Key(LastSuccessfulBuild, key_id).get()
  if not stored and 'last_successful_build' not in project:
    # No success recorded anywhere; nothing to reconcile.
    return

  if 'last_successful_build' not in project:
    # No fresh success in this run; expose the stored record instead.
    project['last_successful_build'] = {
        'build_id': stored.build_id,
        'finish_time': stored.finish_time
    }
    return

  latest = project['last_successful_build']
  if stored:
    stored.build_id = latest['build_id']
    stored.finish_time = latest['finish_time']
  else:
    stored = LastSuccessfulBuild(id=key_id,
                                 project=project['name'],
                                 build_id=latest['build_id'],
                                 finish_time=latest['finish_time'])
  stored.put()
122
123
# pylint: disable=no-member
def get_build_history(build_ids):
  """Return build history (newest first) for a project's recent builds.

  Walks the given build ids from newest to oldest, keeping at most
  MAX_BUILD_LOGS finished builds, and records the most recent successful
  one. Raises MissingBuildLogError if a finished build has no log file.
  """
  credentials, image_project = google.auth.default()
  cloudbuild = build('cloudbuild',
                     'v1',
                     credentials=credentials,
                     cache_discovery=False)

  history = []
  last_successful_build = None

  # build_ids is ordered oldest-first; iterate newest-first.
  for build_id in reversed(build_ids):
    project_build = get_build(cloudbuild, image_project, build_id)
    # Ignore builds that are still running, queued or cancelled.
    if project_build['status'] not in ('SUCCESS', 'FAILURE', 'TIMEOUT'):
      continue

    finish_time = project_build['finishTime']
    succeeded = is_build_successful(project_build)
    if last_successful_build is None and succeeded:
      last_successful_build = {
          'build_id': build_id,
          'finish_time': finish_time,
      }

    if not upload_log(build_id):
      log_name = f'log-{build_id}'
      raise MissingBuildLogError(f'Missing build log file {log_name}')

    history.append({
        'build_id': build_id,
        'finish_time': finish_time,
        'success': succeeded
    })
    if len(history) == MAX_BUILD_LOGS:
      break

  project = {'history': history}
  if last_successful_build:
    project['last_successful_build'] = last_successful_build
  return project
164
165
# pylint: disable=too-many-locals
def update_build_status(build_tag, status_filename):
  """Gather build histories for every project and upload the status file."""

  def process_project(project_build):
    """Build the status entry for a single project."""
    project = get_build_history(project_build.build_ids)
    project['name'] = project_build.project
    print('Processing project', project['name'])
    return project

  build_query = BuildsHistory.query(
      BuildsHistory.build_tag == build_tag).order('project')

  projects = []
  with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
    futures = [
        executor.submit(process_project, project_build)
        for project_build in build_query
    ]
    for future in concurrent.futures.as_completed(futures):
      project = future.result()
      update_last_successful_build(project, build_tag)
      projects.append(project)

  sort_projects(projects)
  upload_status({'projects': projects}, status_filename)
192
193
def update_build_badges(project, last_build_successful,
                        last_coverage_build_successful):
  """Upload badge images of the given project to the status bucket.

  Args:
    project: project name.
    last_build_successful: whether the latest fuzzing build succeeded.
    last_coverage_build_successful: False if there was an unsuccessful
        coverage build, None if the target does not support coverage
        (e.g. Python or Java targets).
  """
  badge = 'building'
  if last_coverage_build_successful is False:
    badge = 'coverage_failing'
  # A failing fuzzing build takes precedence over a failing coverage build.
  if not last_build_successful:
    badge = 'failing'

  print(f'[badge] {project}: {badge}')

  # The bucket handle is loop-invariant (get_bucket issues an API call);
  # fetch it once instead of once per image type.
  status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)

  for extension in BADGE_IMAGE_TYPES:
    badge_name = f'{badge}.{extension}'

    # Copy blob from badge_images/badge_name to badges/project/
    blob_name = f'{BADGE_DIR}/{badge_name}'
    destination_blob_name = f'{DESTINATION_BADGE_DIR}/{project}.{extension}'

    badge_blob = status_bucket.blob(blob_name)
    status_bucket.copy_blob(badge_blob,
                            status_bucket,
                            new_name=destination_blob_name)
221
222
def upload_log(build_id):
  """Copy a build's GCB log into the status bucket.

  Returns True when the log is present in the status bucket (copied now
  or previously), False when the source log could not be found.
  """
  client = get_storage_client()
  status_bucket = client.get_bucket(STATUS_BUCKET)
  gcb_bucket = client.get_bucket(build_project.GCB_LOGS_BUCKET)
  log_name = f'log-{build_id}.txt'
  source_log = gcb_bucket.blob(log_name)

  if not source_log.exists():
    print('Failed to find build log', log_name, file=sys.stderr)
    return False

  if status_bucket.blob(log_name).exists():
    # Already copied by a previous run.
    return True

  gcb_bucket.copy_blob(source_log, status_bucket)
  return True
240
241
# pylint: disable=no-member
def update_status(event, context):
  """Entry point for cloud function to update build statuses and badges.

  Raises RuntimeError when the Pub/Sub event carries no data or an
  unknown status type.
  """
  del context  # Unused.

  if 'data' not in event:
    raise RuntimeError('No data')
  status_type = base64.b64decode(event['data']).decode()

  if status_type == 'badges':
    update_badges()
    return

  if status_type == 'fuzzing':
    tag, status_filename = (build_project.FUZZING_BUILD_TYPE,
                            FUZZING_STATUS_FILENAME)
  elif status_type == 'coverage':
    tag, status_filename = (build_and_run_coverage.COVERAGE_BUILD_TYPE,
                            COVERAGE_STATUS_FILENAME)
  else:
    raise RuntimeError('Invalid build status type ' + status_type)

  with ndb.Client().context():
    update_build_status(tag, status_filename)
267
268
def load_status_from_gcs(filename):
  """Return {project_name: latest_build_success} from a GCS status file."""
  status_bucket = get_storage_client().get_bucket(STATUS_BUCKET)
  status = json.loads(status_bucket.blob(filename).download_as_string())
  # Projects with no finished builds yet carry an empty history and are
  # omitted from the result.
  return {
      project['name']: project['history'][0]['success']
      for project in status['projects']
      if project['history']
  }
280
281
def update_badges():
  """Refresh badge images for every project with a recorded fuzzing build."""
  fuzzing_statuses = load_status_from_gcs(FUZZING_STATUS_FILENAME)
  coverage_statuses = load_status_from_gcs(COVERAGE_STATUS_FILENAME)

  with concurrent.futures.ThreadPoolExecutor(max_workers=32) as executor:
    futures = []
    with ndb.Client().context():
      for project in Project.query():
        name = project.name
        if name not in fuzzing_statuses:
          continue
        # Certain projects (e.g. JVM and Python) do not have any coverage
        # builds, but should still receive a badge; .get() yields None
        # for them.
        futures.append(
            executor.submit(update_build_badges, name,
                            fuzzing_statuses[name],
                            coverage_statuses.get(name)))
    concurrent.futures.wait(futures)
304