# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


from recipe_engine import recipe_api
import shlex


DEFAULT_TASK_EXPIRATION = 20*60*60
DEFAULT_TASK_TIMEOUT = 4*60*60
DEFAULT_IO_TIMEOUT = 40*60

MILO_LOG_LINK = 'https://luci-milo.appspot.com/swarming/task/%s'


class SkiaSwarmingApi(recipe_api.RecipeApi):
  """Provides steps to run Skia tasks on swarming bots."""

  @property
  def swarming_temp_dir(self):
    """Path where artifacts like isolate file and json output will be stored."""
    return self.m.path['start_dir'].join('swarming_temp_dir')

  @property
  def tasks_output_dir(self):
    """Directory where the outputs of the swarming tasks will be stored."""
    return self.swarming_temp_dir.join('outputs')

  def isolated_file_path(self, task_name):
    """Get the path to the given task's .isolated file."""
    return self.swarming_temp_dir.join('skia-task-%s.isolated' % task_name)

  def setup(self, luci_go_dir, swarming_rev=None):
    """Performs setup steps for swarming."""
    self.m.swarming_client.checkout(revision=swarming_rev)
    self.m.swarming.check_client_version(step_test_data=(0, 8, 6))
    self.setup_go_isolate(luci_go_dir)
    self.m.swarming.add_default_tag('allow_milo:1')
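  # Illustrative sketch: a recipe might perform the swarming setup roughly
  # like this. The module name (api.skia_swarming), the luci-go checkout path
  # and the property name below are assumptions, not fixed by this file.
  #
  #   api.skia_swarming.setup(
  #       luci_go_dir=api.path['start_dir'].join('luci-go-checkout'),
  #       swarming_rev=api.properties.get('swarming_revision'))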

  # TODO(rmistry): Remove once the Go binaries are moved to recipes or buildbot.
  def setup_go_isolate(self, luci_go_dir):
    """Generates and puts in place the isolate Go binary."""
    depot_tools_path = self.m.depot_tools.package_repo_resource()
    env = {'PATH': self.m.path.pathsep.join([
                       str(depot_tools_path), '%(PATH)s'])}
    with self.m.context(env=env):
      self.m.step('download luci-go linux',
                  ['download_from_google_storage', '--no_resume',
                   '--platform=linux*', '--no_auth',
                   '--bucket', 'chromium-luci',
                   '-d', luci_go_dir.join('linux64')])
      self.m.step('download luci-go mac',
                  ['download_from_google_storage', '--no_resume',
                   '--platform=darwin', '--no_auth',
                   '--bucket', 'chromium-luci',
                   '-d', luci_go_dir.join('mac64')])
      self.m.step('download luci-go win',
                  ['download_from_google_storage', '--no_resume',
                   '--platform=win32', '--no_auth', '--bucket',
                   'chromium-luci',
                   '-d', luci_go_dir.join('win64')])
    # Copy binaries to the expected location.
    dest = self.m.path['start_dir'].join('luci-go')
    self.m.run.rmtree(dest)
    self.m.file.copytree('Copy Go binary',
                         source=luci_go_dir,
                         dest=dest)
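  # Note: after this method runs, the per-platform binaries downloaded above
  # live under <luci_go_dir>/linux64, mac64 and win64, and the whole tree is
  # copied to <start_dir>/luci-go (the "expected location" referenced in the
  # comment above).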

  def create_isolated_gen_json(self, isolate_path, base_dir, os_type,
                               task_name, extra_variables, blacklist=None):
    """Creates an isolated.gen.json file (used by the isolate recipe module).

    Args:
      isolate_path: path obj. Path to the isolate file.
      base_dir: path obj. Dir that is the base of all paths in the isolate file.
      os_type: str. The OS type to use when archiving the isolate file.
          E.g.: linux.
      task_name: str. The generated file will be named
          '<task_name>.isolated.gen.json'.
      extra_variables: dict of str to str. The extra vars to pass to isolate.
          E.g.: {'SLAVE_NUM': '1', 'MASTER': 'ChromiumPerfFYI'}
      blacklist: list of regular expressions indicating which files/directories
          not to archive.
    """
    self.m.file.ensure_directory(
        'makedirs swarming tmp dir',
        self.swarming_temp_dir)
    isolated_path = self.isolated_file_path(task_name)
    isolate_args = [
      '--isolate', isolate_path,
      '--isolated', isolated_path,
      '--config-variable', 'OS', os_type,
    ]
    if blacklist:
      for b in blacklist:
        isolate_args.extend(['--blacklist', b])
    for k, v in extra_variables.iteritems():
      isolate_args.extend(['--extra-variable', k, v])
    isolated_gen_dict = {
      'version': 1,
      'dir': base_dir,
      'args': isolate_args,
    }
    isolated_gen_json = self.swarming_temp_dir.join(
        '%s.isolated.gen.json' % task_name)
    self.m.file.write_text(
        'Write %s.isolated.gen.json' % task_name,
        isolated_gen_json,
        self.m.json.dumps(isolated_gen_dict, indent=4),
    )
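  # Illustrative sketch: for a hypothetical task_name='test_skia' with
  # os_type='linux' and extra_variables={'BUILD': 'Debug'}, the file written
  # above would contain JSON roughly of this shape (paths are placeholders):
  #
  #   {
  #     "version": 1,
  #     "dir": "<base_dir>",
  #     "args": [
  #       "--isolate", "<isolate_path>",
  #       "--isolated", "<swarming_temp_dir>/skia-task-test_skia.isolated",
  #       "--config-variable", "OS", "linux",
  #       "--extra-variable", "BUILD", "Debug"
  #     ]
  #   }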

  def batcharchive(self, targets):
    """Calls batcharchive on the generated isolated.gen.json files.

    Args:
      targets: list of str. The task names whose isolated.gen.json files
               should be archived.

    Returns:
      list of tuples containing (task_name, swarming_hash).
    """
    return self.m.isolate.isolate_tests(
        verbose=True,  # To avoid no output timeouts.
        build_dir=self.swarming_temp_dir,
        targets=targets).presentation.properties['swarm_hashes'].items()
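  # Illustrative sketch, assuming this module is exposed to recipes as
  # api.skia_swarming and gen.json files were created for both task names:
  #
  #   hashes = api.skia_swarming.batcharchive(['test_skia', 'perf_skia'])
  #   # e.g. [('test_skia', '<hash1>'), ('perf_skia', '<hash2>')]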

  def trigger_swarming_tasks(
      self, swarm_hashes, dimensions, idempotent=False, store_output=True,
      extra_args=None, expiration=None, hard_timeout=None, io_timeout=None,
      cipd_packages=None):
    """Triggers swarming tasks using swarm hashes.

    Args:
      swarm_hashes: list of (task_name, swarming_hash) tuples, as returned by
                    batcharchive.
      dimensions: dict of str to str. The dimensions to run the task on.
                  E.g.: {'os': 'Ubuntu', 'gpu': '10de', 'pool': 'Skia'}
      idempotent: bool. Whether or not to de-duplicate tasks.
      store_output: bool. Whether task output should be stored.
      extra_args: list of str. Extra arguments to pass to the task.
      expiration: int. Seconds before the task expires if it is not picked up.
                  DEFAULT_TASK_EXPIRATION is used if this argument is None.
      hard_timeout: int. Seconds before the task times out if it has not
                    completed. DEFAULT_TASK_TIMEOUT is used if this argument
                    is None.
      io_timeout: int. Seconds before the task times out if it produces no
                  output. DEFAULT_IO_TIMEOUT is used if this argument is None.
      cipd_packages: CIPD packages which these tasks depend on.

    Returns:
      List of swarming.SwarmingTask instances.
    """
    swarming_tasks = []
    for task_name, swarm_hash in swarm_hashes:
      swarming_task = self.m.swarming.task(
          title=task_name,
          cipd_packages=cipd_packages,
          isolated_hash=swarm_hash)
      if store_output:
        swarming_task.task_output_dir = self.tasks_output_dir.join(task_name)
      swarming_task.dimensions = dimensions
      swarming_task.idempotent = idempotent
      swarming_task.priority = 90
      swarming_task.expiration = (
          expiration if expiration else DEFAULT_TASK_EXPIRATION)
      swarming_task.hard_timeout = (
          hard_timeout if hard_timeout else DEFAULT_TASK_TIMEOUT)
      swarming_task.io_timeout = (
          io_timeout if io_timeout else DEFAULT_IO_TIMEOUT)
      if extra_args:
        swarming_task.extra_args = extra_args
      revision = self.m.properties.get('revision')
      if revision:
        swarming_task.tags.add('revision:%s' % revision)
      swarming_tasks.append(swarming_task)
    step_results = self.m.swarming.trigger(swarming_tasks)
    for step_result in step_results:
      self._add_log_links(step_result, step_result.json.output)
    return swarming_tasks
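  # Illustrative sketch (module name and dimension values are assumptions,
  # continuing the hypothetical batcharchive example above):
  #
  #   tasks = api.skia_swarming.trigger_swarming_tasks(
  #       hashes, dimensions={'os': 'Ubuntu', 'pool': 'Skia'}, idempotent=True)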

  def collect_swarming_task(self, swarming_task):
    """Collects the specified swarming task.

    Args:
      swarming_task: An instance of swarming.SwarmingTask.
    """
    try:
      rv = self.m.swarming.collect_task(swarming_task)
    except self.m.step.StepFailure as e:  # pragma: no cover
      step_result = self.m.step.active_result
      # Change the step result to an infra failure if the swarming task failed
      # due to expiration, timeout, bot crash or task cancellation.
      # Infra failures have step.EXCEPTION.
      states_infra_failure = (
          self.m.swarming.State.EXPIRED, self.m.swarming.State.TIMED_OUT,
          self.m.swarming.State.BOT_DIED, self.m.swarming.State.CANCELED)
      summary = step_result.swarming.summary
      if summary['shards'][0]['state'] in states_infra_failure:
        step_result.presentation.status = self.m.step.EXCEPTION
        raise self.m.step.InfraFailure(e.name, step_result)
      raise
    finally:
      step_result = self.m.step.active_result
      # Add log link.
      self._add_log_links(step_result, step_result.swarming.summary)
    return rv
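  # Illustrative sketch: a recipe would typically collect each triggered task
  # in turn (continuing the hypothetical example above):
  #
  #   for task in tasks:
  #     api.skia_swarming.collect_swarming_task(task)
  #
  # Expiration, timeout, bot death and task cancellation are reported as
  # InfraFailure; any other failure re-raises the original StepFailure.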

  def _add_log_links(self, step_result, summary):
    """Add Milo log links to all shards in the step."""
    ids = []
    shards = summary.get('shards')
    if shards:
      for shard in shards:
        ids.append(shard['id'])
    else:
      for _, task in summary.get('tasks', {}).iteritems():
        ids.append(task['task_id'])
    for idx, task_id in enumerate(ids):
      link = MILO_LOG_LINK % task_id
      k = 'view steps on Milo'
      if len(ids) > 1:  # pragma: nocover
        k += ' (shard index %d, %d total)' % (idx, len(ids))
      step_result.presentation.links[k] = link