# Copyright 2020 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Methods related to querying the ResultDB BigQuery tables."""

import json
import logging
import math
import multiprocessing.pool
import os
import subprocess
import threading
import time
from typing import Any, Collection, Dict, Iterable, List, Optional, Tuple, Union

import six

from typ import expectations_parser
from typ import json_results
from unexpected_passes_common import builders as builders_module
from unexpected_passes_common import constants
from unexpected_passes_common import data_types
from unexpected_passes_common import expectations
from unexpected_passes_common import multiprocessing_utils

DEFAULT_NUM_SAMPLES = 100
MAX_ROWS = (2**31) - 1
MAX_QUERY_TRIES = 3
# Used to prevent us from triggering too many queries simultaneously and causing
# a bunch of rate limit errors. Anything below 1.5 seemed to result in enough
# rate limit errors to cause problems. Raising above that for safety.
QUERY_DELAY = 2
# The target number of results/rows per query when running in large query mode.
# Higher values = longer individual query times and higher chances of running
# out of memory in BigQuery. Lower values = more parallelization overhead and
# more issues with rate limit errors.
TARGET_RESULTS_PER_QUERY = 20000

# Subquery for getting all try builds that were used for CL submission. 30 days
# is chosen because the ResultDB tables we pull data from only keep data around
# for 30 days.
SUBMITTED_BUILDS_TEMPLATE = """\
    SELECT
      CONCAT("build-", CAST(unnested_builds.id AS STRING)) as id
    FROM
      `commit-queue.{project_view}.attempts`,
      UNNEST(builds) as unnested_builds,
      UNNEST(gerrit_changes) as unnested_changes
    WHERE
      unnested_builds.host = "cr-buildbucket.appspot.com"
      AND unnested_changes.submit_status = "SUCCESS"
      AND start_time > TIMESTAMP_SUB(CURRENT_TIMESTAMP(),
                                     INTERVAL 30 DAY)"""
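
# An illustrative use of the template above ('chromium' is just an example
# project view, not necessarily a real one):
#   submitted_builds_subquery = SUBMITTED_BUILDS_TEMPLATE.format(
#       project_view='chromium')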

QueryResult = Dict[str, Any]
QueryParameters = Dict[str, Dict[str, Any]]
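
# An illustrative QueryResult row showing only the fields this module reads
# (real rows depend on the queries a subclass generates):
#   {'id': 'build-1234', 'test_id': 'ninja://gpu:suite/test_case',
#    'status': 'FAIL', 'typ_tags': ['win', 'nvidia'], 'step_name': 'step'}
# An illustrative QueryParameters value: {'INT64': {'num_builds': 5}}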

# pylint: disable=super-with-arguments,useless-object-inheritance


class BigQueryQuerier(object):
  """Class to handle all BigQuery queries for a script invocation."""

  def __init__(self,
               suite: Optional[str],
               project: str,
               num_samples: int,
               large_query_mode: bool,
               num_jobs: Optional[int],
               use_batching: bool = True):
    """
    Args:
      suite: A string containing the name of the suite that is being queried
          for. Can be None if there is no differentiation between different
          suites.
      project: A string containing the billing project to use for BigQuery.
      num_samples: An integer containing the number of builds to pull results
          from.
      large_query_mode: A boolean indicating whether large query mode should be
          used. In this mode, an initial, smaller query is made and its results
          are used to perform additional filtering on a second, larger query in
          BigQuery. This works around hitting a hard memory limit when running
          the ORDER BY clause.
      num_jobs: An integer specifying how many jobs to run in parallel. If
          None, one job is spawned per builder so that all queries run at the
          same time.
      use_batching: Whether to use batching when running queries. Batching
          allows a much greater amount of parallelism by avoiding usage limits,
          but also adds a variable amount of overhead, since each query has to
          wait for idle resources.
    """
    self._suite = suite
    self._project = project
    self._num_samples = num_samples or DEFAULT_NUM_SAMPLES
    self._large_query_mode = large_query_mode
    self._num_jobs = num_jobs
    self._use_batching = use_batching

    assert self._num_samples > 0
    assert (self._num_jobs is None or self._num_jobs > 0)

  def FillExpectationMapForBuilders(
      self, expectation_map: data_types.TestExpectationMap,
      builders: Collection[data_types.BuilderEntry]
  ) -> Dict[str, data_types.ResultListType]:
    """Fills |expectation_map| with results from |builders|.

    Args:
      expectation_map: A data_types.TestExpectationMap. Will be modified
          in-place.
      builders: An iterable of data_types.BuilderEntry containing the builders
          to query.

    Returns:
      A dict containing any results that were retrieved that did not have a
      matching expectation in |expectation_map| in the following format:
      {
        |builder_type|:|builder_name| (str): [
          result1 (data_types.Result),
          result2 (data_types.Result),
          ...
        ],
      }
    """
    start_time = time.time()
    logging.debug('Starting to fill expectation map for %d builders',
                  len(builders))
    assert isinstance(expectation_map, data_types.TestExpectationMap)
    # Ensure that all the builders are of the same type since we make some
    # assumptions about that later on.
    assert builders
    builder_type = None
    for b in builders:
      if builder_type is None:
        builder_type = b.builder_type
      else:
        assert b.builder_type == builder_type

    # Filter out any builders that we can easily determine do not currently
    # produce data we care about.
    builders = self._FilterOutInactiveBuilders(builders, builder_type)

    # If we don't have an explicit number of jobs set, spin up a separate
    # process for each query/add step. This is wasteful in the sense that we'll
    # have a bunch of idle processes once faster steps start finishing, but
    # ensures that we start slow queries early and avoids the overhead of
    # passing large amounts of data between processes. See crbug.com/1182459 for
    # more information on performance considerations.
    num_jobs = self._num_jobs or len(builders)
    args = [(b, expectation_map) for b in builders]

    with multiprocessing_utils.GetProcessPoolContext(num_jobs) as pool:
      results = pool.map(self._QueryAddCombined, args)

    tmp_expectation_map = data_types.TestExpectationMap()
    all_unmatched_results = {}

    for (unmatched_results, prefixed_builder_name, merge_map) in results:
      tmp_expectation_map.Merge(merge_map, expectation_map)
      if unmatched_results:
        all_unmatched_results[prefixed_builder_name] = unmatched_results

    expectation_map.clear()
    expectation_map.update(tmp_expectation_map)

    logging.debug('Filling expectation map took %f', time.time() - start_time)
    return all_unmatched_results

  def _FilterOutInactiveBuilders(self,
                                 builders: Iterable[data_types.BuilderEntry],
                                 builder_type: str
                                 ) -> List[data_types.BuilderEntry]:
    """Filters out any builders that are not producing data.

    This helps save time on querying, as querying for the builder names is cheap
    while querying for individual results from a builder is expensive. Filtering
    out inactive builders lets us preemptively remove builders that we know we
    won't get any data from, and thus don't need to waste time querying.

    Args:
      builders: An iterable of data_types.BuilderEntry containing the builders
          to query.
      builder_type: A string containing the type of builder to query, either
          "ci" or "try".

    Returns:
      A copy of |builders| with any inactive builders removed.
    """
    include_internal_builders = any(b.is_internal_builder for b in builders)
    query = self._GetActiveBuilderQuery(
        builder_type, include_internal_builders).encode('utf-8')
    cmd = GenerateBigQueryCommand(self._project, {}, batch=False)
    with open(os.devnull, 'w', newline='', encoding='utf-8') as devnull:
      p = subprocess.Popen(cmd,
                           stdout=subprocess.PIPE,
                           stderr=devnull,
                           stdin=subprocess.PIPE)
      stdout, _ = p.communicate(query)
    if not isinstance(stdout, six.string_types):
      stdout = stdout.decode('utf-8')
    results = json.loads(stdout)

    # We filter from an initial list instead of directly using the returned
    # builders since there are cases where they aren't equivalent, such as for
    # GPU tests if a particular builder doesn't run a particular suite. This
    # could be encapsulated in the query, but doing so would cause the query to
    # take longer. Since generating the initial list locally is basically
    # instantaneous and we're optimizing for runtime, filtering is the better
    # option.
    active_builders = {r['builder_name'] for r in results}
    filtered_builders = [b for b in builders if b.name in active_builders]
    return filtered_builders

  def _QueryAddCombined(
      self,
      inputs: Tuple[data_types.BuilderEntry, data_types.TestExpectationMap]
  ) -> Tuple[data_types.ResultListType, str, data_types.TestExpectationMap]:
    """Combines the query and add steps for use in a process pool.

    Args:
      inputs: An iterable of inputs for QueryBuilder() and
          data_types.TestExpectationMap.AddResultList(). Should be in the
          order: builder, expectation_map.

    Returns:
      The output of data_types.TestExpectationMap.AddResultList().
    """
    start_time = time.time()
    builder, expectation_map = inputs
    logging.debug('Starting query for builder %s', builder.name)
    results, expectation_files = self.QueryBuilder(builder)
    logging.debug('Query for builder %s took %f', builder.name,
                  time.time() - start_time)

    start_time = time.time()
    prefixed_builder_name = '%s/%s:%s' % (builder.project, builder.builder_type,
                                          builder.name)
    logging.debug('Starting data processing for builder %s', builder.name)
    unmatched_results = expectation_map.AddResultList(prefixed_builder_name,
                                                      results,
                                                      expectation_files)
    logging.debug('Data processing for builder %s took %f', builder.name,
                  time.time() - start_time)

    return unmatched_results, prefixed_builder_name, expectation_map

  def QueryBuilder(self, builder: data_types.BuilderEntry
                   ) -> Tuple[data_types.ResultListType, Optional[List[str]]]:
    """Queries ResultDB for results from |builder|.

    Args:
      builder: A data_types.BuilderEntry containing the builder to query.

    Returns:
      A tuple (results, expectation_files). |results| is the results returned
      by the query converted into a list of data_types.Result objects.
      |expectation_files| is a list of strings denoting which expectation files
      are relevant to |results|, or None if all should be used.
    """

    query_generator = self._GetQueryGeneratorForBuilder(builder)
    if not query_generator:
      # No affected tests on this builder, so early return.
      return [], None

    # Query for the test data from the builder, splitting the query if we run
    # into the BigQuery hard memory limit. Even if we keep failing, this will
    # eventually stop due to getting a QuerySplitError when we can't split the
    # query any further.
    query_results = None
    while query_results is None:
      try:
        query_results = self._RunBigQueryCommandsForJsonOutput(
            query_generator.GetQueries(), {
                '': {
                    'builder_name': builder.name
                },
                'INT64': {
                    'num_builds': self._num_samples
                }
            })
      except MemoryLimitError:
        logging.warning(
            'Query to builder %s hit BigQuery hard memory limit, trying again '
            'with more query splitting.', builder.name)
        query_generator.SplitQuery()

    results = []
    if not query_results:
      # Don't bother logging if we know this is a fake CI builder.
      if not (builder.builder_type == constants.BuilderTypes.CI
              and builder in builders_module.GetInstance().GetFakeCiBuilders()):
        logging.warning(
            'Did not get results for "%s", but this may be because its '
            'results do not apply to any expectations for this suite.',
            builder.name)
      return results, None

    # It's possible that a builder runs multiple versions of a test with
    # different expectation files for each version. So, find a result for each
    # unique step and get the expectation files from all of them.
    results_for_each_step = {}
    for qr in query_results:
      step_name = qr['step_name']
      if step_name not in results_for_each_step:
        results_for_each_step[step_name] = qr

    expectation_files = []
    for qr in results_for_each_step.values():
      # None is a special value indicating "use all expectation files", so
      # handle that.
      ef = self._GetRelevantExpectationFilesForQueryResult(qr)
      if ef is None:
        expectation_files = None
        break
      expectation_files.extend(ef)
    if expectation_files is not None:
      expectation_files = list(set(expectation_files))

    for r in query_results:
      if self._ShouldSkipOverResult(r):
        continue
      results.append(self._ConvertJsonResultToResultObject(r))
    logging.debug('Got %d results for %s builder %s', len(results),
                  builder.builder_type, builder.name)
    return results, expectation_files

  def _ConvertJsonResultToResultObject(self, json_result: QueryResult
                                       ) -> data_types.Result:
    """Converts a single BigQuery JSON result to a data_types.Result.

    Args:
      json_result: A single row/result from BigQuery in JSON format.

    Returns:
      A data_types.Result object containing the information from |json_result|.
    """
    build_id = _StripPrefixFromBuildId(json_result['id'])
    test_name = self._StripPrefixFromTestId(json_result['test_id'])
    actual_result = _ConvertActualResultToExpectationFileFormat(
        json_result['status'])
    tags = expectations.GetInstance().FilterToKnownTags(json_result['typ_tags'])
    step = json_result['step_name']
    return data_types.Result(test_name, tags, actual_result, step, build_id)

  def _GetRelevantExpectationFilesForQueryResult(self, query_result: QueryResult
                                                 ) -> Optional[Iterable[str]]:
    """Gets the relevant expectation file names for a given query result.

    Args:
      query_result: A dict containing a single row/result from a BigQuery
          query.

    Returns:
      An iterable of strings containing expectation file names that are
      relevant to |query_result|, or None if all expectation files should be
      considered relevant.
    """
    raise NotImplementedError()

  def _ShouldSkipOverResult(self, result: QueryResult) -> bool:
    """Whether |result| should be ignored and skipped over.

    Args:
      result: A dict containing a single BigQuery result row.

    Returns:
      True if the result should be skipped over/ignored, otherwise False.
    """
    del result
    return False

  def _GetQueryGeneratorForBuilder(self, builder: data_types.BuilderEntry
                                   ) -> Optional['BaseQueryGenerator']:
    """Returns a BaseQueryGenerator instance to only include relevant tests.

    Args:
      builder: A data_types.BuilderEntry containing the builder to query.

    Returns:
      None if the query returned no results. Otherwise, some instance of a
      BaseQueryGenerator.
    """
    raise NotImplementedError()

  def _RunBigQueryCommandsForJsonOutput(self, queries: Union[str, List[str]],
                                        parameters: QueryParameters
                                        ) -> List[QueryResult]:
    """Runs the given BigQuery queries and returns their outputs as JSON.

    Args:
      queries: A single string or a list of strings containing valid BigQuery
          queries to run.
      parameters: A dict specifying parameters to substitute in the query in
          the format {type: {key: value}}. For example, the dict:
          {'INT64': {'num_builds': 5}}
          would result in --parameter=num_builds:INT64:5 being passed to
          BigQuery.

    Returns:
      The combined results of |queries| in JSON.
    """
    if isinstance(queries, str):
      queries = [queries]
    assert isinstance(queries, list)

    processes = set()
    processes_lock = threading.Lock()

    def run_cmd_in_thread(inputs: Tuple[List[str], str]) -> str:
      cmd, query = inputs
      query = query.encode('utf-8')
      with open(os.devnull, 'w', newline='', encoding='utf-8') as devnull:
        with processes_lock:
          # Starting many queries at once causes us to hit rate limits much more
          # frequently, so stagger query starts to help avoid that.
          time.sleep(QUERY_DELAY)
          p = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=devnull,
                               stdin=subprocess.PIPE)
          processes.add(p)

        # We pass in the query via stdin instead of including it on the
        # commandline because we can run into command length issues in large
        # query mode.
        stdout, _ = p.communicate(query)
        if not isinstance(stdout, six.string_types):
          stdout = stdout.decode('utf-8')
        if p.returncode:
          # When running many queries in parallel, it's possible to hit the
          # rate limit for the account if we're unlucky, so try again if we do.
          if 'Exceeded rate limits' in stdout:
            raise RateLimitError()
          error_msg = 'Error running command %s. stdout: %s' % (cmd, stdout)
          if 'memory' in stdout:
            raise MemoryLimitError(error_msg)
          raise RuntimeError(error_msg)
        return stdout

    def run_cmd(cmd: List[str], tries: int) -> List[str]:
      if tries >= MAX_QUERY_TRIES:
        raise RuntimeError('Query failed too many times, aborting')

      # We use a thread pool with a thread for each query/process instead of
      # just creating the processes due to guidance from the Python docs:
      # https://docs.python.org/3/library/subprocess.html#subprocess.Popen.stderr
      # We need to write to stdin to pass the query in, but using
      # stdout/stderr/stdin directly is discouraged due to the potential for
      # deadlocks. The suggested method (using .communicate()) blocks, so we
      # need the thread pool to maintain parallelism.
      pool = multiprocessing.pool.ThreadPool(len(queries))

      def cleanup():
        pool.terminate()
        for p in processes:
          try:
            p.terminate()
          except OSError:
            # We can fail to terminate if the process is already finished, so
            # ignore such failures.
            pass
        processes.clear()

      args = [(cmd, q) for q in queries]
      try:
        return pool.map(run_cmd_in_thread, args)
      except RateLimitError:
        logging.warning('Query hit rate limit, retrying')
        cleanup()
        return run_cmd(cmd, tries + 1)
      finally:
        cleanup()
      raise RuntimeError('Hit branch that should be unreachable')

    bq_cmd = GenerateBigQueryCommand(self._project,
                                     parameters,
                                     batch=self._use_batching)
    stdouts = run_cmd(bq_cmd, 0)
    combined_json = []
    for result in [json.loads(s) for s in stdouts]:
      for row in result:
        combined_json.append(row)
    return combined_json

  def _StripPrefixFromTestId(self, test_id: str) -> str:
    """Strips the prefix from a test ID, leaving only the test case name.

    Args:
      test_id: A string containing a full ResultDB test ID, e.g.
          ninja://target/directory.suite.class.test_case

    Returns:
      A string containing the test case name extracted from |test_id|.
    """
    raise NotImplementedError()

  def _GetActiveBuilderQuery(self, builder_type: str,
                             include_internal_builders: bool) -> str:
    """Gets the SQL query for determining which builders actually produce data.

    Args:
      builder_type: A string containing the type of builders to query, either
          "ci" or "try".
      include_internal_builders: A boolean indicating whether internal builders
          should be included in the data that the query will access.

    Returns:
      A string containing a SQL query that will get the names of all relevant
      builders that are active/producing data.
    """
    raise NotImplementedError()


class BaseQueryGenerator(object):
  """Abstract base class for query generators."""

  def __init__(self, builder: data_types.BuilderEntry):
    self._builder = builder

  def SplitQuery(self) -> None:
    """Splits the query into more clauses/queries."""
    raise NotImplementedError('SplitQuery must be overridden in a child class')

  def GetClauses(self) -> List[str]:
    """Gets string representations of the test filters.

    Returns:
      A list of strings, each string being a valid SQL clause that applies a
      portion of the test filter to a query.
    """
    raise NotImplementedError('GetClauses must be overridden in a child class')

  def GetQueries(self) -> List[str]:
    """Gets string representations of the queries to run.

    Returns:
      A list of strings, each string being a valid SQL query that queries a
      portion of the tests of interest.
    """
    raise NotImplementedError('GetQueries must be overridden in a child class')


# pylint: disable=abstract-method
class FixedQueryGenerator(BaseQueryGenerator):
  """Concrete test filter that cannot be split."""

  def __init__(self, builder: data_types.BuilderEntry, test_filter: str):
    """
    Args:
      test_filter: A string containing the test filter SQL clause to use.
    """
    super(FixedQueryGenerator, self).__init__(builder)
    self._test_filter = test_filter

  def SplitQuery(self) -> None:
    raise QuerySplitError('Tried to split a query without any test IDs to use, '
                          'use --large-query-mode')

  def GetClauses(self) -> List[str]:
    return [self._test_filter]
# pylint: enable=abstract-method


# pylint: disable=abstract-method
class SplitQueryGenerator(BaseQueryGenerator):
  """Concrete test filter that can be split to a desired size."""

  def __init__(self, builder: data_types.BuilderEntry, test_ids: List[str],
               target_num_samples: int):
    """
    Args:
      test_ids: A list of strings containing the test IDs to use in the test
          filter.
      target_num_samples: The target/max number of samples to get from each
          query that uses clauses from this test filter.
    """
    super(SplitQueryGenerator, self).__init__(builder)
    self._test_id_lists = []
    self._target_num_samples = target_num_samples
    self._clauses = []
    self._PerformInitialSplit(test_ids)

  def _PerformInitialSplit(self, test_ids: List[str]) -> None:
    """Evenly splits |test_ids| into lists ~|_target_num_samples| elements long.

    Only to be called from the constructor.

    Args:
      test_ids: A list of test IDs to split and assign to the _test_id_lists
          member.
    """
    assert isinstance(test_ids[0], six.string_types)

    num_lists = int(math.ceil(float(len(test_ids)) / self._target_num_samples))
    list_size = int(math.ceil(float(len(test_ids)) / num_lists))

    split_lists = []
    start = 0
    for _ in range(num_lists):
      end = min(len(test_ids), start + list_size)
      split_lists.append(test_ids[start:end])
      start = end
    self._test_id_lists = split_lists
    self._GenerateClauses()

  def _GenerateClauses(self) -> None:
    test_filter_clauses = []
    for id_list in self._test_id_lists:
      clause = 'AND test_id IN UNNEST([%s])' % ', '.join(id_list)
      test_filter_clauses.append(clause)
    self._clauses = test_filter_clauses
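    # For example, with self._test_id_lists == [['"test_a"', '"test_b"']]
    # (the join above assumes the IDs arrive already quoted for SQL), this
    # produces:
    #   ['AND test_id IN UNNEST(["test_a", "test_b"])']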

  def SplitQuery(self) -> None:
    def _SplitListInHalf(l: list) -> Tuple[list, list]:
      assert len(l) > 1
      front = l[:len(l) // 2]
      back = l[len(l) // 2:]
      return front, back

    tmp_test_id_lists = []
    for til in self._test_id_lists:
      if len(til) <= 1:
        raise QuerySplitError(
            'Cannot split query any further, try lowering --num-samples')
      front, back = _SplitListInHalf(til)
      tmp_test_id_lists.append(front)
      tmp_test_id_lists.append(back)
    self._test_id_lists = tmp_test_id_lists
    self._GenerateClauses()
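    # For example, lists of sizes [15, 15, 15] become [7, 8, 7, 8, 7, 8],
    # since each list is split at len(l) // 2.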

  def GetClauses(self) -> List[str]:
    return self._clauses
# pylint: enable=abstract-method


def GenerateBigQueryCommand(project: str,
                            parameters: QueryParameters,
                            batch: bool = True) -> List[str]:
  """Generate a BigQuery commandline.

  Does not contain the actual query, as that is passed in via stdin.

  Args:
    project: A string containing the billing project to use for BigQuery.
    parameters: A dict specifying parameters to substitute in the query in
        the format {type: {key: value}}. For example, the dict:
        {'INT64': {'num_builds': 5}}
        would result in --parameter=num_builds:INT64:5 being passed to BigQuery.
    batch: Whether to run the query in batch mode or not. Batching adds a
        variable amount of overhead, since the query has to wait for idle
        resources, but also allows for much better parallelism.

  Returns:
    A list containing the BigQuery commandline, suitable to be passed to a
    method from the subprocess module.
  """
  cmd = [
      'bq',
      'query',
      '--max_rows=%d' % MAX_ROWS,
      '--format=json',
      '--project_id=%s' % project,
      '--use_legacy_sql=false',
  ]

  if batch:
    cmd.append('--batch')

  for parameter_type, parameter_pairs in parameters.items():
    for k, v in parameter_pairs.items():
      cmd.append('--parameter=%s:%s:%s' % (k, parameter_type, v))
  return cmd
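
# An illustrative invocation of GenerateBigQueryCommand (the project name is
# hypothetical):
#   GenerateBigQueryCommand('my-billing-project',
#                           {'INT64': {'num_builds': 5}},
#                           batch=False)
# would return:
#   ['bq', 'query', '--max_rows=2147483647', '--format=json',
#    '--project_id=my-billing-project', '--use_legacy_sql=false',
#    '--parameter=num_builds:INT64:5']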


def _StripPrefixFromBuildId(build_id: str) -> str:
  # Build IDs provided by ResultDB are prefixed with "build-".
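  # e.g. "build-1234" -> "1234".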
  split_id = build_id.split('-')
  assert len(split_id) == 2
  return split_id[-1]


def _ConvertActualResultToExpectationFileFormat(actual_result: str) -> str:
  # Web tests use ResultDB's ABORT value for both test timeouts and device
  # failures, but Abort is not defined in typ. So, map it to timeout now.
  if actual_result == 'ABORT':
    actual_result = json_results.ResultType.Timeout
  # The result reported to ResultDB is in the format PASS/FAIL, while the
  # expected results in an expectation file are in the format Pass/Failure.
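  # e.g. typ's RESULT_TAGS is expected to map "PASS" to "Pass" and "FAIL" to
  # "Failure".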
  return expectations_parser.RESULT_TAGS[actual_result]


class RateLimitError(Exception):
  """Exception raised when BigQuery hits a rate limit error."""


class MemoryLimitError(Exception):
  """Exception raised when BigQuery hits its hard memory limit."""


class QuerySplitError(Exception):
  """Exception raised when a query cannot be split any further."""