• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2019, The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#     http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
15"""ATest execution info generator."""
16
17
18from __future__ import print_function
19
20import argparse
21import glob
22import json
23import logging
24import os
25import pathlib
26import shutil
27import sys
28import time
29from typing import Callable, List
30
31from atest import atest_enum
32from atest import atest_utils
33from atest import constants
34from atest import usb_speed_detect as usb
35from atest.atest_enum import ExitCode
36from atest.logstorage import log_uploader
37from atest.metrics import metrics
38from atest.metrics import metrics_utils
39
# Keys used inside the generated test_result JSON document.
_ARGS_KEY = 'args'
_STATUS_PASSED_KEY = 'PASSED'
_STATUS_FAILED_KEY = 'FAILED'
_STATUS_IGNORED_KEY = 'IGNORED'
_SUMMARY_KEY = 'summary'
_TOTAL_SUMMARY_KEY = 'total_summary'
_TEST_RUNNER_KEY = 'test_runner'
_TEST_NAME_KEY = 'test_name'
_TEST_TIME_KEY = 'test_time'
_TEST_DETAILS_KEY = 'details'
# File name of the per-run result JSON, and the JSON key holding its URL.
_TEST_RESULT_NAME = 'test_result'
_TEST_RESULT_LINK = 'test_result_link'
# Attribute looked up on the __main__ module to recover the exit code.
_EXIT_CODE_ATTR = 'EXIT_CODE'
_MAIN_MODULE_KEY = '__main__'
# Column widths for the history table printed by print_test_result().
_UUID_LEN = 30
_RESULT_LEN = 20
_RESULT_URL_LEN = 35
_COMMAND_LEN = 50
# Glob template locating per-test device logcat files under a result dir.
_LOGCAT_FMT = '{}/log/invocation_*/{}*device_logcat_test*'
# Markers searched for in host logs to count (skipped) APK installations.
_APK_CHANGE_DETECTOR_CLASSNAME = 'ApkChangeDetector'
_APP_INSTALL_SKIP_KEY = 'Skipping the installation of'
_APP_INSTALL_KEY = 'Installing apk'
_HOST_LOG_PREFIX = 'host_log_'

# Zeroed PASSED/FAILED/IGNORED map; copied whenever a summary is tallied.
_SUMMARY_MAP_TEMPLATE = {
    _STATUS_PASSED_KEY: 0,
    _STATUS_FAILED_KEY: 0,
    _STATUS_IGNORED_KEY: 0,
}
69
# Wall-clock timestamp recorded when test preparation finishes; set
# externally during a run and left as None when preparation never completed.
PREPARE_END_TIME = None


def preparation_time(start_time):
  """Compute how long test preparation took.

  Args:
      start_time: Timestamp (seconds) at which preparation began.

  Returns:
      The elapsed preparation time if PREPARE_END_TIME has been set,
      None otherwise.
  """
  if not PREPARE_END_TIME:
    return None
  return PREPARE_END_TIME - start_time
83
84
def symlink_latest_result(test_result_dir):
  """Point the 'LATEST' symlink at the given result directory.

  Any pre-existing 'LATEST' entry — including a dangling symlink, which
  islink() catches but exists() would not — is removed first.

  Args:
      test_result_dir: A string of the dir path.
  """
  latest_link = os.path.join(constants.ATEST_RESULT_ROOT, 'LATEST')
  if os.path.islink(latest_link) or os.path.exists(latest_link):
    os.remove(latest_link)
  os.symlink(test_result_dir, latest_link)
95
96
def print_test_result(root, history_arg):
  """Make a list of latest n test result.

  Args:
      root: A string of the test result root path.
      history_arg: A string of an integer or uuid. If it's an integer string,
        the number of lines of test result will be given; else it will be
        treated a uuid and print test result accordingly in detail.
  """
  # A non-numeric argument is treated as a result-directory uuid: print
  # that single result in detail and stop.
  if not history_arg.isdigit():
    path = os.path.join(constants.ATEST_RESULT_ROOT, history_arg, 'test_result')
    print_test_result_by_path(path)
    return
  # Result directories appear to be timestamp-named (anything starting
  # with '20'); reverse name sort puts the newest run first.
  target = '%s/20*_*_*' % root
  paths = glob.glob(target)
  paths.sort(reverse=True)
  # Include a result_url column in the header only when at least one
  # stored result actually carries a URL.
  if has_url_results():
    print(
        '{:-^{uuid_len}} {:-^{result_len}} {:-^{result_url_len}}'
        ' {:-^{command_len}}'.format(
            'uuid',
            'result',
            'result_url',
            'command',
            uuid_len=_UUID_LEN,
            result_len=_RESULT_LEN,
            result_url_len=_RESULT_URL_LEN,
            command_len=_COMMAND_LEN,
        )
    )
  else:
    print(
        '{:-^{uuid_len}} {:-^{result_len}} {:-^{command_len}}'.format(
            'uuid',
            'result',
            'command',
            uuid_len=_UUID_LEN,
            result_len=_RESULT_LEN,
            command_len=_COMMAND_LEN,
        )
    )
  # NOTE(review): slice end is int(history_arg) + 1, so a request for N
  # prints up to N+1 entries — confirm whether the extra row is intended.
  for path in paths[0 : int(history_arg) + 1]:
    result_path = os.path.join(path, 'test_result')
    # load_json_safely presumably yields a falsy dict on parse failure
    # (see the guard in print_test_result_by_path); .get() tolerates that.
    result = atest_utils.load_json_safely(result_path)
    total_summary = result.get(_TOTAL_SUMMARY_KEY, {})
    # Abbreviate each status to its first letter, e.g. "P:2, F:1, I:0".
    summary_str = ', '.join(
        [k[:1] + ':' + str(v) for k, v in total_summary.items()]
    )
    test_result_url = result.get(_TEST_RESULT_LINK, '')
    if has_url_results():
      print(
          '{:<{uuid_len}} {:<{result_len}} '
          '{:<{result_url_len}} atest {:<{command_len}}'.format(
              os.path.basename(path),
              summary_str,
              test_result_url,
              result.get(_ARGS_KEY, ''),
              uuid_len=_UUID_LEN,
              result_len=_RESULT_LEN,
              result_url_len=_RESULT_URL_LEN,
              command_len=_COMMAND_LEN,
          )
      )
    else:
      print(
          '{:<{uuid_len}} {:<{result_len}} atest {:<{command_len}}'.format(
              os.path.basename(path),
              summary_str,
              result.get(_ARGS_KEY, ''),
              uuid_len=_UUID_LEN,
              result_len=_RESULT_LEN,
              command_len=_COMMAND_LEN,
          )
      )
171
172
def print_test_result_by_path(path):
  """Print latest test result.

  Args:
      path: A string of test result path.
  """
  result = atest_utils.load_json_safely(path)
  if not result:
    return
  print('\natest {}'.format(result.get(_ARGS_KEY, '')))
  test_result_url = result.get(_TEST_RESULT_LINK, '')
  if test_result_url:
    print('\nTest Result Link: {}'.format(test_result_url))
  print('\nTotal Summary:\n{}'.format(atest_utils.delimiter('-')))
  total_summary = result.get(_TOTAL_SUMMARY_KEY, {})
  print(', '.join([(k + ':' + str(v)) for k, v in total_summary.items()]))
  # Default to 0: a result file without a FAILED entry must not raise a
  # TypeError (None > 0) on the comparison below.
  fail_num = total_summary.get(_STATUS_FAILED_KEY, 0)
  if fail_num > 0:
    message = '%d test failed' % fail_num
    print(f'\n{atest_utils.mark_red(message)}\n{"-" * len(message)}')
    test_runner = result.get(_TEST_RUNNER_KEY, {})
    for runner_name in test_runner.keys():
      test_dict = test_runner.get(runner_name, {})
      for test_name in test_dict:
        test_details = test_dict.get(test_name, {})
        # Default to []: a group may have no FAILED list even when the
        # total summary reports failures elsewhere.
        for fail in test_details.get(_STATUS_FAILED_KEY, []):
          print(atest_utils.mark_red(f'{fail.get(_TEST_NAME_KEY)}'))
          # Look for the per-test device logcat captured for this failure.
          failure_files = glob.glob(
              _LOGCAT_FMT.format(
                  os.path.dirname(path), fail.get(_TEST_NAME_KEY)
              )
          )
          if failure_files:
            print(
                '{} {}'.format(
                    atest_utils.mark_cyan('LOGCAT-ON-FAILURES:'),
                    failure_files[0],
                )
            )
          print(
              '{} {}'.format(
                  atest_utils.mark_cyan('STACKTRACE:\n'),
                  fail.get(_TEST_DETAILS_KEY),
              )
          )
218
219
def has_non_test_options(args: argparse.Namespace):
  """Check whether any non-test option is present in the args.

  Non-test options make atest do something other than running tests
  (collect-only, dry-run, history, version, latest-result).

  Args:
      args: An argparse.Namespace instance holding parsed args.

  Returns:
      True, if args has at least one non-test option.
      False, otherwise.
  """
  # NOTE: the original expression tested args.history twice; the duplicate
  # was removed without changing the result.
  return (
      args.collect_tests_only
      or args.dry_run
      or args.history
      or args.version
      or args.latest_result
  )
238
239
def has_url_results():
  """Return True if any stored test_result file carries a result URL."""
  for dir_path, _, file_names in os.walk(constants.ATEST_RESULT_ROOT):
    # Each result directory holds at most one 'test_result' file.
    if 'test_result' not in file_names:
      continue
    result = atest_utils.load_json_safely(
        os.path.join(dir_path, 'test_result')
    )
    if result.get(_TEST_RESULT_LINK, ''):
      return True
  return False
252
253
def parse_test_log_and_send_app_installation_stats_metrics(
    log_path: pathlib.Path,
) -> None:
  """Parse log and send app installation statistic metrics."""
  if not log_path:
    return

  # Collect every host_log_* file anywhere under the log directory.
  host_logs = glob.glob(
      str(log_path / f'**/{_HOST_LOG_PREFIX}*'), recursive=True
  )
  if not host_logs:
    return

  skipped_count = 0
  not_skipped_count = 0
  try:
    for host_log in host_logs:
      if not os.path.isfile(host_log):
        continue

      # Scan each host log line for install / install-skipped markers.
      with open(host_log, 'r') as log_file:
        for line in log_file:
          skipped_by_detector = (
              _APP_INSTALL_SKIP_KEY in line
              and _APK_CHANGE_DETECTOR_CLASSNAME in line
          )
          if skipped_by_detector:
            skipped_count += 1
          elif _APP_INSTALL_KEY in line:
            # TODO(b/394384055): Check classname for unskipped APKs as well.
            not_skipped_count += 1
    logging.debug('%d APK(s) skipped installation.', skipped_count)
    logging.debug('%d APK(s) did not skip installation.', not_skipped_count)
    metrics.LocalDetectEvent(
        detect_type=atest_enum.DetectType.APP_INSTALLATION_SKIPPED_COUNT,
        result=skipped_count,
    )
    metrics.LocalDetectEvent(
        detect_type=atest_enum.DetectType.APP_INSTALLATION_NOT_SKIPPED_COUNT,
        result=not_skipped_count,
    )
  except Exception as e:
    # Metrics are best-effort; never let log parsing break the atest run.
    logging.debug('An error occurred when accessing certain host logs: %s', e)
299
300
class AtestExecutionInfo:
  """Class that stores the whole test progress information in JSON format.

  ----
  For example, running command
      atest hello_world_test HelloWorldTest

  will result in storing the execution detail in JSON:
  {
    "args": "hello_world_test HelloWorldTest",
    "test_runner": {
        "AtestTradefedTestRunner": {
            "hello_world_test": {
                "FAILED": [
                    {"test_time": "(5ms)",
                     "details": "Hello, Wor...",
                     "test_name": "HelloWorldTest#PrintHelloWorld"}
                    ],
                "summary": {"FAILED": 1, "PASSED": 0, "IGNORED": 0}
            },
            "HelloWorldTests": {
                "PASSED": [
                    {"test_time": "(27ms)",
                     "details": null,
                     "test_name": "...HelloWorldTest#testHalloWelt"},
                    {"test_time": "(1ms)",
                     "details": null,
                     "test_name": "....HelloWorldTest#testHelloWorld"}
                    ],
                "summary": {"FAILED": 0, "PASSED": 2, "IGNORED": 0}
            }
        }
    },
    "total_summary": {"FAILED": 1, "PASSED": 2, "IGNORED": 0}
  }
  """

  # Result reporters registered by test runners during the run; consumed
  # by _generate_execution_detail() when the result file is written.
  result_reporters = []

  def __init__(
      self,
      args: List[str],
      work_dir: str,
      args_ns: argparse.Namespace,
      get_exit_code_func: Callable[[], int] = None,
      start_time: float = None,
      repo_out_dir: pathlib.Path = None,
  ):
    """Initialise an AtestExecutionInfo instance.

    Args:
        args: Command line parameters.
        work_dir: The directory for saving information.
        args_ns: An argparse.Namespace instance holding parsed args.
        get_exit_code_func: A callable that returns the exit_code value.
        start_time: The execution start time. Can be None.
        repo_out_dir: The repo output directory. Can be None.
    """
    self.args = args
    self.work_dir = work_dir
    self.result_file_obj = None
    self.args_ns = args_ns
    self.get_exit_code_func = get_exit_code_func
    self.test_result = os.path.join(self.work_dir, _TEST_RESULT_NAME)
    # Handle of the background USB-speed-detection process; started in
    # __enter__ and terminated (if still running) in __exit__.
    self._proc_usb_speed = None
    logging.debug(
        'A %s object is created with args %s, work_dir %s',
        __class__,
        args,
        work_dir,
    )
    self._start_time = start_time if start_time is not None else time.time()
    self._repo_out_dir = (
        repo_out_dir
        if repo_out_dir is not None
        else atest_utils.get_build_out_dir()
    )

  def __enter__(self):
    """Create and return information file object."""
    try:
      self.result_file_obj = open(self.test_result, 'w')
    except IOError:
      atest_utils.print_and_log_error('Cannot open file %s', self.test_result)

    # USB speed detection runs in a separate process so a slow adb never
    # delays the test run itself.
    self._proc_usb_speed = atest_utils.run_multi_proc(
        func=self._send_usb_metrics_and_warning
    )

    return self.result_file_obj

  def __exit__(self, exit_type, value, traceback):
    """Write execution information and close information file."""

    if self._proc_usb_speed:
      # Usb speed detection is not an obligatory function of atest,
      # so it can be skipped if the process hasn't finished by the time atest
      # is ready to exit.
      if self._proc_usb_speed.is_alive():
        self._proc_usb_speed.terminate()

    log_path = pathlib.Path(self.work_dir)

    # Preserve build traces/logs produced during this run next to the
    # test logs before anything cleans the out directory.
    build_log_path = log_path / 'build_logs'
    build_log_path.mkdir()
    AtestExecutionInfo._copy_build_artifacts_to_log_dir(
        self._start_time,
        time.time(),
        self._repo_out_dir,
        build_log_path,
        'build.trace',
    )
    AtestExecutionInfo._copy_build_artifacts_to_log_dir(
        self._start_time,
        time.time(),
        self._repo_out_dir,
        build_log_path,
        'verbose.log',
    )

    html_path = None

    # Only materialize a test_result JSON for real test runs; non-test
    # invocations (history, version, ...) produce no result.
    if self.result_file_obj and not has_non_test_options(self.args_ns):
      self.result_file_obj.write(
          AtestExecutionInfo._generate_execution_detail(self.args)
      )
      self.result_file_obj.close()
      atest_utils.prompt_suggestions(self.test_result)
      html_path = atest_utils.generate_result_html(self.test_result)
      symlink_latest_result(self.work_dir)

    if self.get_exit_code_func:
      main_exit_code = self.get_exit_code_func()
    else:
      # Fall back to the EXIT_CODE attribute of __main__, or the code
      # carried by a SystemExit that is unwinding through us.
      main_module = sys.modules.get(_MAIN_MODULE_KEY)
      main_exit_code = (
          value.code
          if isinstance(value, SystemExit)
          else (getattr(main_module, _EXIT_CODE_ATTR, ExitCode.ERROR))
      )

    print()
    if log_path:
      print(f'Test logs: {log_path / "log"}')
      parse_test_log_and_send_app_installation_stats_metrics(log_path)
    log_link = html_path if html_path else log_path
    if log_link:
      print(atest_utils.mark_magenta(f'Log file list: file://{log_link}'))
    bug_report_url = AtestExecutionInfo._create_bug_report_url()
    if bug_report_url:
      print(atest_utils.mark_magenta(f'Report an issue: {bug_report_url}'))
    print()

    # Do not send stacktrace with send_exit_event when exit code is not
    # ERROR.
    if main_exit_code != ExitCode.ERROR:
      logging.debug('send_exit_event:%s', main_exit_code)
      metrics_utils.send_exit_event(main_exit_code)
    else:
      logging.debug('handle_exc_and_send_exit_event:%s', main_exit_code)
      metrics_utils.handle_exc_and_send_exit_event(main_exit_code)

    if log_uploader.is_uploading_logs():
      log_uploader.upload_logs_detached(log_path)

  def _send_usb_metrics_and_warning(self):
    """Read the device USB speed, warn the user and send usb metrics."""
    device_ids = usb.get_adb_device_identifiers()
    if not device_ids:
      return

    usb_speed_dir_name = usb.get_udc_driver_usb_device_dir_name()
    if not usb_speed_dir_name:
      return

    usb_negotiated_speed = usb.get_udc_driver_usb_device_attribute_speed_value(
        usb_speed_dir_name, usb.UsbAttributeName.NEGOTIATED_SPEED
    )
    usb_max_speed = usb.get_udc_driver_usb_device_attribute_speed_value(
        usb_speed_dir_name, usb.UsbAttributeName.MAXIMUM_SPEED
    )
    usb.verify_and_print_usb_speed_warning(
        device_ids, usb_negotiated_speed, usb_max_speed
    )

    metrics.LocalDetectEvent(
        detect_type=atest_enum.DetectType.USB_NEGOTIATED_SPEED,
        result=usb_negotiated_speed,
    )
    metrics.LocalDetectEvent(
        detect_type=atest_enum.DetectType.USB_MAX_SPEED,
        result=usb_max_speed,
    )

  @staticmethod
  def _create_bug_report_url() -> str:
    """Return the issue-reporting URL for internal users ('' otherwise)."""
    if not metrics.is_internal_user():
      return ''
    if not log_uploader.is_uploading_logs():
      return 'http://go/new-atest-issue'
    return f'http://go/from-atest-runid/{metrics.get_run_id()}'

  @staticmethod
  def _copy_build_artifacts_to_log_dir(
      start_time: float,
      end_time: float,
      repo_out_path: pathlib.Path,
      log_path: pathlib.Path,
      file_name_prefix: str,
  ):
    """Copy build trace files to log directory.

    Args:
      start_time: The start time of the build.
      end_time: The end time of the build.
      repo_out_path: The path to the repo out directory.
      log_path: The path to the log directory.
      file_name_prefix: The prefix of the file name.
    """
    # Guard added: a clean tree may not have an out directory yet, and
    # iterdir() on a missing path would raise inside __exit__.
    if not repo_out_path.is_dir():
      return
    for file in repo_out_path.iterdir():
      # Only copy files whose mtime falls within this run's time window.
      if (
          file.is_file()
          and file.name.startswith(file_name_prefix)
          and start_time <= file.stat().st_mtime <= end_time
      ):
        shutil.copy(file, log_path / file.name)

  @staticmethod
  def _generate_execution_detail(args):
    """Generate execution detail.

    Args:
        args: Command line parameters that you want to save.

    Returns:
        A json format string.
    """
    info_dict = {_ARGS_KEY: ' '.join(args)}
    try:
      AtestExecutionInfo._arrange_test_result(
          info_dict, AtestExecutionInfo.result_reporters
      )
      return json.dumps(info_dict)
    except ValueError as err:
      atest_utils.print_and_log_warning(
          'Parsing test result failed due to : %s', err
      )
    # Fixed: previously returned a dict ({}), which made the caller's
    # file.write() raise TypeError; return an empty-JSON string instead.
    return '{}'

  @staticmethod
  def _arrange_test_result(info_dict, reporters):
    """Append test result information in given dict.

    Arrange test information to below
    "test_runner": {
        "test runner name": {
            "test name": {
                "FAILED": [
                    {"test time": "",
                     "details": "",
                     "test name": ""}
                ],
            "summary": {"FAILED": 0, "PASSED": 0, "IGNORED": 0}
            },
        },
    "total_summary": {"FAILED": 0, "PASSED": 0, "IGNORED": 0}

    Args:
        info_dict: A dict you want to add result information in.
        reporters: A list of result_reporter.

    Returns:
        A dict contains test result information data.
    """
    info_dict[_TEST_RUNNER_KEY] = {}
    for reporter in reporters:
      if reporter.test_result_link:
        info_dict[_TEST_RESULT_LINK] = reporter.test_result_link
      for test in reporter.all_test_results:
        # Bucket each result under runner -> group -> status.
        runner = info_dict[_TEST_RUNNER_KEY].setdefault(test.runner_name, {})
        group = runner.setdefault(test.group_name, {})
        result_dict = {
            _TEST_NAME_KEY: test.test_name,
            _TEST_TIME_KEY: test.test_time,
            _TEST_DETAILS_KEY: test.details,
        }
        group.setdefault(test.status, []).append(result_dict)

    # Tally per-group summaries and the run-wide total from the buckets.
    total_test_group_summary = _SUMMARY_MAP_TEMPLATE.copy()
    for runner in info_dict[_TEST_RUNNER_KEY]:
      for group in info_dict[_TEST_RUNNER_KEY][runner]:
        group_summary = _SUMMARY_MAP_TEMPLATE.copy()
        for status in info_dict[_TEST_RUNNER_KEY][runner][group]:
          count = len(info_dict[_TEST_RUNNER_KEY][runner][group][status])
          if status in _SUMMARY_MAP_TEMPLATE:
            group_summary[status] = count
            total_test_group_summary[status] += count
        info_dict[_TEST_RUNNER_KEY][runner][group][_SUMMARY_KEY] = group_summary
    info_dict[_TOTAL_SUMMARY_KEY] = total_test_group_summary
    return info_dict
604