#!/usr/bin/env python3
# Copyright 2019, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Atest tool functions."""

# pylint: disable=line-too-long

from __future__ import print_function

import json
import logging
import os
import pickle
import re
import shutil
import subprocess
import sys
import tempfile
import time

from pathlib import Path

from atest import atest_utils as au
from atest import constants

from atest.atest_enum import DetectType, ExitCode
from atest.metrics import metrics, metrics_utils

UPDATEDB = 'updatedb'
LOCATE = 'locate'
ACLOUD_DURATION = 'duration'
SEARCH_TOP = os.getenv(constants.ANDROID_BUILD_TOP, '')
# When adding a new index, remember to append its constant to the tuple below.
INDEXES = (constants.CC_CLASS_INDEX,
           constants.CLASS_INDEX,
           constants.LOCATE_CACHE,
           constants.PACKAGE_INDEX,
           constants.QCLASS_INDEX)

# The list was generated by command:
# find `gettop` -type d -wholename `gettop`/out -prune  -o -type d -name '.*'
# -print | awk -F/ '{{print $NF}}'| sort -u
PRUNENAMES = ['.abc', '.appveyor', '.azure-pipelines',
              '.bazelci', '.build-id', '.buildkite', '.buildscript',
              '.cargo', '.ci', '.circleci', '.clusterfuzzlite', '.conan',
              '.devcontainer',
              '.dwz',
              '.externalToolBuilders',
              '.git', '.githooks', '.github', '.gitlab', '.gitlab-ci', '.google',
              '.hidden',
              '.idea', '.intermediates',
              '.jenkins',
              '.kokoro',
              '.libs_cffi_backend',
              '.more', '.mvn',
              '.prebuilt_info', '.private', '__pycache__',
              '.repo',
              '.settings', '.static', '.svn',
              '.test',
              '.travis',
              '.travis_scripts',
              '.tx',
              '.vscode']
PRUNEPATHS = ['prebuilts']
ACLOUD_REPORT_FILE_RE = re.compile(r'.*--report[_-]file(=|\s+)(?P<report_file>[\w/.]+)')


def _delete_indexes():
    """Delete all available index files."""
    for index in INDEXES:
        if os.path.isfile(index):
            os.remove(index)


def get_report_file(results_dir, acloud_args):
    """Get the acloud report file path.

    This method can parse either of these strings:
        --acloud-create '--report-file=/tmp/acloud.json'
        --acloud-create '--report-file /tmp/acloud.json'
    and return '/tmp/acloud.json' as the report file. Otherwise, it returns
    the default path (/tmp/atest_result/<hashed_dir>/acloud_status.json).

    Args:
        results_dir: A string of the directory that stores atest results.
        acloud_args: A string of acloud create arguments.

    Returns:
        A string of the acloud report file path.
    """
    match = ACLOUD_REPORT_FILE_RE.match(acloud_args)
    if match:
        return match.group('report_file')
    return os.path.join(results_dir, 'acloud_status.json')
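
# Illustrative behavior of get_report_file(); the paths are hypothetical:
#   get_report_file('/tmp/atest_result/1234',
#                   "--local-instance --report-file=/tmp/acloud.json")
#   -> '/tmp/acloud.json'
#   get_report_file('/tmp/atest_result/1234', '--local-instance')
#   -> '/tmp/atest_result/1234/acloud_status.json'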


def has_command(cmd):
    """Detect if the command is available in PATH.

    Args:
        cmd: A string of the tested command.

    Returns:
        True if found, False otherwise.
    """
    return bool(shutil.which(cmd))


def run_updatedb(search_root=SEARCH_TOP, output_cache=constants.LOCATE_CACHE,
                 **kwargs):
    """Run updatedb and generate a cache in $ANDROID_HOST_OUT/indexes/plocate.db.

    Args:
        search_root: The path of the search root (-U).
        output_cache: The filename of the updatedb cache (-o).
        kwargs: (optional)
            prunepaths: A list of paths that should not be searched (-e).
            prunenames: A list of directory names that won't be cached (-n).
    """
    prunenames = kwargs.pop('prunenames', ' '.join(PRUNENAMES))
    _prunepaths = [os.path.join(SEARCH_TOP, p) for p in PRUNEPATHS]
    _prunepaths.append(str(au.get_build_out_dir()))
    prunepaths = kwargs.pop('prunepaths', ' '.join(_prunepaths))
    if kwargs:
        raise TypeError('Unexpected **kwargs: %r' % kwargs)
    updatedb_cmd = [UPDATEDB, '-l0']
    updatedb_cmd.append('-U%s' % search_root)
    updatedb_cmd.append('-n%s' % prunenames)
    updatedb_cmd.append('-o%s' % output_cache)
    # (b/206866627) /etc/updatedb.conf excludes /mnt from scanning on Linux.
    # Use --prunepaths to override the default configuration.
    updatedb_cmd.append('--prunepaths')
    updatedb_cmd.append(prunepaths)
    # Support scanning bind mounts as well.
    updatedb_cmd.extend(['--prune-bind-mounts', 'no'])

    logging.debug('Running updatedb... ')
    try:
        full_env_vars = os.environ.copy()
        logging.debug('Executing: %s', updatedb_cmd)
        if not os.path.isdir(constants.INDEX_DIR):
            os.makedirs(constants.INDEX_DIR)
        subprocess.run(updatedb_cmd, env=full_env_vars, check=True)
    except (KeyboardInterrupt, SystemExit):
        logging.error('Process interrupted or failed.')
    # Remove the cache when plocate.db is locked or another CalledProcessError
    # occurs. (b/141588997)
    except subprocess.CalledProcessError as err:
        logging.error('Error executing %s.', UPDATEDB)
        metrics_utils.handle_exc_and_send_exit_event(
            constants.PLOCATEDB_LOCKED)
        if err.output:
            logging.error(err.output)
        os.remove(output_cache)
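
# For reference, with the default arguments the assembled command looks
# roughly like the following (paths are hypothetical and depend on the local
# checkout and lunch target):
#   updatedb -l0 -U/path/to/android/src -n'.git .idea ...' \
#     -o$ANDROID_HOST_OUT/indexes/plocate.db \
#     --prunepaths '/path/to/android/src/prebuilts /path/to/android/src/out' \
#     --prune-bind-mounts no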


def _dump_index(dump_file, output, output_re, key, value):
    """Dump indexed data with pickle.

    Args:
        dump_file: A string of the absolute path of the index file.
        output: A string generated by locate and grep.
        output_re: A regex used for grouping patterns.
        key: A string of the regex group name used as the dictionary key,
             e.g. test_name, class, package.
        value: A string of the regex group name used as the value (a path),
               e.g. file_path, java_path, java_dir.

    The data structure will be like:
    {
      'Foo': {'/path/to/Foo.java', '/path2/to/Foo.kt'},
      'Boo': {'/path3/to/Boo.java'}
    }
    """
    temp_file = tempfile.NamedTemporaryFile()
    _dict = {}
    with open(temp_file.name, 'wb') as cache_file:
        if isinstance(output, bytes):
            output = output.decode()
        for entry in output.splitlines():
            match = output_re.match(entry)
            if match:
                _dict.setdefault(match.group(key), set()).add(
                    match.group(value))
        try:
            pickle.dump(_dict, cache_file, protocol=2)
        except IOError:
            logging.error('Failed to dump %s', dump_file)
    shutil.copy(temp_file.name, dump_file)
    temp_file.close()
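
# A minimal sketch of reading an index back, assuming the pickle format
# written above (the path below is hypothetical):
#   with open('/path/to/indexes/classes.idx', 'rb') as f:
#       class_index = pickle.load(f)
#   class_index.get('FooTestCase')  # -> a set of file paths, or None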


# pylint: disable=anomalous-backslash-in-string
def get_cc_result(locatedb=constants.LOCATE_CACHE, **kwargs):
    """Search all testable cc/cpp files and grep TEST(), TEST_F() or TEST_P().

    After searching cc/cpp files, index corresponding data types in parallel.

    Args:
        locatedb: A string of the absolute path of the plocate.db.
        kwargs: (optional)
            cc_class_index: A path string of the CC class index.
    """
    find_cc_cmd = (
        f"{LOCATE} -id{locatedb} --regex '/*.test.*\.(cc|cpp)$'"
        f"| xargs egrep -sH '{constants.CC_GREP_RE}' 2>/dev/null || true")
    logging.debug('Probing CC classes:\n %s', find_cc_cmd)
    result = subprocess.check_output(find_cc_cmd, shell=True)

    cc_class_index = kwargs.pop('cc_class_index', constants.CC_CLASS_INDEX)
    au.run_multi_proc(func=_index_cc_classes, args=[result, cc_class_index])
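
# Each line of `result` is `grep -H` style output, e.g. (hypothetical path):
#   /path/to/foo_test.cpp:TEST_F(FooTest, ReturnsTrue) {
# constants.CC_OUTPUT_RE is then expected to pull the test name ('FooTest')
# and the file path out of such lines via its test_name/file_path groups.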


# pylint: disable=anomalous-backslash-in-string
def get_java_result(locatedb=constants.LOCATE_CACHE, **kwargs):
    """Search all testable java/kt files and grep the package.

    After searching java/kt files, index corresponding data types in parallel.

    Args:
        locatedb: A string of the absolute path of the plocate.db.
        kwargs: (optional)
            class_index: A path string of the Java class index.
            qclass_index: A path string of the qualified class index.
            package_index: A path string of the package index.
    """
    package_grep_re = r'^\s*package\s+[a-z][[:alnum:]]+[^{]'
    find_java_cmd = (
        f"{LOCATE} -id{locatedb} --regex '/*.test.*\.(java|kt)$' "
        # (b/204398677) suppress stderr when the indexing target is terminated.
        f"| xargs egrep -sH '{package_grep_re}' 2>/dev/null || true")
    logging.debug('Probing Java classes:\n %s', find_java_cmd)
    result = subprocess.check_output(find_java_cmd, shell=True)

    class_index = kwargs.pop('class_index', constants.CLASS_INDEX)
    qclass_index = kwargs.pop('qclass_index', constants.QCLASS_INDEX)
    package_index = kwargs.pop('package_index', constants.PACKAGE_INDEX)
    au.run_multi_proc(func=_index_java_classes, args=[result, class_index])
    au.run_multi_proc(func=_index_qualified_classes, args=[result, qclass_index])
    au.run_multi_proc(func=_index_packages, args=[result, package_index])
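
# Each line of `result` is `grep -H` style output, e.g. (hypothetical values):
#   /path/to/FooTest.java:package com.android.foo;
# The three indexers below turn such lines into a class index ('FooTest'),
# a package index ('com.android.foo') and a fully qualified class index
# ('com.android.foo.FooTest').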


def _index_cc_classes(output, index):
    """Index CC classes.

    The data structure is like:
    {
      'FooTestCase': {'/path1/to/the/FooTestCase.cpp',
                      '/path2/to/the/FooTestCase.cc'}
    }

    Args:
        output: A string object generated by get_cc_result().
        index: A string path of the index file.
    """
    logging.debug('indexing CC classes.')
    _dump_index(dump_file=index, output=output,
                output_re=constants.CC_OUTPUT_RE,
                key='test_name', value='file_path')


def _index_java_classes(output, index):
    """Index Java classes.

    The data structure is like:
    {
        'FooTestCase': {'/path1/to/the/FooTestCase.java',
                        '/path2/to/the/FooTestCase.kt'}
    }

    Args:
        output: A string object generated by get_java_result().
        index: A string path of the index file.
    """
    logging.debug('indexing Java classes.')
    _dump_index(dump_file=index, output=output,
                output_re=constants.CLASS_OUTPUT_RE,
                key='class', value='java_path')


def _index_packages(output, index):
    """Index Java packages.

    The data structure is like:
    {
        'a.b.c.d': {'/path1/to/a/b/c/d/',
                    '/path2/to/a/b/c/d/'}
    }

    Args:
        output: A string object generated by get_java_result().
        index: A string path of the index file.
    """
    logging.debug('indexing packages.')
    _dump_index(dump_file=index,
                output=output, output_re=constants.PACKAGE_OUTPUT_RE,
                key='package', value='java_dir')


def _index_qualified_classes(output, index):
    """Index fully qualified Java classes (FQCN).

    The data structure is like:
    {
        'a.b.c.d.FooTestCase': {'/path1/to/a/b/c/d/FooTestCase.java',
                                '/path2/to/a/b/c/d/FooTestCase.kt'}
    }

    Args:
        output: A string object generated by get_java_result().
        index: A string path of the index file.
    """
    logging.debug('indexing qualified classes.')
    temp_file = tempfile.NamedTemporaryFile()
    _dict = {}
    with open(temp_file.name, 'wb') as cache_file:
        if isinstance(output, bytes):
            output = output.decode()
        for entry in output.split('\n'):
            match = constants.QCLASS_OUTPUT_RE.match(entry)
            if match:
                fqcn = match.group('package') + '.' + match.group('class')
                _dict.setdefault(fqcn, set()).add(match.group('java_path'))
        try:
            pickle.dump(_dict, cache_file, protocol=2)
        except (KeyboardInterrupt, SystemExit):
            logging.error('Process interrupted or failed.')
        except IOError:
            logging.error('Failed to dump %s', index)
    shutil.copy(temp_file.name, index)
    temp_file.close()


def index_targets(output_cache=constants.LOCATE_CACHE):
    """The entrypoint of indexing targets.

    Utilise the plocate database to index reference types of CLASS, CC_CLASS,
    PACKAGE and QUALIFIED_CLASS.

    (b/206886222) The checksum and file size of plocate.db may differ even if
    the source is not changed at all; therefore, indexing is skipped only when
    both conditions are fulfilled:
      - `repo sync` has not been run since the previous atest invocation.
      - the file count recorded in the current and the previous plocate.db is
        the same.

    Args:
        output_cache: A file path of the updatedb cache
                      (e.g. /path/to/plocate.db).
    """
    unavailable_cmds = [
        cmd for cmd in [UPDATEDB, LOCATE] if not has_command(cmd)]
    if unavailable_cmds:
        logging.debug('command %s is unavailable; skip indexing...',
                      ' '.join(unavailable_cmds))
        return

    # Get the number of indexed files.
    get_num_cmd = f'{LOCATE} -d{output_cache} --count /'
    ret, pre_number = subprocess.getstatusoutput(get_num_cmd)
    if ret != 0:
        logging.debug('Failed to run %s', get_num_cmd)
        pre_number = sys.maxsize

    run_updatedb(SEARCH_TOP, output_cache)
    checksum_file = os.path.join(constants.INDEX_DIR, 'repo_sync.md5')
    repo_syncd = not au.check_md5(checksum_file, missing_ok=False)
    if repo_syncd:
        repo_file = Path(SEARCH_TOP).joinpath(
            '.repo/.repo_fetchtimes.json')
        au.run_multi_proc(
            func=au.save_md5,
            args=[[repo_file], checksum_file])
    if not repo_syncd and pre_number == subprocess.getoutput(get_num_cmd):
        logging.debug('%s remains the same. Ignore indexing', output_cache)
        return
    logging.debug('Indexing targets... ')
    au.run_multi_proc(func=get_java_result, args=[output_cache])
    au.run_multi_proc(func=get_cc_result, args=[output_cache])
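
# For reference, the skip check above compares the output of a command such
# as the following (path hypothetical), taken before and after updatedb:
#   locate -d/path/to/indexes/plocate.db --count /
# If the counts match and no `repo sync` was detected via the repo_sync.md5
# checksum, re-indexing is skipped.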


def acloud_create(report_file, args, no_metrics_notice=True):
    """Method which runs acloud create with the specified args in the background.

    Args:
        report_file: A path string of the acloud report file.
        args: A string of arguments.
        no_metrics_notice: Boolean of whether to send data to metrics or not.
    """
    notice = constants.NO_METRICS_ARG if no_metrics_notice else ""
    match = ACLOUD_REPORT_FILE_RE.match(args)
    report_file_arg = f'--report-file={report_file}' if not match else ""

    # (b/161759557) Assume yes for acloud create to streamline atest flow.
    acloud_cmd = ('acloud create -y {ACLOUD_ARGS} '
                  '{REPORT_FILE_ARG} '
                  '{METRICS_NOTICE} '
                  ).format(ACLOUD_ARGS=args,
                           REPORT_FILE_ARG=report_file_arg,
                           METRICS_NOTICE=notice)
    au.colorful_print("\nCreating AVD via acloud...", constants.CYAN)
    logging.debug('Executing: %s', acloud_cmd)
    start = time.time()
    proc = subprocess.Popen(acloud_cmd, shell=True)
    proc.communicate()
    acloud_duration = time.time() - start
    logging.info('"acloud create" process has completed.')
    # Insert the acloud create duration into the report file.
    result = au.load_json_safely(report_file)
    if result:
        result[ACLOUD_DURATION] = acloud_duration
        try:
            with open(report_file, 'w+') as _wfile:
                _wfile.write(json.dumps(result))
        except OSError as e:
            logging.error("Failed dumping duration to the report file: %s",
                          str(e))
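
# For reference, the assembled command looks roughly like the following;
# the arguments are hypothetical and <NO_METRICS_ARG> stands for whatever
# constants.NO_METRICS_ARG holds:
#   acloud create -y --local-instance \
#     --report-file=/tmp/atest_result/1234/acloud_status.json <NO_METRICS_ARG>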


def acloud_create_validator(results_dir, args):
    """Check the lunch'd target before running 'acloud create'.

    Args:
        results_dir: A string of the results directory.
        args: An argparse.Namespace object.

    Returns:
        If the target is valid:
            A tuple of (multiprocessing.Process,
                        report_file path)
        else:
            A tuple of (None, None)
    """
    target = os.getenv('TARGET_PRODUCT', '')
    if not re.match(r'^(aosp_|)cf_.*', target):
        au.colorful_print(
            f'{target} is not in cuttlefish family; will not create any AVD.',
            constants.RED)
        return None, None
    if args.start_avd:
        args.acloud_create = []
    acloud_args = ' '.join(args.acloud_create)
    report_file = get_report_file(results_dir, acloud_args)
    acloud_proc = au.run_multi_proc(
        func=acloud_create,
        args=[report_file, acloud_args, args.no_metrics])
    return acloud_proc, report_file


def probe_acloud_status(report_file, find_build_duration):
    """Method which probes the 'acloud create' result status.

    If the report file exists and the status is 'SUCCESS', then the creation is
    successful.

    Args:
        report_file: A path string of the acloud report file.
        find_build_duration: A float of seconds.

    Returns:
        0: success.
        8: acloud creation failure.
        9: invalid acloud create arguments.
    """
    # 1. The report file exists: check whether the status is 'SUCCESS'.
    if Path(report_file).exists():
        if not au.load_json_safely(report_file):
            return ExitCode.AVD_CREATE_FAILURE
        with open(report_file, 'r') as rfile:
            result = json.load(rfile)

        if result.get('status') == 'SUCCESS':
            logging.info('acloud create succeeded!')
            # Always fetch the adb port of the first created AVD.
            adb_port = result.get('data').get('devices')[0].get('adb_port')
            is_remote_instance = result.get('command') == 'create_cf'
            adb_ip = '127.0.0.1' if is_remote_instance else '0.0.0.0'
            os.environ[constants.ANDROID_SERIAL] = f'{adb_ip}:{adb_port}'

            acloud_duration = get_acloud_duration(report_file)
            if find_build_duration - acloud_duration >= 0:
                # find+build took longer, so the acloud create time was saved.
                logging.debug('Saved acloud create time: %ss.',
                              acloud_duration)
                metrics.LocalDetectEvent(
                    detect_type=DetectType.ACLOUD_CREATE,
                    result=round(acloud_duration))
            else:
                # acloud create took longer, so the find+build time was saved.
                logging.debug('Saved Find and Build time: %ss.',
                              find_build_duration)
                metrics.LocalDetectEvent(
                    detect_type=DetectType.FIND_BUILD,
                    result=round(find_build_duration))
            return ExitCode.SUCCESS
        au.colorful_print(
            'acloud create failed. Please check\n{}\nfor details.'.format(
                report_file), constants.RED)
        return ExitCode.AVD_CREATE_FAILURE

    # 2. Failed to create because of invalid acloud arguments.
    logging.error('Invalid acloud arguments found!')
    return ExitCode.AVD_INVALID_ARGS
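
# Illustrative minimal report file that the success path above can consume
# (values are hypothetical; only the fields read here are shown):
#   {
#     "status": "SUCCESS",
#     "command": "create_cf",
#     "data": {"devices": [{"adb_port": 6520}]}
#   }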


def get_acloud_duration(report_file):
    """Method which gets the duration of 'acloud create' from a report file.

    Args:
        report_file: A path string of the acloud report file.

    Returns:
        A float of seconds that acloud create took.
    """
    content = au.load_json_safely(report_file)
    if not content:
        return 0
    return content.get(ACLOUD_DURATION, 0)


if __name__ == '__main__':
    if not os.getenv(constants.ANDROID_HOST_OUT, ''):
        sys.exit()
    index_targets()