# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import inspect
import logging
import os
import re
import signal
import socket
import struct
import time
import urllib2
import uuid
import wave

from autotest_lib.client.common_lib import base_utils
from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import global_config
from autotest_lib.client.common_lib import lsbrelease_utils
from autotest_lib.client.cros import constants


CONFIG = global_config.global_config

# Number of seconds to wait between checks of whether a pid is still alive.
CHECK_PID_IS_ALIVE_TIMEOUT = 6

_LOCAL_HOST_LIST = ('localhost', '127.0.0.1')

# The default address of a vm gateway.
DEFAULT_VM_GATEWAY = '10.0.2.2'

# Google Storage bucket URI to store results in.
DEFAULT_OFFLOAD_GSURI = CONFIG.get_config_value(
        'CROS', 'results_storage_server', default=None)

# Default Moblab Ethernet Interface.
MOBLAB_ETH = 'eth0'

# A list of subnets that require a dedicated devserver and drone in the same
# subnet. Each item is a tuple of (subnet_ip, mask_bits), e.g.,
# ('192.168.0.0', 24).
RESTRICTED_SUBNETS = []
restricted_subnets_list = CONFIG.get_config_value(
        'CROS', 'restricted_subnets', type=list, default=[])
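# Example of the expected config value (hypothetical subnets): a comma-
# separated list of ip:mask_bits entries, e.g.
#   restricted_subnets: 192.168.231.0:24,192.168.254.0:26
# which yields RESTRICTED_SUBNETS == [('192.168.231.0', 24),
# ('192.168.254.0', 26)].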
for subnet in restricted_subnets_list:
    ip, mask_bits = subnet.split(':')
    RESTRICTED_SUBNETS.append((ip, int(mask_bits)))

# Regex pattern for CLIENT/wireless_ssid_ config entries. For example, the
# global config can have the following entry in the CLIENT section to indicate
# that hosts in subnet 192.168.0.1/24 should use the wireless ssid `ssid_1`:
# wireless_ssid_192.168.0.1/24: ssid_1
WIRELESS_SSID_PATTERN = r'wireless_ssid_(.*)/(\d+)'

def ping(host, deadline=None, tries=None, timeout=60):
    """Attempt to ping |host|.

    Shell out to 'ping' to try to reach |host| for |timeout| seconds.
    Returns the exit code of ping.

    Per 'man ping', if you specify BOTH |deadline| and |tries|, ping only
    returns 0 if we get responses to |tries| pings within |deadline| seconds.

    Specifying |deadline| or |tries| alone should return 0 as long as
    some packets receive responses.

    @param host: the host to ping.
    @param deadline: seconds within which |tries| pings must succeed.
    @param tries: number of pings to send.
    @param timeout: number of seconds after which to kill 'ping' command.
    @return exit code of ping command.
    """
    args = [host]
    if deadline:
        args.append('-w%d' % deadline)
    if tries:
        args.append('-c%d' % tries)
    return base_utils.run('ping', args=args,
                          ignore_status=True, timeout=timeout,
                          stdout_tee=base_utils.TEE_TO_LOGS,
                          stderr_tee=base_utils.TEE_TO_LOGS).exit_status
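# Illustrative usage (hypothetical host): ping('chromeos1-rack1-host1',
# deadline=5, tries=3) runs `ping chromeos1-rack1-host1 -w5 -c3` and, per the
# docstring above, returns 0 only if all three pings are answered within 5
# seconds.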


def host_is_in_lab_zone(hostname):
    """Check if the host is in the CLIENT.dns_zone.

    @param hostname: The hostname to check.
    @returns True if hostname.dns_zone resolves, otherwise False.
    """
    host_parts = hostname.split('.')
    dns_zone = CONFIG.get_config_value('CLIENT', 'dns_zone', default=None)
    fqdn = '%s.%s' % (host_parts[0], dns_zone)
    try:
        socket.gethostbyname(fqdn)
        return True
    except socket.gaierror:
        return False


def host_could_be_in_afe(hostname):
    """Check if the host could be in Autotest Front End.

    Report whether or not a host could be in AFE, without actually
    consulting AFE. This method exists because some systems are in the
    lab zone, but not actually managed by AFE.

    @param hostname: The hostname to check.
    @returns True if hostname is in the lab zone and does not match *-dev-*.
    """
    # Do the 'dev' check first, so that we skip DNS lookup if the
    # hostname matches. This should give us greater resilience to lab
    # failures.
    return (hostname.find('-dev-') == -1) and host_is_in_lab_zone(hostname)


def get_chrome_version(job_views):
    """
    Retrieves the version of the chrome binary associated with a job.

    When a test runs we query the chrome binary for its version and drop
    that value into a client keyval. To retrieve the chrome version we get all
    the views associated with a test from the db, including those of the
    server and client jobs, and parse the version out of the first test view
    that has it. If we never ran a single test in the suite the job_views
    dictionary will not contain a chrome version.

    This method cannot retrieve the chrome version from a dictionary that
    does not conform to the structure of an autotest tko view.

    @param job_views: a list of a job's result views, as returned by
                      the get_detailed_test_views method in rpc_interface.
    @return: The chrome version string, or None if one can't be found.
    """

    # Aborted jobs have no views.
    if not job_views:
        return None

    for view in job_views:
        if (view.get('attributes')
            and constants.CHROME_VERSION in view['attributes'].keys()):
            return view['attributes'].get(constants.CHROME_VERSION)

    logging.warning('Could not find chrome version for failure.')
    return None


def get_interface_mac_address(interface):
    """Return the MAC address of a given interface.

    @param interface: Interface to look up the MAC address of.
    """
    interface_link = base_utils.run(
            'ip addr show %s | grep link/ether' % interface).stdout
    # The output will be in the format of:
    # 'link/ether <mac> brd ff:ff:ff:ff:ff:ff'
    return interface_link.split()[1]


def get_offload_gsuri():
    """Return the GSURI to offload test results to.

    For the normal use case this is the results_storage_server in the
    global_config.

    However, partners using Moblab will be offloading their results to a
    subdirectory of their image storage buckets. The subdirectory is
    determined by the MAC Address of the Moblab device.

    @returns gsuri to offload test results to.
    """
    if not lsbrelease_utils.is_moblab():
        return DEFAULT_OFFLOAD_GSURI
    moblab_id_filepath = '/home/moblab/.moblab_id'
    if os.path.exists(moblab_id_filepath):
        with open(moblab_id_filepath, 'r') as moblab_id_file:
            random_id = moblab_id_file.read()
    else:
        random_id = uuid.uuid1()
        with open(moblab_id_filepath, 'w') as moblab_id_file:
            moblab_id_file.write('%s' % random_id)
    return '%sresults/%s/%s/' % (
            CONFIG.get_config_value('CROS', 'image_storage_server'),
            get_interface_mac_address(MOBLAB_ETH), random_id)


# TODO(petermayo): crosbug.com/31826 Share this with _GsUpload in
# //chromite.git/buildbot/prebuilt.py somewhere/somehow
def gs_upload(local_file, remote_file, acl, result_dir=None,
              transfer_timeout=300, acl_timeout=300):
    """Upload to GS bucket.

    @param local_file: Local file to upload.
    @param remote_file: Remote location to upload the local_file to.
    @param acl: name or file used for controlling access to the uploaded
                file.
    @param result_dir: Result directory if you want to add tracing to the
                       upload.
    @param transfer_timeout: Timeout for this upload call.
    @param acl_timeout: Timeout for the acl call needed to confirm that
                        the uploader has permissions to execute the upload.

    @raise CmdError: the exit code of the gsutil call was not 0.

    @returns True/False - depending on if the upload succeeded or failed.
    """
    # https://developers.google.com/storage/docs/accesscontrol#extension
    CANNED_ACLS = ['project-private', 'private', 'public-read',
                   'public-read-write', 'authenticated-read',
                   'bucket-owner-read', 'bucket-owner-full-control']
    _GSUTIL_BIN = 'gsutil'
    acl_cmd = None
    if acl in CANNED_ACLS:
        cmd = '%s cp -a %s %s %s' % (_GSUTIL_BIN, acl, local_file, remote_file)
    else:
        # For private uploads we assume that the overlay board is set up
        # properly and a googlestore_acl.xml is present; if not, this script
        # errors out.
        cmd = '%s cp -a private %s %s' % (_GSUTIL_BIN, local_file, remote_file)
        if not os.path.exists(acl):
            logging.error('Unable to find ACL File %s.', acl)
            return False
        acl_cmd = '%s setacl %s %s' % (_GSUTIL_BIN, acl, remote_file)
    if not result_dir:
        base_utils.run(cmd, timeout=transfer_timeout, verbose=True)
        if acl_cmd:
            base_utils.run(acl_cmd, timeout=acl_timeout, verbose=True)
        return True
    with open(os.path.join(result_dir, 'tracing'), 'w') as ftrace:
        ftrace.write('Preamble\n')
        base_utils.run(cmd, timeout=transfer_timeout, verbose=True,
                       stdout_tee=ftrace, stderr_tee=ftrace)
        if acl_cmd:
            ftrace.write('\nACL setting\n')
            # Apply the passed in ACL xml file to the uploaded object.
            base_utils.run(acl_cmd, timeout=acl_timeout, verbose=True,
                           stdout_tee=ftrace, stderr_tee=ftrace)
        ftrace.write('Postamble\n')
        return True


def gs_ls(uri_pattern):
    """Returns a list of URIs that match a given pattern.

    @param uri_pattern: a GS URI pattern, may contain wildcards

    @return A list of URIs matching the given pattern.

    @raise CmdError: the gsutil command failed.

    """
    gs_cmd = ' '.join(['gsutil', 'ls', uri_pattern])
    result = base_utils.system_output(gs_cmd).splitlines()
    return [path.rstrip() for path in result if path]


def nuke_pids(pid_list, signal_queue=[signal.SIGTERM, signal.SIGKILL]):
    """
    Given a list of pids, kill them via an escalating series of signals.

    @param pid_list: List of PIDs to kill.
    @param signal_queue: Queue of signals sent to the PIDs to terminate them.

    @return: A mapping of the signal name to the number of processes it
        was sent to.
    """
    sig_count = {}
    # Though this is slightly hacky it beats hardcoding names any day.
    sig_names = dict((k, v) for v, k in signal.__dict__.iteritems()
                     if v.startswith('SIG'))
    for sig in signal_queue:
        logging.debug('Sending signal %s to the following pids:', sig)
        sig_count[sig_names.get(sig, 'unknown_signal')] = len(pid_list)
        for pid in pid_list:
            logging.debug('Pid %d', pid)
            try:
                os.kill(pid, sig)
            except OSError:
                # The process may have died from a previous signal before we
                # could kill it.
                pass
        if sig == signal.SIGKILL:
            return sig_count
        pid_list = [pid for pid in pid_list if base_utils.pid_is_alive(pid)]
        if not pid_list:
            break
        time.sleep(CHECK_PID_IS_ALIVE_TIMEOUT)
    failed_list = []
    for pid in pid_list:
        if base_utils.pid_is_alive(pid):
            failed_list.append('Could not kill %d for process name: %s.' %
                               (pid, base_utils.get_process_name(pid)))
    if failed_list:
        raise error.AutoservRunError('Following errors occurred: %s' %
                                     failed_list, None)
    return sig_count
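# Illustrative usage (hypothetical pids): nuke_pids([1234, 5678]) first sends
# SIGTERM to both pids, waits CHECK_PID_IS_ALIVE_TIMEOUT seconds, then sends
# SIGKILL to any survivors; if one process ignored SIGTERM the returned map
# would be {'SIGTERM': 2, 'SIGKILL': 1}.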


def externalize_host(host):
    """Returns an externally accessible host name.

    @param host: a host name or address (string)

    @return An externally visible host name or address

    """
    return socket.gethostname() if host in _LOCAL_HOST_LIST else host


def urlopen_socket_timeout(url, data=None, timeout=5):
    """
    Wrapper to urllib2.urlopen with a socket timeout.

    This method will convert all socket timeouts to
    TimeoutExceptions, so we can use it in conjunction
    with the rpc retry decorator and continue to handle
    other URLErrors as we see fit.

    @param url: The url to open.
    @param data: The data to send to the url (eg: the urlencoded dictionary
                 used with a POST call).
    @param timeout: The timeout for this urlopen call.

    @return: The response of the urlopen call.

    @raises: error.TimeoutException when a socket timeout occurs.
             urllib2.URLError for errors that are not caused by a timeout.
             urllib2.HTTPError for errors like 404 url not found.
    """
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
        return urllib2.urlopen(url, data=data)
    except urllib2.URLError as e:
        if type(e.reason) is socket.timeout:
            raise error.TimeoutException(str(e))
        raise
    finally:
        socket.setdefaulttimeout(old_timeout)


def parse_chrome_version(version_string):
    """
    Parse a chrome version string and return version and milestone.

    Given a chrome version of the form "W.X.Y.Z", return "W.X.Y.Z" as
    the version and "W" as the milestone.

    @param version_string: Chrome version string.
    @return: a tuple (chrome_version, milestone). If the incoming version
             string is not of the form "W.X.Y.Z", chrome_version will
             be set to the incoming "version_string" argument and the
             milestone will be set to the empty string.
    """
    match = re.search(r'(\d+)\.\d+\.\d+\.\d+', version_string)
    ver = match.group(0) if match else version_string
    milestone = match.group(1) if match else ''
    return ver, milestone
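# Illustrative behavior (made-up version strings):
#   parse_chrome_version('38.0.2125.0')   -> ('38.0.2125.0', '38')
#   parse_chrome_version('not-a-version') -> ('not-a-version', '')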


def is_localhost(server):
    """Check if server is equivalent to localhost.

    @param server: Name of the server to check.

    @return: True if given server is equivalent to localhost.

    @raise socket.gaierror: If server name failed to be resolved.
    """
    if server in _LOCAL_HOST_LIST:
        return True
    try:
        return (socket.gethostbyname(socket.gethostname()) ==
                socket.gethostbyname(server))
    except socket.gaierror:
        logging.error('Failed to resolve server name %s.', server)
        return False


def is_puppylab_vm(server):
    """Check if server is a virtual machine in puppylab.

    In the virtual machine testing environment (i.e., puppylab), each
    shard VM has a hostname like localhost:<port>.

    @param server: Server name to check.

    @return True if given server is a virtual machine in puppylab.

    """
    # TODO(mkryu): This is a puppylab specific hack. Please update
    # this method if you have a better solution.
    regex = re.compile(r'(.+):\d+')
    m = regex.match(server)
    if m:
        return m.group(1) in _LOCAL_HOST_LIST
    return False


def get_function_arg_value(func, arg_name, args, kwargs):
    """Get the value of the given argument for the function.

    @param func: Function being called with given arguments.
    @param arg_name: Name of the argument to look for value.
    @param args: arguments for function to be called.
    @param kwargs: keyword arguments for function to be called.

    @return: The value of the given argument for the function.

    @raise ValueError: If the argument is not listed in the function's
        arguments.
    @raise KeyError: If no value is found for the given argument.
    """
    if arg_name in kwargs:
        return kwargs[arg_name]

    argspec = inspect.getargspec(func)
    index = argspec.args.index(arg_name)
    try:
        return args[index]
    except IndexError:
        try:
            # The argument may have a default value. Reverse the defaults so
            # arguments with default values can be counted from the last to
            # the first.
            return argspec.defaults[::-1][len(argspec.args) - index - 1]
        except IndexError:
            raise KeyError('Argument %s is not given a value. argspec: %s, '
                           'args:%s, kwargs:%s' %
                           (arg_name, argspec, args, kwargs))
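# Illustrative sketch (hypothetical function): given
#   def func(build, host=None, timeout=120): ...
# get_function_arg_value(func, 'timeout', ('R43-6809.0.0',), {}) returns 120
# (falling back to the default), while get_function_arg_value(func, 'timeout',
# (), {'timeout': 30}) returns 30 from kwargs.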


def version_match(build_version, release_version, update_url=''):
    """Compare release version from lsb-release with the cros-version label.

    build_version is a string based on build name. It is prefixed with builder
    info and branch ID, e.g., lumpy-release/R43-6809.0.0. It may not include
    builder info, e.g., lumpy-release, in which case, update_url shall be passed
    in to determine if the build is a trybot or pgo-generate build.
    release_version is retrieved from lsb-release.
    These two values might not match exactly.

    The method is designed to compare versions for the following 6 scenarios,
    with samples of build version and expected release version:
    1. trybot non-release build (paladin, pre-cq or test-ap build).
    build version:   trybot-lumpy-paladin/R27-3837.0.0-b123
    release version: 3837.0.2013_03_21_1340

    2. trybot release build.
    build version:   trybot-lumpy-release/R27-3837.0.0-b456
    release version: 3837.0.0

    3. buildbot official release build.
    build version:   lumpy-release/R27-3837.0.0
    release version: 3837.0.0

    4. non-official paladin rc build.
    build version:   lumpy-paladin/R27-3878.0.0-rc7
    release version: 3837.0.0-rc7

    5. chrome-perf build.
    build version:   lumpy-chrome-perf/R28-3837.0.0-b2996
    release version: 3837.0.0

    6. pgo-generate build.
    build version:   lumpy-release-pgo-generate/R28-3837.0.0-b2996
    release version: 3837.0.0-pgo-generate

    TODO: This logic has a bug if a trybot paladin build failed to be
    installed in a DUT running an older trybot paladin build with same
    platform number, but different build number (-b###). So to conclusively
    determine if a tryjob paladin build is imaged successfully, we may need
    to find out the date string from update url.

    @param build_version: Build name for cros version, e.g.
                          peppy-release/R43-6809.0.0 or R43-6809.0.0
    @param release_version: Release version retrieved from lsb-release,
                            e.g., 6809.0.0
    @param update_url: Update url which includes the full builder information.
                       Default is set to empty string.

    @return: True if the values match, otherwise returns False.
    """
    # If the build is from a release, CQ or PFQ builder, the cros-version label
    # must end with the release version in lsb-release.
    if build_version.endswith(release_version):
        return True

    # Remove R#- and -b# at the end of build version
    stripped_version = re.sub(r'(R\d+-|-b\d+)', '', build_version)
    # Trim the builder info, e.g., trybot-lumpy-paladin/
    stripped_version = stripped_version.split('/')[-1]

    is_trybot_non_release_build = (
            re.match(r'.*trybot-.+-(paladin|pre-cq|test-ap)', build_version) or
            re.match(r'.*trybot-.+-(paladin|pre-cq|test-ap)', update_url))

    # Replace the date string with 0 in release_version
    release_version_no_date = re.sub(r'\d{4}_\d{2}_\d{2}_\d+', '0',
                                     release_version)
    has_date_string = release_version != release_version_no_date

    is_pgo_generate_build = (
            re.match(r'.+-pgo-generate', build_version) or
            re.match(r'.+-pgo-generate', update_url))

    # Remove |-pgo-generate| in release_version
    release_version_no_pgo = release_version.replace('-pgo-generate', '')
    has_pgo_generate = release_version != release_version_no_pgo

    if is_trybot_non_release_build:
        if not has_date_string:
            logging.error('A trybot paladin or pre-cq build is expected. '
                          'Version "%s" is not a paladin or pre-cq build.',
                          release_version)
            return False
        return stripped_version == release_version_no_date
    elif is_pgo_generate_build:
        if not has_pgo_generate:
            logging.error('A pgo-generate build is expected. Version '
                          '"%s" is not a pgo-generate build.',
                          release_version)
            return False
        return stripped_version == release_version_no_pgo
    else:
        if has_date_string:
            logging.error('Unexpected date found in a non-trybot paladin or '
                          'pre-cq build.')
            return False
        # Versioned build, i.e., rc or release build.
        return stripped_version == release_version
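# Illustrative checks (sample values taken from the docstring scenarios):
#   version_match('lumpy-release/R27-3837.0.0', '3837.0.0')  -> True
#   version_match('trybot-lumpy-paladin/R27-3837.0.0-b123',
#                 '3837.0.2013_03_21_1340')                  -> True
#   version_match('lumpy-release/R27-3837.0.0', '3838.0.0')  -> False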


def get_real_user():
    """Get the real user that runs the script.

    The function checks the environment variable SUDO_USER for the user if the
    script is run with sudo. Otherwise, it returns the value of the environment
    variable USER.

    @return: The user name that runs the script.

    """
    user = os.environ.get('SUDO_USER')
    if not user:
        user = os.environ.get('USER')
    return user


def sudo_require_password():
    """Test if the process can run the sudo command without using a password.

    @return: True if the process needs a password to run the sudo command.

    """
    try:
        base_utils.run('sudo -n true')
        return False
    except error.CmdError:
        logging.warn('sudo command requires password.')
        return True


def is_in_container():
    """Check if the process is running inside a container.

    @return: True if the process is running inside a container, otherwise False.
    """
    result = base_utils.run('grep -q "/lxc/" /proc/1/cgroup',
                            verbose=False, ignore_status=True)
    return result.exit_status == 0


def is_flash_installed():
    """
    Check if the Adobe Flash binary is present.

    The Adobe Flash binary is only distributed with internal builds.

    @return: True if both the Flash binary and its info file are installed.
    """
    return (os.path.exists('/opt/google/chrome/pepper/libpepflashplayer.so')
        and os.path.exists('/opt/google/chrome/pepper/pepper-flash.info'))


def verify_flash_installed():
    """
    Raise TestNAError if the Adobe Flash binary is not installed.

    The Adobe Flash binary is only distributed with internal builds.
    Warn users of public builds about the extra dependency.

    @raise error.TestNAError: If no Adobe Flash binary is installed.
    """
    if not is_flash_installed():
        raise error.TestNAError('No Adobe Flash binary installed.')


def is_in_same_subnet(ip_1, ip_2, mask_bits=24):
    """Check if two IP addresses are in the same subnet with given mask bits.

    The two IP addresses are IPv4 address strings, e.g., '192.168.0.3'.

    @param ip_1: First IP address to compare.
    @param ip_2: Second IP address to compare.
    @param mask_bits: Number of mask bits for subnet comparison. Default to 24.

    @return: True if the two IP addresses are in the same subnet.

    """
    mask = ((2L << (mask_bits - 1)) - 1) << (32 - mask_bits)
    ip_1_num = struct.unpack('!I', socket.inet_aton(ip_1))[0]
    ip_2_num = struct.unpack('!I', socket.inet_aton(ip_2))[0]
    return ip_1_num & mask == ip_2_num & mask
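# Illustrative checks (arbitrary addresses):
#   is_in_same_subnet('192.168.0.3', '192.168.0.200', 24) -> True
#   is_in_same_subnet('192.168.0.3', '192.168.1.3', 24)   -> False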


def get_ip_address(hostname):
    """Get the IP address of given hostname.

    @param hostname: Hostname of a DUT.

    @return: The IP address of given hostname. None if failed to resolve
             hostname.
    """
    try:
        if hostname:
            return socket.gethostbyname(hostname)
    except socket.gaierror as e:
        logging.error('Failed to get IP address of %s, error: %s.', hostname, e)


def get_servers_in_same_subnet(host_ip, mask_bits, servers=None,
                               server_ip_map=None):
    """Get the servers in the same subnet of the given host ip.

    @param host_ip: The IP address of a DUT to look for a devserver.
    @param mask_bits: Number of mask bits.
    @param servers: A list of servers to be filtered by the subnet specified by
                    host_ip and mask_bits.
    @param server_ip_map: A map between the server name and its IP address.
            The map can be pre-built for better performance, e.g., when
            allocating a drone for an agent task.

    @return: A list of servers in the same subnet of the given host ip.

    """
    matched_servers = []
    if not servers and not server_ip_map:
        raise ValueError('Either `servers` or `server_ip_map` must be given.')
    if not servers:
        servers = server_ip_map.keys()
    # Make sure server_ip_map is a dict so lookups below are safe when only
    # `servers` is given.
    if not server_ip_map:
        server_ip_map = {}
    for server in servers:
        server_ip = server_ip_map.get(server) or get_ip_address(server)
        if server_ip and is_in_same_subnet(server_ip, host_ip, mask_bits):
            matched_servers.append(server)
    return matched_servers
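# Illustrative usage (hypothetical drone names and addresses): with
#   server_ip_map = {'drone1.cros': '192.168.0.10', 'drone2.cros': '10.0.0.5'}
# get_servers_in_same_subnet('192.168.0.42', 24, server_ip_map=server_ip_map)
# would return ['drone1.cros'].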


def get_restricted_subnet(hostname, restricted_subnets=RESTRICTED_SUBNETS):
    """Get the restricted subnet of given hostname.

    @param hostname: Name of the host to look for matched restricted subnet.
    @param restricted_subnets: A list of restricted subnets, default is set to
            RESTRICTED_SUBNETS.

    @return: A tuple of (subnet_ip, mask_bits), which defines a restricted
             subnet.
    """
    host_ip = get_ip_address(hostname)
    if not host_ip:
        return
    for subnet_ip, mask_bits in restricted_subnets:
        if is_in_same_subnet(subnet_ip, host_ip, mask_bits):
            return subnet_ip, mask_bits


def get_wireless_ssid(hostname):
    """Get the wireless ssid based on given hostname.

    The method tries to locate the wireless ssid in the same subnet of given
    hostname first. If none is found, it returns the default setting in
    CLIENT/wireless_ssid.

    @param hostname: Hostname of the test device.

    @return: wireless ssid for the test device.
    """
    default_ssid = CONFIG.get_config_value('CLIENT', 'wireless_ssid',
                                           default=None)
    host_ip = get_ip_address(hostname)
    if not host_ip:
        return default_ssid

    # Get all wireless ssids in the global config.
    ssids = CONFIG.get_config_value_regex('CLIENT', WIRELESS_SSID_PATTERN)

    for key, value in ssids.items():
        # A config key filtered by the regex WIRELESS_SSID_PATTERN has the
        # format wireless_ssid_[subnet_ip]/[maskbit], for example:
        # wireless_ssid_192.168.0.1/24
        # The following lines extract the subnet IP and mask bits from the
        # key name.
        match = re.match(WIRELESS_SSID_PATTERN, key)
        subnet_ip, maskbit = match.groups()
        if is_in_same_subnet(subnet_ip, host_ip, int(maskbit)):
            return value
    return default_ssid


def parse_android_build(build_name):
    """Get branch, target, build_id from the given build_name.

    @param build_name: Name of an Android build, should be formatted as
                       branch/target/build_id

    @return: Tuple of branch, target, build_id
    @raise ValueError: If the build_name is not correctly formatted.
    """
    branch, target, build_id = build_name.split('/')
    return branch, target, build_id
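# Illustrative parse (made-up build name):
#   parse_android_build('git_mnc-release/shamu-userdebug/1234')
#   -> ('git_mnc-release', 'shamu-userdebug', '1234')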


def extract_wav_frames(wave_file):
    """Extract all frames from a WAV file.

    @param wave_file: A Wave_read object representing a WAV file opened for
                      reading.

    @return: A list containing the frames in the WAV file.
    """
    num_frames = wave_file.getnframes()
    sample_width = wave_file.getsampwidth()
    if sample_width == 1:
        fmt = '%iB'  # Read 1 byte.
    elif sample_width == 2:
        fmt = '%ih'  # Read 2 bytes.
    elif sample_width == 4:
        fmt = '%ii'  # Read 4 bytes.
    else:
        raise ValueError('Unsupported sample width')
    return list(struct.unpack(fmt % (num_frames * wave_file.getnchannels()),
                              wave_file.readframes(num_frames)))
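# Illustrative sketch: for a stereo (2-channel), 16-bit WAV with 3 frames the
# struct format evaluates to '6h' (3 frames * 2 channels, signed shorts), so
# the returned list holds 6 interleaved samples: [L0, R0, L1, R1, L2, R2].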


def check_wav_file(filename, num_channels=None, sample_rate=None,
                   sample_width=None):
    """Checks a WAV file and returns its peak PCM values.

    @param filename: Input WAV file to analyze.
    @param num_channels: Number of channels to expect (None to not check).
    @param sample_rate: Sample rate to expect (None to not check).
    @param sample_width: Sample width to expect (None to not check).

    @return A list of the absolute maximum PCM values for each channel in the
            WAV file.

    @raise ValueError: Failed to process the WAV file or validate an attribute.
    """
    chk_file = None
    try:
        chk_file = wave.open(filename, 'r')
        if num_channels is not None and chk_file.getnchannels() != num_channels:
            raise ValueError('Expected %d channels but got %d instead.' %
                             (num_channels, chk_file.getnchannels()))
        if sample_rate is not None and chk_file.getframerate() != sample_rate:
            raise ValueError('Expected sample rate %d but got %d instead.' %
                             (sample_rate, chk_file.getframerate()))
        if sample_width is not None and chk_file.getsampwidth() != sample_width:
            raise ValueError('Expected sample width %d but got %d instead.' %
                             (sample_width, chk_file.getsampwidth()))
        frames = extract_wav_frames(chk_file)
    except wave.Error as e:
        raise ValueError('Error processing WAV file: %s' % e)
    finally:
        if chk_file is not None:
            chk_file.close()

    # Since 8-bit PCM is unsigned with an offset of 128, we subtract the offset
    # to make it signed since the rest of the code assumes signed numbers.
    if chk_file.getsampwidth() == 1:
        frames = [val - 128 for val in frames]

    peaks = []
    for i in range(chk_file.getnchannels()):
        peaks.append(max(map(abs, frames[i::chk_file.getnchannels()])))
    return peaks


def which(exec_file):
    """Finds an executable file.

    If the file name contains a path component, it is checked as-is.
    Otherwise, each directory in the system PATH is prepended in turn and the
    result is checked. This behavior is similar to the 'which' command-line
    tool.

    @param exec_file: Name or path to desired executable.

    @return: An actual path to the executable, or None if not found.
    """
    if os.path.dirname(exec_file):
        return exec_file if os.access(exec_file, os.X_OK) else None
    sys_path = os.environ.get('PATH')
    prefix_list = sys_path.split(os.pathsep) if sys_path else []
    for prefix in prefix_list:
        path = os.path.join(prefix, exec_file)
        if os.access(path, os.X_OK):
            return path
