#!/usr/bin/env python3.4
#
#   Copyright 2019 - The Android Open Source Project
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.

import bokeh
import bokeh.layouts
import bokeh.models
import bokeh.plotting
import collections
import logging
import math
import re
import statistics
import time
from acts.controllers.android_device import AndroidDevice
from acts.controllers.utils_lib import ssh
from concurrent.futures import ThreadPoolExecutor

SHORT_SLEEP = 1
MED_SLEEP = 6
TEST_TIMEOUT = 10
STATION_DUMP = 'iw wlan0 station dump'
SCAN = 'wpa_cli scan'
SCAN_RESULTS = 'wpa_cli scan_results'
SIGNAL_POLL = 'wpa_cli signal_poll'
WPA_CLI_STATUS = 'wpa_cli status'
CONST_3dB = 3.01029995664
RSSI_ERROR_VAL = float('nan')
RTT_REGEX = re.compile(r'^\[(?P<timestamp>\S+)\] .*? time=(?P<rtt>\S+)')
LOSS_REGEX = re.compile(r'(?P<loss>\S+)% packet loss')


# Threading decorator
def nonblocking(f):
    """Creates a decorator transforming function calls to non-blocking."""

    def wrap(*args, **kwargs):
        executor = ThreadPoolExecutor(max_workers=1)
        thread_future = executor.submit(f, *args, **kwargs)
        # Ensure resources are freed up when executor returns or raises
        executor.shutdown(wait=False)
        return thread_future

    return wrap

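# Example (illustrative sketch, not part of the module): a function wrapped
# with @nonblocking returns a concurrent.futures.Future immediately, and the
# caller collects the result later with .result(). The wrapped function below
# is hypothetical.
#
#   @nonblocking
#   def slow_measurement():
#       time.sleep(MED_SLEEP)
#       return 'done'
#
#   future = slow_measurement()
#   # ... do other work while the measurement runs ...
#   assert future.result(timeout=TEST_TIMEOUT) == 'done'
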
# Plotting Utilities
class BokehFigure():
    def __init__(self,
                 title=None,
                 x_label=None,
                 primary_y=None,
                 secondary_y=None,
                 height=700,
                 width=1300,
                 title_size=15,
                 axis_label_size=12):
        self.figure_data = []
        self.fig_property = {
            'title': title,
            'x_label': x_label,
            'primary_y_label': primary_y,
            'secondary_y_label': secondary_y,
            'num_lines': 0,
            'title_size': '{}pt'.format(title_size),
            'axis_label_size': '{}pt'.format(axis_label_size)
        }
        self.TOOLS = (
            'box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save')
        self.COLORS = [
            'black',
            'blue',
            'blueviolet',
            'brown',
            'burlywood',
            'cadetblue',
            'cornflowerblue',
            'crimson',
            'cyan',
            'darkblue',
            'darkgreen',
            'darkmagenta',
            'darkorange',
            'darkred',
            'deepskyblue',
            'goldenrod',
            'green',
            'grey',
            'indigo',
            'navy',
            'olive',
            'orange',
            'red',
            'salmon',
            'teal',
            'yellow',
        ]
        self.MARKERS = [
            'asterisk', 'circle', 'circle_cross', 'circle_x', 'cross',
            'diamond', 'diamond_cross', 'hex', 'inverted_triangle', 'square',
            'square_x', 'square_cross', 'triangle', 'x'
        ]
        self.plot = bokeh.plotting.figure(
            plot_width=width,
            plot_height=height,
            title=title,
            tools=self.TOOLS,
            output_backend='webgl')
        self.plot.add_tools(
            bokeh.models.tools.WheelZoomTool(dimensions='width'))
        self.plot.add_tools(
            bokeh.models.tools.WheelZoomTool(dimensions='height'))

    def add_line(self,
                 x_data,
                 y_data,
                 legend,
                 color=None,
                 width=3,
                 style='solid',
                 marker=None,
                 marker_size=10,
                 shaded_region=None,
                 y_axis='default'):
        if y_axis not in ['default', 'secondary']:
            raise ValueError('y_axis must be default or secondary')
        if color is None:
            color = self.COLORS[self.fig_property['num_lines'] % len(
                self.COLORS)]
        if style == 'dashed':
            style = [5, 5]
        self.figure_data.append({
            'x_data': x_data,
            'y_data': y_data,
            'legend': legend,
            'color': color,
            'width': width,
            'style': style,
            'marker': marker,
            'marker_size': marker_size,
            'shaded_region': shaded_region,
            'y_range_name': y_axis
        })
        self.fig_property['num_lines'] += 1

    def generate_figure(self, output_file=None):
        two_axes = False
        for line in self.figure_data:
            self.plot.line(
                line['x_data'],
                line['y_data'],
                legend=line['legend'],
                line_width=line['width'],
                color=line['color'],
                line_dash=line['style'],
                name=line['y_range_name'],
                y_range_name=line['y_range_name'])
            if line['shaded_region']:
                band_x = line['shaded_region']['x_vector']
                band_x.extend(line['shaded_region']['x_vector'][::-1])
                band_y = line['shaded_region']['lower_limit']
                band_y.extend(line['shaded_region']['upper_limit'][::-1])
                self.plot.patch(
                    band_x,
                    band_y,
                    color='#7570B3',
                    line_alpha=0.1,
                    fill_alpha=0.1)
            if line['marker'] in self.MARKERS:
                marker_func = getattr(self.plot, line['marker'])
                marker_func(
                    line['x_data'],
                    line['y_data'],
                    size=line['marker_size'],
                    legend=line['legend'],
                    fill_color=line['color'],
                    name=line['y_range_name'],
                    y_range_name=line['y_range_name'])
            if line['y_range_name'] == 'secondary':
                two_axes = True

        # x-axis formatting
        self.plot.xaxis.axis_label = self.fig_property['x_label']
        self.plot.x_range.range_padding = 0
        self.plot.xaxis[0].axis_label_text_font_size = self.fig_property[
            'axis_label_size']
        # y-axis formatting
        self.plot.yaxis[0].axis_label = self.fig_property['primary_y_label']
        self.plot.yaxis[0].axis_label_text_font_size = self.fig_property[
            'axis_label_size']
        self.plot.y_range = bokeh.models.DataRange1d(names=['default'])
        if two_axes and 'secondary' not in self.plot.extra_y_ranges:
            self.plot.extra_y_ranges = {
                'secondary': bokeh.models.DataRange1d(names=['secondary'])
            }
            self.plot.add_layout(
                bokeh.models.LinearAxis(
                    y_range_name='secondary',
                    axis_label=self.fig_property['secondary_y_label'],
                    axis_label_text_font_size=self.
                    fig_property['axis_label_size']), 'right')
        # plot formatting
        self.plot.legend.location = 'top_right'
        self.plot.legend.click_policy = 'hide'
        self.plot.title.text_font_size = self.fig_property['title_size']

        if output_file is not None:
            bokeh.plotting.output_file(output_file)
            bokeh.plotting.save(self.plot)
        return self.plot

    def save_figure(self, output_file):
        bokeh.plotting.output_file(output_file)
        bokeh.plotting.save(self.plot)

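# Example (illustrative sketch): typical BokehFigure usage. The data points
# and output file name below are made up for demonstration.
#
#   figure = BokehFigure(title='RSSI vs. Time',
#                        x_label='Time (s)',
#                        primary_y='RSSI (dBm)')
#   figure.add_line([0, 1, 2], [-45, -47, -46], legend='chain 0',
#                   marker='circle')
#   figure.generate_figure(output_file='rssi_over_time.html')
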
def bokeh_plot(data_sets,
               legends,
               fig_property,
               shaded_region=None,
               output_file_path=None):
    """Plot bokeh figs.

    Args:
        data_sets: data sets including lists of x_data and lists of y_data
            ex: [[[x_data1], [x_data2]], [[y_data1],[y_data2]]]
        legends: list of legend for each curve
        fig_property: dict containing the plot properties, including title,
            labels, linewidth, circle size, etc.
        shaded_region: optional dict containing data for plot shading
        output_file_path: optional path at which to save figure
    Returns:
        plot: bokeh plot figure object
    """
    TOOLS = ('box_zoom,box_select,pan,crosshair,redo,undo,reset,hover,save')
    plot = bokeh.plotting.figure(
        plot_width=1300,
        plot_height=700,
        title=fig_property['title'],
        tools=TOOLS,
        output_backend='webgl')
    plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions='width'))
    plot.add_tools(bokeh.models.tools.WheelZoomTool(dimensions='height'))
    colors = [
        'red', 'green', 'blue', 'olive', 'orange', 'salmon', 'black', 'navy',
        'yellow', 'darkred', 'goldenrod'
    ]
    if shaded_region:
        band_x = shaded_region['x_vector']
        band_x.extend(shaded_region['x_vector'][::-1])
        band_y = shaded_region['lower_limit']
        band_y.extend(shaded_region['upper_limit'][::-1])
        plot.patch(
            band_x, band_y, color='#7570B3', line_alpha=0.1, fill_alpha=0.1)

    for x_data, y_data, legend in zip(data_sets[0], data_sets[1], legends):
        index_now = legends.index(legend)
        color = colors[index_now % len(colors)]
        plot.line(
            x_data,
            y_data,
            legend=str(legend),
            line_width=fig_property['linewidth'],
            color=color)
        plot.circle(
            x_data,
            y_data,
            size=fig_property['markersize'],
            legend=str(legend),
            fill_color=color)

    # Plot properties
    plot.xaxis.axis_label = fig_property['x_label']
    plot.yaxis.axis_label = fig_property['y_label']
    plot.legend.location = 'top_right'
    plot.legend.click_policy = 'hide'
    plot.title.text_font_size = {'value': '15pt'}
    if output_file_path is not None:
        bokeh.plotting.output_file(output_file_path)
        bokeh.plotting.save(plot)
    return plot


def save_bokeh_plots(plot_array, output_file_path):
    all_plots = bokeh.layouts.column(children=plot_array)
    bokeh.plotting.output_file(output_file_path)
    bokeh.plotting.save(all_plots)

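# Example (illustrative sketch): plotting two curves with bokeh_plot and
# combining figures into one HTML report with save_bokeh_plots. All values
# and file names below are made up for demonstration.
#
#   fig_property = {
#       'title': 'Throughput vs. Attenuation',
#       'x_label': 'Attenuation (dB)',
#       'y_label': 'Throughput (Mbps)',
#       'linewidth': 3,
#       'markersize': 10
#   }
#   data_sets = [[[10, 20, 30], [10, 20, 30]],
#                [[450, 320, 180], [430, 300, 160]]]
#   plot = bokeh_plot(data_sets, ['run 1', 'run 2'], fig_property)
#   save_bokeh_plots([plot], 'throughput_report.html')
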
class PingResult(object):
    """An object that contains the results of running the ping command.

    Attributes:
        connected: True if a connection was made. False otherwise.
        packet_loss_percentage: The total percentage of packets lost.
        transmission_times: The list of PingTransmissionTimes containing the
            timestamps gathered for transmitted packets.
        rtts: A list-like object enumerating all round-trip-times of
            transmitted packets.
        timestamps: A list-like object enumerating the beginning timestamps of
            each packet transmission.
        ping_interarrivals: A list-like object enumerating the amount of time
            between the beginning of each subsequent transmission.
    """

    def __init__(self, ping_output):
        self.packet_loss_percentage = 100
        self.transmission_times = []

        self.rtts = _ListWrap(self.transmission_times, lambda entry: entry.rtt)
        self.timestamps = _ListWrap(
            self.transmission_times, lambda entry: entry.timestamp)
        self.ping_interarrivals = _PingInterarrivals(self.transmission_times)

        for line in ping_output:
            if 'loss' in line:
                match = re.search(LOSS_REGEX, line)
                self.packet_loss_percentage = float(match.group('loss'))
            if 'time=' in line:
                match = re.search(RTT_REGEX, line)
                self.transmission_times.append(
                    PingTransmissionTimes(
                        float(match.group('timestamp')),
                        float(match.group('rtt'))))
        self.connected = len(
            ping_output) > 1 and self.packet_loss_percentage < 100

    def __getitem__(self, item):
        if item == 'rtt':
            return self.rtts
        if item == 'connected':
            return self.connected
        if item == 'packet_loss_percentage':
            return self.packet_loss_percentage
        raise ValueError('Invalid key. Please use an attribute instead.')

    def as_dict(self):
        return {
            'connected': 1 if self.connected else 0,
            'rtt': list(self.rtts),
            'time_stamp': list(self.timestamps),
            'ping_interarrivals': list(self.ping_interarrivals),
            'packet_loss_percentage': self.packet_loss_percentage
        }


class PingTransmissionTimes(object):
    """A class that holds the timestamps for a packet sent via the ping command.

    Attributes:
        rtt: The round trip time for the packet sent.
        timestamp: The timestamp the packet started its trip.
    """

    def __init__(self, timestamp, rtt):
        self.rtt = rtt
        self.timestamp = timestamp


class _ListWrap(object):
    """A convenient helper class for treating list iterators as native lists."""

    def __init__(self, wrapped_list, func):
        self.__wrapped_list = wrapped_list
        self.__func = func

    def __getitem__(self, key):
        return self.__func(self.__wrapped_list[key])

    def __iter__(self):
        for item in self.__wrapped_list:
            yield self.__func(item)

    def __len__(self):
        return len(self.__wrapped_list)


class _PingInterarrivals(object):
    """A helper class for treating ping interarrivals as a native list."""

    def __init__(self, ping_entries):
        self.__ping_entries = ping_entries

    def __getitem__(self, key):
        return (self.__ping_entries[key + 1].timestamp -
                self.__ping_entries[key].timestamp)

    def __iter__(self):
        for index in range(len(self.__ping_entries) - 1):
            yield self[index]

    def __len__(self):
        return max(0, len(self.__ping_entries) - 1)

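# Example (illustrative sketch): PingResult parses the output of a Linux ping
# run with the -D timestamp option. The lines below are representative
# samples, not output captured from a real device.
#
#   sample_output = [
#       '[1556651363.1] 64 bytes from 8.8.8.8: icmp_seq=1 ttl=55 time=4.8 ms',
#       '[1556651363.3] 64 bytes from 8.8.8.8: icmp_seq=2 ttl=55 time=5.1 ms',
#       '2 packets transmitted, 2 received, 0% packet loss, time 201ms'
#   ]
#   result = PingResult(sample_output)
#   result.connected                 # True
#   result.packet_loss_percentage    # 0.0
#   list(result.rtts)                # [4.8, 5.1]
#   list(result.ping_interarrivals)  # approximately [0.2]
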
def get_ping_stats(src_device, dest_address, ping_duration, ping_interval,
                   ping_size):
    """Run ping to or from the DUT.

    The function either pings a remote ip from the DUT (when src_device is an
    AndroidDevice) or pings the DUT from a remote machine (when src_device is
    an SshConnection).

    Args:
        src_device: object representing device to ping from
        dest_address: ip address to ping
        ping_duration: timeout to set on the ping process (in seconds)
        ping_interval: time between pings (in seconds)
        ping_size: size of ping packet payload
    Returns:
        ping_result: PingResult object containing the parsed ping statistics
    """
    ping_cmd = 'ping -w {} -i {} -s {} -D'.format(
        ping_duration,
        ping_interval,
        ping_size,
    )
    if isinstance(src_device, AndroidDevice):
        ping_cmd = '{} {}'.format(ping_cmd, dest_address)
        ping_output = src_device.adb.shell(
            ping_cmd, timeout=ping_duration + TEST_TIMEOUT, ignore_status=True)
    elif isinstance(src_device, ssh.connection.SshConnection):
        ping_cmd = 'sudo {} {}'.format(ping_cmd, dest_address)
        ping_output = src_device.run(ping_cmd, ignore_status=True).stdout
    else:
        raise TypeError(
            'Unable to ping using src_device of type %s.' % type(src_device))
    return PingResult(ping_output.splitlines())


@nonblocking
def get_ping_stats_nb(src_device, dest_address, ping_duration, ping_interval,
                      ping_size):
    return get_ping_stats(src_device, dest_address, ping_duration,
                          ping_interval, ping_size)


@nonblocking
def start_iperf_client_nb(iperf_client, iperf_server_address, iperf_args, tag,
                          timeout):
    return iperf_client.start(iperf_server_address, iperf_args, tag, timeout)

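# Example (illustrative sketch, assuming an ACTS test context): pinging a
# remote address from the DUT while concurrently pinging from a remote
# server. The device and connection objects ('dut', 'remote_server') and the
# addresses are hypothetical.
#
#   dut_ping = get_ping_stats_nb(dut, '192.168.1.1', ping_duration=10,
#                                ping_interval=0.2, ping_size=64)
#   server_ping = get_ping_stats(remote_server, '192.168.1.101',
#                                ping_duration=10, ping_interval=0.2,
#                                ping_size=64)
#   dut_result = dut_ping.result()
#   logging.info('DUT ping loss: %s%%', dut_result.packet_loss_percentage)
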
# Rssi Utilities
def empty_rssi_result():
    return collections.OrderedDict([('data', []), ('mean', None),
                                    ('stdev', None)])


def get_connected_rssi(dut,
                       num_measurements=1,
                       polling_frequency=SHORT_SLEEP,
                       first_measurement_delay=0):
    """Gets all RSSI values reported for the connected access point/BSSID.

    Args:
        dut: android device object from which to get RSSI
        num_measurements: number of scans done, and RSSIs collected
        polling_frequency: time to wait between RSSI measurements
        first_measurement_delay: time to wait before the first measurement
    Returns:
        connected_rssi: dict containing the measurement results for
            all reported RSSI values (signal_poll, per chain, etc.) and their
            statistics
    """
    # yapf: disable
    connected_rssi = collections.OrderedDict(
        [('time_stamp', []),
         ('bssid', []), ('frequency', []),
         ('signal_poll_rssi', empty_rssi_result()),
         ('signal_poll_avg_rssi', empty_rssi_result()),
         ('chain_0_rssi', empty_rssi_result()),
         ('chain_1_rssi', empty_rssi_result())])
    # yapf: enable
    t0 = time.time()
    time.sleep(first_measurement_delay)
    for idx in range(num_measurements):
        measurement_start_time = time.time()
        connected_rssi['time_stamp'].append(measurement_start_time - t0)
        # Get signal poll RSSI
        status_output = dut.adb.shell(WPA_CLI_STATUS)
        match = re.search('bssid=.*', status_output)
        if match:
            bssid = match.group(0).split('=')[1]
            connected_rssi['bssid'].append(bssid)
        else:
            connected_rssi['bssid'].append(RSSI_ERROR_VAL)
        signal_poll_output = dut.adb.shell(SIGNAL_POLL)
        match = re.search('FREQUENCY=.*', signal_poll_output)
        if match:
            frequency = int(match.group(0).split('=')[1])
            connected_rssi['frequency'].append(frequency)
        else:
            connected_rssi['frequency'].append(RSSI_ERROR_VAL)
        match = re.search('RSSI=.*', signal_poll_output)
        if match:
            temp_rssi = int(match.group(0).split('=')[1])
            if temp_rssi == -9999 or temp_rssi == 0:
                connected_rssi['signal_poll_rssi']['data'].append(
                    RSSI_ERROR_VAL)
            else:
                connected_rssi['signal_poll_rssi']['data'].append(temp_rssi)
        else:
            connected_rssi['signal_poll_rssi']['data'].append(RSSI_ERROR_VAL)
        match = re.search('AVG_RSSI=.*', signal_poll_output)
        if match:
            connected_rssi['signal_poll_avg_rssi']['data'].append(
                int(match.group(0).split('=')[1]))
        else:
            connected_rssi['signal_poll_avg_rssi']['data'].append(
                RSSI_ERROR_VAL)
        # Get per chain RSSI
        per_chain_rssi = dut.adb.shell(STATION_DUMP)
        match = re.search('.*signal avg:.*', per_chain_rssi)
        if match:
            per_chain_rssi = per_chain_rssi[per_chain_rssi.find('[') +
                                            1:per_chain_rssi.find(']')]
            per_chain_rssi = per_chain_rssi.split(', ')
            connected_rssi['chain_0_rssi']['data'].append(
                int(per_chain_rssi[0]))
            connected_rssi['chain_1_rssi']['data'].append(
                int(per_chain_rssi[1]))
        else:
            connected_rssi['chain_0_rssi']['data'].append(RSSI_ERROR_VAL)
            connected_rssi['chain_1_rssi']['data'].append(RSSI_ERROR_VAL)
        measurement_elapsed_time = time.time() - measurement_start_time
        time.sleep(max(0, polling_frequency - measurement_elapsed_time))

    # Compute mean RSSIs. Only average valid readings.
    # Output RSSI_ERROR_VAL if no valid connected readings found.
    for key, val in connected_rssi.copy().items():
        if 'data' not in val:
            continue
        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
        if filtered_rssi_values:
            connected_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
            if len(filtered_rssi_values) > 1:
                connected_rssi[key]['stdev'] = statistics.stdev(
                    filtered_rssi_values)
            else:
                connected_rssi[key]['stdev'] = 0
        else:
            connected_rssi[key]['mean'] = RSSI_ERROR_VAL
            connected_rssi[key]['stdev'] = RSSI_ERROR_VAL
    return connected_rssi


@nonblocking
def get_connected_rssi_nb(dut,
                          num_measurements=1,
                          polling_frequency=SHORT_SLEEP,
                          first_measurement_delay=0):
    return get_connected_rssi(dut, num_measurements, polling_frequency,
                              first_measurement_delay)

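# Example (illustrative sketch, assuming a connected ACTS AndroidDevice
# 'dut'): collecting ten RSSI samples at one-second intervals and reading
# back the averaged signal_poll RSSI.
#
#   rssi_result = get_connected_rssi(dut,
#                                    num_measurements=10,
#                                    polling_frequency=1,
#                                    first_measurement_delay=MED_SLEEP)
#   mean_rssi = rssi_result['signal_poll_rssi']['mean']
#   if math.isnan(mean_rssi):
#       logging.warning('No valid RSSI readings collected.')
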
def get_scan_rssi(dut, tracked_bssids, num_measurements=1):
    """Gets scan RSSI for specified BSSIDs.

    Args:
        dut: android device object from which to get RSSI
        tracked_bssids: array of BSSIDs to gather RSSI data for
        num_measurements: number of scans done, and RSSIs collected
    Returns:
        scan_rssi: dict containing the measurement results as well as the
            statistics of the scan RSSI for all BSSIDs in tracked_bssids
    """
    scan_rssi = collections.OrderedDict()
    for bssid in tracked_bssids:
        scan_rssi[bssid] = empty_rssi_result()
    for idx in range(num_measurements):
        scan_output = dut.adb.shell(SCAN)
        time.sleep(MED_SLEEP)
        scan_output = dut.adb.shell(SCAN_RESULTS)
        for bssid in tracked_bssids:
            bssid_result = re.search(
                bssid + '.*', scan_output, flags=re.IGNORECASE)
            if bssid_result:
                bssid_result = bssid_result.group(0).split('\t')
                scan_rssi[bssid]['data'].append(int(bssid_result[2]))
            else:
                scan_rssi[bssid]['data'].append(RSSI_ERROR_VAL)
    # Compute mean RSSIs. Only average valid readings.
    # Output RSSI_ERROR_VAL if no readings found.
    for key, val in scan_rssi.items():
        filtered_rssi_values = [x for x in val['data'] if not math.isnan(x)]
        if filtered_rssi_values:
            scan_rssi[key]['mean'] = statistics.mean(filtered_rssi_values)
            if len(filtered_rssi_values) > 1:
                scan_rssi[key]['stdev'] = statistics.stdev(
                    filtered_rssi_values)
            else:
                scan_rssi[key]['stdev'] = 0
        else:
            scan_rssi[key]['mean'] = RSSI_ERROR_VAL
            scan_rssi[key]['stdev'] = RSSI_ERROR_VAL
    return scan_rssi


@nonblocking
def get_scan_rssi_nb(dut, tracked_bssids, num_measurements=1):
    return get_scan_rssi(dut, tracked_bssids, num_measurements)

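# Example (illustrative sketch, assuming an ACTS AndroidDevice 'dut'):
# tracking the scan RSSI of one BSSID over three scans. The BSSID below is
# hypothetical.
#
#   scan_result = get_scan_rssi(dut, ['aa:bb:cc:dd:ee:ff'],
#                               num_measurements=3)
#   logging.info('Mean scan RSSI: %s',
#                scan_result['aa:bb:cc:dd:ee:ff']['mean'])
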
# Attenuator Utilities
def atten_by_label(atten_list, path_label, atten_level):
    """Attenuate signals according to their path label.

    Args:
        atten_list: list of attenuators to iterate over
        path_label: path label on which to set desired attenuation
        atten_level: attenuation desired on path
    """
    for atten in atten_list:
        if path_label in atten.path:
            atten.set_atten(atten_level)


def get_server_address(ssh_connection, subnet):
    """Get server address on a specific subnet.

    Args:
        ssh_connection: object representing server for which we want an ip
        subnet: string in ip address format, i.e., xxx.xxx.xxx.xxx,
            representing the subnet of interest.
    Returns:
        ip_address: the server's ip address on the requested subnet, or None
            if no matching address is found.
    """
    subnet_str = subnet.split('.')[:-1]
    subnet_str = '.'.join(subnet_str)
    cmd = "ifconfig | grep 'inet addr:{}'".format(subnet_str)
    ip_address = None
    try:
        if_output = ssh_connection.run(cmd).stdout
        ip_line = if_output.split('inet addr:')[1]
        ip_address = ip_line.split(' ')[0]
    except Exception:
        # Without this guard, a missing match would raise an IndexError and
        # ip_address would be unbound at the return statement below.
        logging.warning('Could not find ip in requested subnet.')
    return ip_address
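# Example (illustrative sketch, assuming an acts ssh.connection.SshConnection
# 'remote_server' and a 192.168.1.x test subnet): looking up the server's
# address on the subnet used by the test network.
#
#   server_ip = get_server_address(remote_server, '192.168.1.0')
#   if server_ip is None:
#       logging.warning('Server has no address on the test subnet.')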