# Copyright 2020 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for sensor_fusion hardware rig."""


import bisect
import codecs
import logging
import math
import os
import struct
import time

import cv2
from matplotlib import pylab
import matplotlib.pyplot
import numpy as np
import scipy.spatial
import serial
from serial.tools import list_ports

import camera_properties_utils
import image_processing_utils

# Constants for Rotation Rig
ARDUINO_ANGLE_MAX = 180.0  # degrees
ARDUINO_BAUDRATE = 9600
ARDUINO_CMD_LENGTH = 3
ARDUINO_CMD_TIME = 2.0 * ARDUINO_CMD_LENGTH / ARDUINO_BAUDRATE  # round trip
ARDUINO_PID = 0x0043
ARDUINO_SERVO_SPEED_MAX = 255
ARDUINO_SERVO_SPEED_MIN = 1
ARDUINO_SPEED_START_BYTE = 253
ARDUINO_START_BYTE = 255
ARDUINO_START_NUM_TRYS = 5
ARDUINO_START_TIMEOUT = 300  # seconds
ARDUINO_STRING = 'Arduino'
ARDUINO_TEST_CMD = (b'\x01', b'\x02', b'\x03')
ARDUINO_VALID_CH = ('1', '2', '3', '4', '5', '6')
ARDUINO_VIDS = (0x2341, 0x2a03)

CANAKIT_BAUDRATE = 115200
CANAKIT_CMD_TIME = 0.05  # seconds (found experimentally)
CANAKIT_DATA_DELIMITER = '\r\n'
CANAKIT_PID = 0xfc73
CANAKIT_SEND_TIMEOUT = 0.02  # seconds
CANAKIT_SET_CMD = 'REL'
CANAKIT_SLEEP_TIME = 2  # seconds (for full 90 degree rotation)
CANAKIT_VALID_CMD = ('ON', 'OFF')
CANAKIT_VALID_CH = ('1', '2', '3', '4')
CANAKIT_VID = 0x04d8

HS755HB_ANGLE_MAX = 202.0  # throw for rotation motor in degrees

# From test_sensor_fusion
_FEATURE_MARGIN = 0.20  # Only take feature points from center 20% so that
                        # rotation measured has less rolling shutter effect.
_FEATURE_PTS_MIN = 30  # Min number of feature pts to perform rotation analysis.
# cv2.goodFeatures to track.
# 'POSTMASK' is the measurement method in all previous versions of Android.
# 'POSTMASK' finds best features on entire frame and then masks the features
# to the vertical center FEATURE_MARGIN for the measurement.
# 'PREMASK' is a new measurement that is used when FEATURE_PTS_MIN is not
# found in frame. This finds the best 2*FEATURE_PTS_MIN in the FEATURE_MARGIN
# part of the frame.
_CV2_FEATURE_PARAMS_POSTMASK = dict(maxCorners=240,
                                    qualityLevel=0.3,
                                    minDistance=7,
                                    blockSize=7)
_CV2_FEATURE_PARAMS_PREMASK = dict(maxCorners=2*_FEATURE_PTS_MIN,
                                   qualityLevel=0.3,
                                   minDistance=7,
                                   blockSize=7)
_GYRO_SAMP_RATE_MIN = 100.0  # Samples/second: min gyro sample rate.
_CV2_LK_PARAMS = dict(winSize=(15, 15),
                      maxLevel=2,
                      criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
                                10, 0.03))  # cv2.calcOpticalFlowPyrLK params.
_ROTATION_PER_FRAME_MIN = 0.001  # rads/s
_GYRO_ROTATION_PER_SEC_MAX = 2.0  # rads/s
_R_SQUARED_TOLERANCE = 0.01  # tolerance for polynomial fitting r^2
_SHIFT_DOMAIN_RADIUS = 5  # limited domain centered around best shift

# unittest constants
_COARSE_FIT_RANGE = 20  # Range area around coarse fit to do optimization.
_CORR_TIME_OFFSET_MAX = 50  # ms max shift to try and match camera/gyro times.
_CORR_TIME_OFFSET_STEP = 0.5  # ms step for shifts.

# Unit translators
_MSEC_TO_NSEC = 1000000
_NSEC_TO_SEC = 1E-9
_SEC_TO_NSEC = int(1/_NSEC_TO_SEC)
_RADS_TO_DEGS = 180/math.pi

_NUM_GYRO_PTS_TO_AVG = 20


def polynomial_from_coefficients(coefficients):
  """Return a polynomial function from a coefficient list, highest power first.

  Args:
    coefficients: list of coefficients (float)
  Returns:
    Function in the form of a*x^n + b*x^(n - 1) + ... + constant
  """
  def polynomial(x):
    n = len(coefficients)
    return sum(coefficients[i] * x ** (n - i - 1) for i in range(n))
  return polynomial


def smallest_absolute_minimum_of_polynomial(coefficients):
  """Return the smallest minimum by absolute value from a coefficient list.

  Args:
    coefficients: list of coefficients (float)
  Returns:
    Smallest local minimum (by absolute value) on the function (float)
  """
  first_derivative = np.polyder(coefficients, m=1)
  second_derivative = np.polyder(coefficients, m=2)
  extrema = np.roots(first_derivative)
  smallest_absolute_minimum = None
  for extremum in extrema:
    # Positive second derivative at an extremum means it is a local minimum.
    if np.polyval(second_derivative, extremum) > 0:
      if smallest_absolute_minimum is None or abs(extremum) < abs(
          smallest_absolute_minimum):
        smallest_absolute_minimum = extremum
  if smallest_absolute_minimum is None:
    raise AssertionError(
        f'No minima were found on function described by {coefficients}.')
  return smallest_absolute_minimum


def serial_port_def(name):
  """Determine the serial port and open.

  Args:
    name: string of device to locate (ie. 'Arduino', 'Canakit' or 'Default')
  Returns:
    serial port object
  """
  serial_port = None
  devices = list_ports.comports()
  for device in devices:
    if not (device.vid and device.pid):  # Not all comm ports have vid and pid
      continue
    if name.lower() == 'arduino':
      if (device.vid in ARDUINO_VIDS and device.pid == ARDUINO_PID):
        logging.debug('Arduino: %s', str(device))
        serial_port = device.device
        return serial.Serial(serial_port, ARDUINO_BAUDRATE, timeout=1)

    elif name.lower() in ('canakit', 'default'):
      if (device.vid == CANAKIT_VID and device.pid == CANAKIT_PID):
        logging.debug('Canakit: %s', str(device))
        serial_port = device.device
        return serial.Serial(serial_port, CANAKIT_BAUDRATE,
                             timeout=CANAKIT_SEND_TIMEOUT,
                             parity=serial.PARITY_EVEN,
                             stopbits=serial.STOPBITS_ONE,
                             bytesize=serial.EIGHTBITS)
  raise ValueError(f'{name} device not connected.')


def canakit_cmd_send(canakit_serial_port, cmd_str):
  """Wrapper for sending serial command to Canakit.

  Args:
    canakit_serial_port: port to write for canakit
    cmd_str: str; value to send to device.
  """
  try:
    logging.debug('writing port...')
    canakit_serial_port.write(CANAKIT_DATA_DELIMITER.encode())
    time.sleep(CANAKIT_CMD_TIME)  # This is critical for relay.
    canakit_serial_port.write(cmd_str.encode())

  except IOError as io_error:
    raise IOError(
        f'Port {CANAKIT_VID}:{CANAKIT_PID} is not open!') from io_error


def canakit_set_relay_channel_state(canakit_port, ch, state):
  """Set Canakit relay channel and state.

  Waits CANAKIT_SLEEP_TIME for rotation to occur.

  Args:
    canakit_port: serial port object for the Canakit port.
    ch: string for channel number of relay to set. '1', '2', '3', or '4'
    state: string of either 'ON' or 'OFF'
  """
  logging.debug('Setting relay state %s', state)
  if ch in CANAKIT_VALID_CH and state in CANAKIT_VALID_CMD:
    canakit_cmd_send(canakit_port, CANAKIT_SET_CMD + ch + '.' + state + '\r\n')
    time.sleep(CANAKIT_SLEEP_TIME)
  else:
    logging.debug('Invalid ch (%s) or state (%s), no command sent.', ch, state)


def arduino_read_cmd(port):
  """Read back Arduino command from serial port."""
  cmd = []
  for _ in range(ARDUINO_CMD_LENGTH):
    cmd.append(port.read())
  return cmd


def arduino_send_cmd(port, cmd):
  """Send command to serial port."""
  for i in range(ARDUINO_CMD_LENGTH):
    port.write(cmd[i])


def arduino_loopback_cmd(port, cmd):
  """Send command to serial port and read back the echoed response."""
  arduino_send_cmd(port, cmd)
  time.sleep(ARDUINO_CMD_TIME)
  return arduino_read_cmd(port)


def establish_serial_comm(port):
  """Establish connection with serial port.

  Loops a test command through the Arduino until the echo matches or
  ARDUINO_START_TIMEOUT seconds elapse.

  Args:
    port: initialized serial port object

  Raises:
    AssertionError: if communication cannot be established before timeout.
  """
  logging.debug('Establishing communication with %s', port.name)
  trys = 1
  hex_test = convert_to_hex(ARDUINO_TEST_CMD)
  logging.debug(' test tx: %s %s %s', hex_test[0], hex_test[1], hex_test[2])
  start = time.time()
  while time.time() < start + ARDUINO_START_TIMEOUT:
    try:
      cmd_read = arduino_loopback_cmd(port, ARDUINO_TEST_CMD)
    except serial.serialutil.SerialException as _:
      logging.debug('Port in use, trying again...')
      continue
    hex_read = convert_to_hex(cmd_read)
    logging.debug(' test rx: %s %s %s', hex_read[0], hex_read[1], hex_read[2])
    if cmd_read != list(ARDUINO_TEST_CMD):
      trys += 1
    else:
      logging.debug(' Arduino comm established after %d try(s)', trys)
      break
  else:
    # while loop timed out without a successful loopback.
    raise AssertionError(f'Arduino comm not established after {trys} tries '
                         f'and {ARDUINO_START_TIMEOUT} seconds')


def convert_to_hex(cmd):
  """Convert a list of command bytes to 2-char hex strings ('--' if empty)."""
  return [('%0.2x' % int(codecs.encode(x, 'hex_codec'), 16) if x else '--')
          for x in cmd]


def arduino_rotate_servo_to_angle(ch, angle, serial_port, move_time):
  """Rotate servo to the specified angle.

  Args:
    ch: str; servo to rotate in ARDUINO_VALID_CH
    angle: int; servo angle to move to
    serial_port: object; serial port
    move_time: int; time in seconds
  """
  # Clamp angle into [0, ARDUINO_ANGLE_MAX]. The previous implementation
  # zeroed out-of-range high angles, making the upper clamp unreachable.
  if angle < 0:
    logging.debug('Angle must be between 0 and %d.', ARDUINO_ANGLE_MAX)
    angle = 0
  elif angle > ARDUINO_ANGLE_MAX:
    logging.debug('Angle must be between 0 and %d.', ARDUINO_ANGLE_MAX)
    angle = int(ARDUINO_ANGLE_MAX)  # struct.pack('B', ...) requires an int

  cmd = [struct.pack('B', i) for i in [ARDUINO_START_BYTE, int(ch), angle]]
  arduino_send_cmd(serial_port, cmd)
  time.sleep(move_time)


def arduino_rotate_servo(ch, angles, move_time, serial_port):
  """Rotate servo through 'angles'.

  Args:
    ch: str; servo to rotate
    angles: list of ints; servo angles to move to
    move_time: int; time required to allow for arduino movement
    serial_port: object; serial port
  """

  for angle in angles:
    # Scale physical rig angle to the servo's [0, ARDUINO_ANGLE_MAX] range.
    angle_norm = int(round(angle*ARDUINO_ANGLE_MAX/HS755HB_ANGLE_MAX, 0))
    arduino_rotate_servo_to_angle(ch, angle_norm, serial_port, move_time)


def rotation_rig(rotate_cntl, rotate_ch, num_rotations, angles, servo_speed,
                 move_time, arduino_serial_port):
  """Rotate the phone n times using rotate_cntl and rotate_ch defined.

  rotate_ch is hard wired and must be determined from physical setup.
  If using Arduino, serial port must be initialized and communication must be
  established before rotation.

  Args:
    rotate_cntl: str to identify as 'arduino' or 'canakit' controller.
    rotate_ch: str to identify rotation channel number.
    num_rotations: int number of rotations.
    angles: list of ints; servo angle to move to.
    servo_speed: int number of move speed between [1, 255].
    move_time: int time required to allow for arduino movement.
    arduino_serial_port: optional initialized serial port object
  """

  logging.debug('Controller: %s, ch: %s', rotate_cntl, rotate_ch)
  if arduino_serial_port:
    # initialize servo at origin
    logging.debug('Moving servo to origin')
    arduino_rotate_servo_to_angle(rotate_ch, 0, arduino_serial_port, 1)

    # set servo speed
    set_servo_speed(rotate_ch, servo_speed, arduino_serial_port, delay=0)
  elif rotate_cntl.lower() == 'canakit':
    canakit_serial_port = serial_port_def('Canakit')
  else:
    logging.info('No rotation rig defined. Manual test: rotate phone by hand.')

  # rotate phone
  logging.debug('Rotating phone %dx', num_rotations)
  for _ in range(num_rotations):
    if rotate_cntl == 'arduino':
      arduino_rotate_servo(rotate_ch, angles, move_time, arduino_serial_port)
    elif rotate_cntl == 'canakit':
      canakit_set_relay_channel_state(canakit_serial_port, rotate_ch, 'ON')
      canakit_set_relay_channel_state(canakit_serial_port, rotate_ch, 'OFF')
  logging.debug('Finished rotations')
  if rotate_cntl == 'arduino':
    logging.debug('Moving servo to origin')
    arduino_rotate_servo_to_angle(rotate_ch, 0, arduino_serial_port, 1)


def set_servo_speed(ch, servo_speed, serial_port, delay=0):
  """Set servo to specified speed.

  Args:
    ch: str; servo to turn on in ARDUINO_VALID_CH
    servo_speed: int; value of speed between 1 and 255
    serial_port: object; serial port
    delay: int; time in seconds
  """
  logging.debug('Servo speed: %d', servo_speed)
  if servo_speed < ARDUINO_SERVO_SPEED_MIN:
    logging.debug('Servo speed must be >= %d.', ARDUINO_SERVO_SPEED_MIN)
    servo_speed = ARDUINO_SERVO_SPEED_MIN
  elif servo_speed > ARDUINO_SERVO_SPEED_MAX:
    logging.debug('Servo speed must be <= %d.', ARDUINO_SERVO_SPEED_MAX)
    servo_speed = ARDUINO_SERVO_SPEED_MAX

  cmd = [struct.pack('B', i) for i in [ARDUINO_SPEED_START_BYTE,
                                       int(ch), servo_speed]]
  arduino_send_cmd(serial_port, cmd)
  time.sleep(delay)


def calc_max_rotation_angle(rotations, sensor_type):
  """Calculates the max angle of deflection from rotations.

  Args:
    rotations: numpy array of rotation per event (rads)
    sensor_type: string 'Camera' or 'Gyro'

  Returns:
    maximum angle of rotation for the given rotations (degrees)
  """
  # Copy on conversion so the caller's array is not mutated in place.
  rotations = rotations * _RADS_TO_DEGS
  rotations_sum = np.cumsum(rotations)
  rotation_max = max(rotations_sum)
  rotation_min = min(rotations_sum)
  logging.debug('%s min: %.2f, max %.2f rotation (degrees)',
                sensor_type, rotation_min, rotation_max)
  logging.debug('%s max rotation: %.2f degrees',
                sensor_type, (rotation_max-rotation_min))
  return rotation_max-rotation_min


def get_gyro_rotations(gyro_events, cam_times):
  """Get the rotation values of the gyro.

  Integrates the gyro data between each camera frame to compute an angular
  displacement.

  Args:
    gyro_events: List of gyro event objects.
    cam_times: Array of N camera times, one for each frame.

  Returns:
    Array of N-1 gyro rotation measurements (rads, integrated over each
    camera frame interval).
  """
  gyro_times = np.array([e['time'] for e in gyro_events])
  all_gyro_rots = np.array([e['z'] for e in gyro_events])
  gyro_rots = []
  if gyro_times[0] > cam_times[0] or gyro_times[-1] < cam_times[-1]:
    raise AssertionError('Gyro times do not bound camera times! '
                         f'gyro: {gyro_times[0]:.0f} -> {gyro_times[-1]:.0f} '
                         f'cam: {cam_times[0]} -> {cam_times[-1]} (ns).')

  # Integrate the gyro data between each pair of camera frame times.
  for i_cam in range(len(cam_times)-1):
    # Get the window of gyro samples within the current pair of frames.
    # Note: bisect always picks first gyro index after camera time.
    t_cam0 = cam_times[i_cam]
    t_cam1 = cam_times[i_cam+1]
    i_gyro_window0 = bisect.bisect(gyro_times, t_cam0)
    i_gyro_window1 = bisect.bisect(gyro_times, t_cam1)
    gyro_sum = 0

    # Integrate samples within the window.
    for i_gyro in range(i_gyro_window0, i_gyro_window1):
      gyro_val = all_gyro_rots[i_gyro+1]
      t_gyro0 = gyro_times[i_gyro]
      t_gyro1 = gyro_times[i_gyro+1]
      t_gyro_delta = (t_gyro1 - t_gyro0) * _NSEC_TO_SEC
      gyro_sum += gyro_val * t_gyro_delta

    # Handle the fractional intervals at the sides of the window.
    for side, i_gyro in enumerate([i_gyro_window0-1, i_gyro_window1]):
      gyro_val = all_gyro_rots[i_gyro+1]
      t_gyro0 = gyro_times[i_gyro]
      t_gyro1 = gyro_times[i_gyro+1]
      t_gyro_delta = (t_gyro1 - t_gyro0) * _NSEC_TO_SEC
      if side == 0:
        # Leading edge: only the fraction of the sample after t_cam0 counts.
        f = (t_cam0 - t_gyro0) / (t_gyro1 - t_gyro0)
        frac_correction = gyro_val * t_gyro_delta * (1.0 - f)
        gyro_sum += frac_correction
      else:
        # Trailing edge: only the fraction of the sample before t_cam1 counts.
        f = (t_cam1 - t_gyro0) / (t_gyro1 - t_gyro0)
        frac_correction = gyro_val * t_gyro_delta * f
        gyro_sum += frac_correction
    gyro_rots.append(gyro_sum)
  gyro_rots = np.array(gyro_rots)
  return gyro_rots


def procrustes_rotation(x, y):
  """Performs a Procrustes analysis to conform points in x to y.

  Procrustes analysis determines a linear transformation (translation,
  reflection, orthogonal rotation and scaling) of the points in y to best
  conform them to the points in matrix x, using the sum of squared errors
  as the metric for fit criterion.

  Args:
    x: Target coordinate matrix
    y: Input coordinate matrix

  Returns:
    The rotation component of the transformation that maps x to y.
  """
  x0 = (x-x.mean(0)) / np.sqrt(((x-x.mean(0))**2.0).sum())
  y0 = (y-y.mean(0)) / np.sqrt(((y-y.mean(0))**2.0).sum())
  u, _, vt = np.linalg.svd(np.dot(x0.T, y0), full_matrices=False)
  return np.dot(vt.T, u.T)


def get_cam_rotations(frames, facing, h, file_name_stem,
                      start_frame, stabilized_video=False):
  """Get the rotations of the camera between each pair of frames.

  Takes N frames and returns N-1 angular displacements corresponding to the
  rotations between adjacent pairs of frames, in radians.
  Only takes feature points from center so that rotation measured has less
  rolling shutter effect.
  Requires FEATURE_PTS_MIN to have enough data points for accurate measurements.
  Uses FEATURE_PARAMS for cv2 to identify features in checkerboard images.
  Ensures camera rotates enough if not calling with stabilized video.

  Args:
    frames: List of N images (as RGB numpy arrays).
    facing: Direction camera is facing.
    h: Pixel height of each frame.
    file_name_stem: file name stem including location for data.
    start_frame: int; index to start at
    stabilized_video: Boolean; if called with stabilized video

  Returns:
    numpy array of N-1 camera rotation measurements (rad).
  """
  gframes = []
  for frame in frames:
    frame = (frame * 255.0).astype(np.uint8)  # cv2 uses [0, 255]
    gframes.append(cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY))
  num_frames = len(gframes)
  logging.debug('num_frames: %d', num_frames)
  # Create mask for the vertical center band of the frame.
  ymin = int(h * (1 - _FEATURE_MARGIN) / 2)
  ymax = int(h * (1 + _FEATURE_MARGIN) / 2)
  pre_mask = np.zeros_like(gframes[0])
  pre_mask[ymin:ymax, :] = 255

  for masking in ['post', 'pre']:  # Do post-masking (original) method 1st
    logging.debug('Using %s masking method', masking)
    rotations = []
    for i in range(1, num_frames):
      j = i - 1
      gframe0 = gframes[j]
      gframe1 = gframes[i]
      if masking == 'post':
        p0 = cv2.goodFeaturesToTrack(
            gframe0, mask=None, **_CV2_FEATURE_PARAMS_POSTMASK)
        post_mask = (p0[:, 0, 1] >= ymin) & (p0[:, 0, 1] <= ymax)
        p0_filtered = p0[post_mask]
      else:
        p0_filtered = cv2.goodFeaturesToTrack(
            gframe0, mask=pre_mask, **_CV2_FEATURE_PARAMS_PREMASK)
      num_features = len(p0_filtered)
      if num_features < _FEATURE_PTS_MIN:
        # Save a debug image of the (insufficient) features found.
        for pt in np.rint(p0_filtered).astype(int):
          x, y = pt[0][0], pt[0][1]
          cv2.circle(frames[j], (x, y), 3, (100, 255, 255), -1)
        image_processing_utils.write_image(
            frames[j], f'{file_name_stem}_features{j+start_frame:03d}.png')
        msg = (f'Not enough features in frame {j+start_frame}. Need at least '
               f'{_FEATURE_PTS_MIN} features, got {num_features}.')
        if masking == 'pre':
          raise AssertionError(msg)
        else:
          logging.debug(msg)
          break  # fall back to the pre-masking method
      else:
        logging.debug('Number of features in frame %s is %d',
                      str(j+start_frame).zfill(3), num_features)
      p1, st, _ = cv2.calcOpticalFlowPyrLK(gframe0, gframe1, p0_filtered, None,
                                           **_CV2_LK_PARAMS)
      tform = procrustes_rotation(p0_filtered[st == 1], p1[st == 1])
      if facing == camera_properties_utils.LENS_FACING_BACK:
        rotation = -math.atan2(tform[0, 1], tform[0, 0])
      elif facing == camera_properties_utils.LENS_FACING_FRONT:
        rotation = math.atan2(tform[0, 1], tform[0, 0])
      else:
        raise AssertionError(f'Unknown lens facing: {facing}.')
      rotations.append(rotation)
      if i == 1:
        # Save debug visualization of features that are being
        # tracked in the first frame.
        frame = frames[j]
        for x, y in np.rint(p0_filtered[st == 1]).astype(int):
          cv2.circle(frame, (x, y), 3, (100, 255, 255), -1)
        image_processing_utils.write_image(
            frame, f'{file_name_stem}_features{j+start_frame:03d}.png')
    if i == num_frames-1:
      logging.debug('Correct num of frames found: %d', i)
      break  # exit if enough features in all frames
  if i != num_frames-1:
    raise AssertionError('Neither method found enough features in all frames')

  rotations = np.array(rotations)
  rot_per_frame_max = max(abs(rotations))
  logging.debug('Max rotation in frame: %.2f degrees',
                rot_per_frame_max*_RADS_TO_DEGS)
  # Only log the "skipped" message when the check really was skipped; the
  # previous else branch also logged it for unstabilized video that passed.
  if stabilized_video:
    logging.debug('Skipped camera rotation check due to stabilized video.')
  else:
    logging.debug('Checking camera rotations on video.')
    if rot_per_frame_max < _ROTATION_PER_FRAME_MIN:
      raise AssertionError(f'Device not moved enough: {rot_per_frame_max:.3f} '
                           f'movement. THRESH: {_ROTATION_PER_FRAME_MIN} rads.')
  return rotations


def get_best_alignment_offset(cam_times, cam_rots, gyro_events, degree=2):
  """Find the best offset to align the camera and gyro motion traces.

  This function integrates the shifted gyro data between camera samples
  for a range of candidate shift values, and returns the shift that
  result in the best correlation.

  Uses a correlation distance metric between the curves, where a smaller
  value means that the curves are better-correlated.

  Fits a curve to the correlation distance data to measure the minima more
  accurately, by looking at the correlation distances within a range of
  +/- 10ms from the measured best score; note that this will use fewer
  than the full +/- 10 range for the curve fit if the measured score
  (which is used as the center of the fit) is within 10ms of the edge of
  the +/- 50ms candidate range.

  Args:
    cam_times: Array of N camera times, one for each frame.
    cam_rots: Array of N-1 camera rotation displacements (rad).
    gyro_events: List of gyro event objects.
    degree: Degree of polynomial

  Returns:
    Best alignment offset(ms), fit coefficients, candidates, and distances,
    or None if the polynomial fit near the best shift is poor.
  """
  # Measure the correlation distance over defined shift
  shift_candidates = np.arange(-_CORR_TIME_OFFSET_MAX,
                               _CORR_TIME_OFFSET_MAX+_CORR_TIME_OFFSET_STEP,
                               _CORR_TIME_OFFSET_STEP).tolist()
  spatial_distances = []
  for shift in shift_candidates:
    shifted_cam_times = cam_times + shift*_MSEC_TO_NSEC
    gyro_rots = get_gyro_rotations(gyro_events, shifted_cam_times)
    spatial_distance = scipy.spatial.distance.correlation(cam_rots, gyro_rots)
    logging.debug('shift %.1fms spatial distance: %.5f', shift,
                  spatial_distance)
    spatial_distances.append(spatial_distance)

  best_corr_dist = min(spatial_distances)
  coarse_best_shift = shift_candidates[spatial_distances.index(best_corr_dist)]
  logging.debug('Best shift without fitting is %.4f ms', coarse_best_shift)

  # Fit a polynomial around coarse_best_shift to extract best fit
  i = spatial_distances.index(best_corr_dist)
  # Clip the window at 0 so a best shift near the candidate-range edge does
  # not produce a negative index (which would wrap the slice around).
  i_poly_fit_min = max(i - _COARSE_FIT_RANGE, 0)
  i_poly_fit_max = i + _COARSE_FIT_RANGE + 1
  shift_candidates = shift_candidates[i_poly_fit_min:i_poly_fit_max]
  spatial_distances = spatial_distances[i_poly_fit_min:i_poly_fit_max]
  logging.debug('Polynomial degree: %d', degree)
  fit_coeffs, residuals, _, _, _ = np.polyfit(
      shift_candidates, spatial_distances, degree, full=True
  )
  logging.debug('Fit coefficients: %s', fit_coeffs)
  logging.debug('Residuals: %s', residuals)
  total_sum_of_squares = np.sum(
      (spatial_distances - np.mean(spatial_distances)) ** 2
  )
  # Calculate r-squared on the entire domain for debugging
  r_squared = 1 - residuals[0] / total_sum_of_squares
  logging.debug('r^2 on the entire domain: %f', r_squared)

  # Calculate r-squared near the best shift
  domain_around_best_shift = [coarse_best_shift - _SHIFT_DOMAIN_RADIUS,
                              coarse_best_shift + _SHIFT_DOMAIN_RADIUS]
  logging.debug('Calculating r^2 on the limited domain of [%f, %f]',
                domain_around_best_shift[0], domain_around_best_shift[1])
  small_shifts_and_distances = [
      (x, y)
      for x, y in zip(shift_candidates, spatial_distances)
      if domain_around_best_shift[0] <= x <= domain_around_best_shift[1]
  ]
  small_shift_candidates, small_spatial_distances = zip(
      *small_shifts_and_distances
  )
  logging.debug('Shift candidates on limited domain: %s',
                small_shift_candidates)
  logging.debug('Spatial distances on limited domain: %s',
                small_spatial_distances)
  limited_residuals = np.sum(
      (np.polyval(fit_coeffs, small_shift_candidates) - small_spatial_distances)
      ** 2
  )
  logging.debug('Residuals on limited domain: %s', limited_residuals)
  limited_total_sum_of_squares = np.sum(
      (small_spatial_distances - np.mean(small_spatial_distances)) ** 2
  )
  limited_r_squared = 1 - limited_residuals / limited_total_sum_of_squares
  logging.debug('r^2 on limited domain: %f', limited_r_squared)

  # Calculate exact_best_shift (x where y is minimum of parabola)
  exact_best_shift = smallest_absolute_minimum_of_polynomial(fit_coeffs)

  if abs(coarse_best_shift - exact_best_shift) > 2.0:
    raise AssertionError(
        f'Test failed. Bad fit to time-shift curve. Coarse best shift: '
        f'{coarse_best_shift}, Exact best shift: {exact_best_shift}.')

  # Check fit of polynomial near the best shift
  if not math.isclose(limited_r_squared, 1, abs_tol=_R_SQUARED_TOLERANCE):
    logging.debug('r-squared on domain [%f, %f] was %f, expected 1.0, '
                  'ATOL: %f',
                  domain_around_best_shift[0], domain_around_best_shift[1],
                  limited_r_squared, _R_SQUARED_TOLERANCE)
    return None

  return exact_best_shift, fit_coeffs, shift_candidates, spatial_distances


def plot_camera_rotations(cam_rots, start_frame, video_quality,
                          plot_name_stem):
  """Plot the camera rotations.

  Args:
    cam_rots: np array of camera rotations angle per frame
    start_frame: int value of start frame
    video_quality: str for video quality identifier
    plot_name_stem: str (with path) of what to call plot
  """

  pylab.figure(video_quality)
  frames = range(start_frame, len(cam_rots)+start_frame)
  pylab.title(f'Camera rotation vs frame {video_quality}')
  pylab.plot(frames, cam_rots*_RADS_TO_DEGS, '-ro', label='x')
  pylab.xlabel('frame #')
  pylab.ylabel('camera rotation (degrees)')
  matplotlib.pyplot.savefig(f'{plot_name_stem}_cam_rots.png')
  pylab.close(video_quality)


def plot_gyro_events(gyro_events, plot_name, log_path):
  """Plot x, y, and z on the gyro events.

  Samples are grouped into NUM_GYRO_PTS_TO_AVG groups and averaged to minimize
  random spikes in data.

  Args:
    gyro_events: List of gyroscope events.
    plot_name: name of plot(s).
    log_path: location to save data.
  """

  nevents = (len(gyro_events) // _NUM_GYRO_PTS_TO_AVG) * _NUM_GYRO_PTS_TO_AVG
  gyro_events = gyro_events[:nevents]
  times = np.array([(e['time'] - gyro_events[0]['time']) * _NSEC_TO_SEC
                    for e in gyro_events])
  x = np.array([e['x'] for e in gyro_events])
  y = np.array([e['y'] for e in gyro_events])
  z = np.array([e['z'] for e in gyro_events])

  # Group samples into size-N groups & average each together to minimize random
  # spikes in data.
  times = times[_NUM_GYRO_PTS_TO_AVG//2::_NUM_GYRO_PTS_TO_AVG]
  x = x.reshape(nevents//_NUM_GYRO_PTS_TO_AVG, _NUM_GYRO_PTS_TO_AVG).mean(1)
  y = y.reshape(nevents//_NUM_GYRO_PTS_TO_AVG, _NUM_GYRO_PTS_TO_AVG).mean(1)
  z = z.reshape(nevents//_NUM_GYRO_PTS_TO_AVG, _NUM_GYRO_PTS_TO_AVG).mean(1)

  pylab.figure(plot_name)
  # x & y on same axes
  pylab.subplot(2, 1, 1)
  pylab.title(f'{plot_name}(mean of {_NUM_GYRO_PTS_TO_AVG} pts)')
  pylab.plot(times, x, 'r', label='x')
  pylab.plot(times, y, 'g', label='y')
  pylab.ylim([np.amin(z), np.amax(z)])
  pylab.ylabel('gyro x,y movement (rads/s)')
  pylab.legend()

  # z on separate axes
  pylab.subplot(2, 1, 2)
  pylab.plot(times, z, 'b', label='z')
  pylab.xlabel('time (seconds)')
  pylab.ylabel('gyro z movement (rads/s)')
  pylab.legend()
  file_name = os.path.join(log_path, plot_name)
  matplotlib.pyplot.savefig(f'{file_name}_gyro_events.png')
  pylab.close(plot_name)

  z_max = max(abs(z))
  logging.debug('z_max: %.3f', z_max)
  if z_max > _GYRO_ROTATION_PER_SEC_MAX:
    raise AssertionError(
        f'Phone moved too rapidly! Please confirm controller firmware. '
        f'Max: {z_max:.3f}, TOL: {_GYRO_ROTATION_PER_SEC_MAX} rads/s')


def conv_acceleration_to_movement(gyro_events, video_delay_time):
  """Convert gyro_events time and speed to movement during video time.

  Args:
    gyro_events: sorted dict of entries with 'time', 'x', 'y', and 'z'
    video_delay_time: time at which video starts (and the video's duration)

  Returns:
    'z' acceleration converted to movement for times around VIDEO playing.
  """
  gyro_times = np.array([e['time'] for e in gyro_events])
  gyro_speed = np.array([e['z'] for e in gyro_events])
  gyro_time_min = gyro_times[0]
  logging.debug('gyro start time: %dns', gyro_time_min)
  logging.debug('gyro stop time: %dns', gyro_times[-1])
  gyro_rotations = []
  video_time_start = gyro_time_min + video_delay_time *_SEC_TO_NSEC
  video_time_stop = video_time_start + video_delay_time *_SEC_TO_NSEC
  logging.debug('video start time: %dns', video_time_start)
  logging.debug('video stop time: %dns', video_time_stop)

  for i, t in enumerate(gyro_times):
    if video_time_start <= t <= video_time_stop:
      # Integrate angular speed over the sample interval to get displacement.
      gyro_rotations.append((gyro_times[i]-gyro_times[i-1])/_SEC_TO_NSEC *
                            gyro_speed[i])
  return np.array(gyro_rotations)