• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2020 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7#      http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14"""Utility functions for sensor_fusion hardware rig."""
15
16
17import bisect
18import codecs
19import logging
20import math
21import os
22import struct
23import time
24
25import cv2
26import matplotlib
27from matplotlib import pyplot as plt
28import numpy as np
29import scipy.spatial
30import serial
31from serial.tools import list_ports
32
33import camera_properties_utils
34import image_processing_utils
35
matplotlib.use('agg')  # Must be executed before any figure is created.

# Constants for Rotation Rig
ARDUINO_ANGLE_MAX = 180.0  # degrees
ARDUINO_ANGLES_SENSOR_FUSION = (0, 90)  # degrees
ARDUINO_ANGLES_STABILIZATION = (10, 25)  # degrees
ARDUINO_BAUDRATE = 9600
ARDUINO_CMD_LENGTH = 3
ARDUINO_CMD_TIME = 2.0 * ARDUINO_CMD_LENGTH / ARDUINO_BAUDRATE  # round trip
ARDUINO_MOVE_TIME_SENSOR_FUSION = 2  # seconds
ARDUINO_MOVE_TIME_STABILIZATION = 0.3  # seconds
# USB product ID; matched together with ARDUINO_VIDS to find the board.
ARDUINO_PID = 0x0043
ARDUINO_SERVO_SPEED_MAX = 255
ARDUINO_SERVO_SPEED_MIN = 1
ARDUINO_SERVO_SPEED_SENSOR_FUSION = 20
ARDUINO_SERVO_SPEED_STABILIZATION = 10
ARDUINO_SERVO_SPEED_STABILIZATION_TABLET = 20
# Protocol start bytes: 253 prefixes a speed command, 255 an angle command.
ARDUINO_SPEED_START_BYTE = 253
ARDUINO_START_BYTE = 255
ARDUINO_START_NUM_TRYS = 5
ARDUINO_START_TIMEOUT = 300  # seconds
ARDUINO_STRING = 'Arduino'
ARDUINO_TEST_CMD = (b'\x01', b'\x02', b'\x03')
ARDUINO_VALID_CH = ('1', '2', '3', '4', '5', '6')
ARDUINO_VIDS = (0x2341, 0x2a03)

CANAKIT_BAUDRATE = 115200
CANAKIT_CMD_TIME = 0.05  # seconds (found experimentally)
CANAKIT_DATA_DELIMITER = '\r\n'
CANAKIT_PID = 0xfc73
CANAKIT_SEND_TIMEOUT = 0.02  # seconds
CANAKIT_SET_CMD = 'REL'
CANAKIT_SLEEP_TIME = 2  # seconds (for full 90 degree rotation)
CANAKIT_VALID_CMD = ('ON', 'OFF')
CANAKIT_VALID_CH = ('1', '2', '3', '4')
CANAKIT_VID = 0x04d8

HS755HB_ANGLE_MAX = 202.0  # throw for rotation motor in degrees

# From test_sensor_fusion
_FEATURE_MARGIN = 0.20  # Only take feature points from center 20% so that
                        # rotation measured has less rolling shutter effect.
_FEATURE_PTS_MIN = 30  # Min number of feature pts to perform rotation analysis.
# cv2.goodFeatures to track.
# 'POSTMASK' is the measurement method in all previous versions of Android.
# 'POSTMASK' finds best features on entire frame and then masks the features
# to the vertical center FEATURE_MARGIN for the measurement.
# 'PREMASK' is a new measurement that is used when FEATURE_PTS_MIN is not
# found in frame. This finds the best 2*FEATURE_PTS_MIN in the FEATURE_MARGIN
# part of the frame.
_CV2_FEATURE_PARAMS_POSTMASK = dict(maxCorners=240,
                                    qualityLevel=0.3,
                                    minDistance=7,
                                    blockSize=7)
_CV2_FEATURE_PARAMS_PREMASK = dict(maxCorners=2*_FEATURE_PTS_MIN,
                                   qualityLevel=0.3,
                                   minDistance=7,
                                   blockSize=7)
_GYRO_SAMP_RATE_MIN = 100.0  # Samples/second: min gyro sample rate.
_CV2_LK_PARAMS = dict(winSize=(15, 15),
                      maxLevel=2,
                      criteria=(cv2.TERM_CRITERIA_EPS | cv2.TERM_CRITERIA_COUNT,
                                10, 0.03))  # cv2.calcOpticalFlowPyrLK params.
_ROTATION_PER_FRAME_MIN = 0.001  # rads/s
_GYRO_ROTATION_PER_SEC_MAX = 2.0  # rads/s
_R_SQUARED_TOLERANCE = 0.01  # tolerance for polynomial fitting r^2
_SHIFT_DOMAIN_RADIUS = 5  # limited domain centered around best shift

# unittest constants
_COARSE_FIT_RANGE = 20  # Range area around coarse fit to do optimization.
_CORR_TIME_OFFSET_MAX = 50  # ms max shift to try and match camera/gyro times.
_CORR_TIME_OFFSET_STEP = 0.5  # ms step for shifts.

# Unit translators
_MSEC_TO_NSEC = 1000000
_NSEC_TO_SEC = 1E-9
_SEC_TO_NSEC = int(1/_NSEC_TO_SEC)
_RADS_TO_DEGS = 180/math.pi

# Number of gyro samples averaged together when plotting, to smooth spikes.
_NUM_GYRO_PTS_TO_AVG = 20

# Masking method identifiers; see the POSTMASK/PREMASK notes above.
_PRE_MASKING = 'pre'
_POST_MASKING = 'post'
120
def polynomial_from_coefficients(coefficients):
  """Build a polynomial function from coefficients, highest power first.

  Args:
    coefficients: list of coefficients (float)
  Returns:
    Function in the form of a*x^n + b*x^(n - 1) + ... + constant
  """
  def polynomial(x):
    degree = len(coefficients) - 1
    return sum(coeff * x ** (degree - power)
               for power, coeff in enumerate(coefficients))
  return polynomial
133
134
def smallest_absolute_minimum_of_polynomial(coefficients):
  """Return the smallest minimum by absolute value from a coefficient list.

  Args:
    coefficients: list of coefficients (float)
  Returns:
    Smallest local minimum (by absolute value) on the function (float)
  """
  first_derivative = np.polyder(coefficients, m=1)
  second_derivative = np.polyder(coefficients, m=2)
  best = None
  for candidate in np.roots(first_derivative):
    # A critical point is a local minimum when the second derivative there
    # is positive.
    if np.polyval(second_derivative, candidate) <= 0:
      continue
    if best is None or abs(candidate) < abs(best):
      best = candidate
  if best is None:
    raise AssertionError(
        f'No minima were found on function described by {coefficients}.')
  return best
156
157
def serial_port_def(name):
  """Determine the serial port and open.

  Args:
    name: string of device to locate (ie. 'Arduino', 'Canakit' or 'Default')
  Returns:
    serial port object
  """
  target = name.lower()
  for device in list_ports.comports():
    if not (device.vid and device.pid):  # Not all comm ports have vid and pid
      continue
    if target == 'arduino' and (
        device.vid in ARDUINO_VIDS and device.pid == ARDUINO_PID):
      logging.debug('Arduino: %s', str(device))
      return serial.Serial(device.device, ARDUINO_BAUDRATE, timeout=1)
    if target in ('canakit', 'default') and (
        device.vid == CANAKIT_VID and device.pid == CANAKIT_PID):
      logging.debug('Canakit: %s', str(device))
      return serial.Serial(device.device, CANAKIT_BAUDRATE,
                           timeout=CANAKIT_SEND_TIMEOUT,
                           parity=serial.PARITY_EVEN,
                           stopbits=serial.STOPBITS_ONE,
                           bytesize=serial.EIGHTBITS)
  raise ValueError(f'{name} device not connected.')
187
188
def canakit_cmd_send(canakit_serial_port, cmd_str):
  """Wrapper for sending serial command to Canakit.

  Args:
    canakit_serial_port: port to write for canakit
    cmd_str: str; value to send to device.
  """
  delimiter_bytes = CANAKIT_DATA_DELIMITER.encode()
  try:
    logging.debug('writing port...')
    canakit_serial_port.write(delimiter_bytes)
    time.sleep(CANAKIT_CMD_TIME)  # This is critical for relay.
    canakit_serial_port.write(cmd_str.encode())
  except IOError as io_error:
    raise IOError(
        f'Port {CANAKIT_VID}:{CANAKIT_PID} is not open!') from io_error
205
206
def canakit_set_relay_channel_state(canakit_port, ch, state):
  """Set Canakit relay channel and state.

  Waits CANAKIT_SLEEP_TIME for rotation to occur.

  Args:
    canakit_port: serial port object for the Canakit port.
    ch: string for channel number of relay to set. '1', '2', '3', or '4'
    state: string of either 'ON' or 'OFF'
  """
  logging.debug('Setting relay state %s', state)
  if ch not in CANAKIT_VALID_CH or state not in CANAKIT_VALID_CMD:
    logging.debug('Invalid ch (%s) or state (%s), no command sent.', ch, state)
    return
  canakit_cmd_send(canakit_port, f'{CANAKIT_SET_CMD}{ch}.{state}\r\n')
  time.sleep(CANAKIT_SLEEP_TIME)
223
224
def arduino_read_cmd(port):
  """Read back Arduino command from serial port."""
  # One read per command byte; returns the bytes in arrival order.
  return [port.read() for _ in range(ARDUINO_CMD_LENGTH)]
231
232
def arduino_send_cmd(port, cmd):
  """Write the first ARDUINO_CMD_LENGTH bytes of cmd to the serial port."""
  for index in range(ARDUINO_CMD_LENGTH):
    port.write(cmd[index])
237
238
def arduino_loopback_cmd(port, cmd):
  """Send command to serial port, wait for the round trip, and read the echo."""
  arduino_send_cmd(port, cmd)
  time.sleep(ARDUINO_CMD_TIME)  # allow full round trip at ARDUINO_BAUDRATE
  return arduino_read_cmd(port)
244
245
def establish_serial_comm(port):
  """Establish connection with serial port.

  Repeatedly loops ARDUINO_TEST_CMD through the port until the device echoes
  it back correctly or ARDUINO_START_TIMEOUT seconds elapse.

  Args:
    port: serial port object to handshake with.

  Raises:
    AssertionError: if the echo never matches before the timeout.
  """
  logging.debug('Establishing communication with %s', port.name)
  trys = 1
  hex_test = convert_to_hex(ARDUINO_TEST_CMD)
  logging.debug(' test tx: %s %s %s', hex_test[0], hex_test[1], hex_test[2])
  start = time.time()
  while time.time() < start + ARDUINO_START_TIMEOUT:
    try:
      cmd_read = arduino_loopback_cmd(port, ARDUINO_TEST_CMD)
    except serial.serialutil.SerialException as _:
      # Port may be temporarily busy; keep retrying until the timeout.
      logging.debug('Port in use, trying again...')
      continue
    hex_read = convert_to_hex(cmd_read)
    logging.debug(' test rx: %s %s %s', hex_read[0], hex_read[1], hex_read[2])
    if cmd_read != list(ARDUINO_TEST_CMD):
      trys += 1
    else:
      logging.debug(' Arduino comm established after %d try(s)', trys)
      break
  else:
    # while-else: only reached if the loop exhausts the timeout with no break.
    raise AssertionError(f'Arduino comm not established after {trys} tries '
                         f'and {ARDUINO_START_TIMEOUT} seconds')
269
270
def convert_to_hex(cmd):
  """Convert each command byte to a 2-char hex string ('--' if falsy)."""
  hex_strings = []
  for byte in cmd:
    if byte:
      hex_strings.append(f"{int(codecs.encode(byte, 'hex_codec'), 16):02x}")
    else:
      hex_strings.append('--')
  return hex_strings
274
275
def arduino_rotate_servo_to_angle(ch, angle, serial_port, move_time):
  """Rotate servo to the specified angle.

  Out-of-range angles are clamped into [0, ARDUINO_ANGLE_MAX].

  Args:
    ch: str; servo to rotate in ARDUINO_VALID_CH
    angle: int; servo angle to move to
    serial_port: object; serial port
    move_time: int; time in seconds
  """
  # Bug fix: the original reset any out-of-range angle to 0 before testing
  # the upper bound, so the upper clamp was unreachable and angles above
  # ARDUINO_ANGLE_MAX were sent as 0 instead of the max.
  if angle < 0:
    logging.debug('Angle must be between 0 and %d.', ARDUINO_ANGLE_MAX)
    angle = 0
  elif angle > ARDUINO_ANGLE_MAX:
    logging.debug('Angle must be between 0 and %d.', ARDUINO_ANGLE_MAX)
    angle = int(ARDUINO_ANGLE_MAX)  # int: struct.pack('B', ...) needs an int

  cmd = [struct.pack('B', i) for i in [ARDUINO_START_BYTE, int(ch), angle]]
  arduino_send_cmd(serial_port, cmd)
  time.sleep(move_time)
294
295
def arduino_rotate_servo(ch, angles, move_time, serial_port):
  """Rotate servo through 'angles'.

  Args:
    ch: str; servo to rotate
    angles: list of ints; servo angles to move to
    move_time: int; time required to allow for arduino movement
    serial_port: object; serial port
  """
  for target_angle in angles:
    # Scale from the motor's physical throw to the Arduino command range.
    scaled_angle = int(
        round(target_angle*ARDUINO_ANGLE_MAX/HS755HB_ANGLE_MAX, 0))
    arduino_rotate_servo_to_angle(ch, scaled_angle, serial_port, move_time)
309
310
def rotation_rig(rotate_cntl, rotate_ch, num_rotations, angles, servo_speed,
                 move_time, arduino_serial_port):
  """Rotate the phone n times using rotate_cntl and rotate_ch defined.

  rotate_ch is hard wired and must be determined from physical setup.
  If using Arduino, serial port must be initialized and communication must be
  established before rotation.

  Args:
    rotate_cntl: str to identify 'arduino', 'canakit' or 'external' controller.
    rotate_ch: str to identify rotation channel number.
    num_rotations: int number of rotations.
    angles: list of ints; servo angle to move to.
    servo_speed: int number of move speed between [1, 255].
    move_time: int time required to allow for arduino movement.
    arduino_serial_port: optional initialized serial port object
  """

  logging.debug('Controller: %s, ch: %s', rotate_cntl, rotate_ch)
  if arduino_serial_port:
    # initialize servo at origin
    logging.debug('Moving servo to origin')
    arduino_rotate_servo_to_angle(rotate_ch, 0, arduino_serial_port, 1)

    # set servo speed
    set_servo_speed(rotate_ch, servo_speed, arduino_serial_port, delay=0)
  elif rotate_cntl.lower() == 'canakit':
    # NOTE(review): canakit_serial_port is only bound on this branch. If
    # rotate_cntl == 'canakit' while arduino_serial_port is truthy, the loop
    # below would raise NameError — confirm callers never mix these inputs.
    canakit_serial_port = serial_port_def('Canakit')
  elif rotate_cntl.lower() == 'external':
    logging.info('External rotation control.')
  else:
    logging.info('No rotation rig defined. Manual test: rotate phone by hand.')

  # rotate phone
  logging.debug('Rotating phone %dx', num_rotations)
  for _ in range(num_rotations):
    if rotate_cntl == 'arduino':
      arduino_rotate_servo(rotate_ch, angles, move_time, arduino_serial_port)
    elif rotate_cntl == 'canakit':
      # Pulse relay ON then OFF; each call waits CANAKIT_SLEEP_TIME.
      canakit_set_relay_channel_state(canakit_serial_port, rotate_ch, 'ON')
      canakit_set_relay_channel_state(canakit_serial_port, rotate_ch, 'OFF')
  logging.debug('Finished rotations')
  if rotate_cntl == 'arduino':
    # Return servo to origin when done.
    logging.debug('Moving servo to origin')
    arduino_rotate_servo_to_angle(rotate_ch, 0, arduino_serial_port, 1)
356
357
def set_servo_speed(ch, servo_speed, serial_port, delay=0):
  """Set servo to specified speed.

  Speeds outside [ARDUINO_SERVO_SPEED_MIN, ARDUINO_SERVO_SPEED_MAX] are
  clamped to the nearest bound.

  Args:
    ch: str; servo to turn on in ARDUINO_VALID_CH
    servo_speed: int; value of speed between 1 and 255
    serial_port: object; serial port
    delay: int; time in seconds
  """
  logging.debug('Servo speed: %d', servo_speed)
  if servo_speed < ARDUINO_SERVO_SPEED_MIN:
    logging.debug('Servo speed must be >= %d.', ARDUINO_SERVO_SPEED_MIN)
    servo_speed = ARDUINO_SERVO_SPEED_MIN
  elif servo_speed > ARDUINO_SERVO_SPEED_MAX:
    logging.debug('Servo speed must be <= %d.', ARDUINO_SERVO_SPEED_MAX)
    servo_speed = ARDUINO_SERVO_SPEED_MAX

  payload = (ARDUINO_SPEED_START_BYTE, int(ch), servo_speed)
  arduino_send_cmd(serial_port, [struct.pack('B', value) for value in payload])
  time.sleep(delay)
379
380
def calc_max_rotation_angle(rotations, sensor_type):
  """Calculates the max angle of deflection from rotations.

  Args:
    rotations: numpy array of rotation per event (rad); not modified.
    sensor_type: string 'Camera' or 'Gyro'

  Returns:
    maximum angle of rotation for the given rotations (degrees)
  """
  # Bug fix: the original `rotations *= _RADS_TO_DEGS` converted the caller's
  # array to degrees in place. Work on a converted copy instead.
  rotations_degs = np.degrees(rotations)
  rotations_sum = np.cumsum(rotations_degs)
  rotation_max = max(rotations_sum)
  rotation_min = min(rotations_sum)
  logging.debug('%s min: %.2f, max %.2f rotation (degrees)',
                sensor_type, rotation_min, rotation_max)
  logging.debug('%s max rotation: %.2f degrees',
                sensor_type, (rotation_max-rotation_min))
  return rotation_max-rotation_min
400
401
def get_gyro_rotations(gyro_events, cam_times):
  """Get the rotation values of the gyro.

  Integrates the gyro data between each camera frame to compute an angular
  displacement.

  Args:
    gyro_events: List of gyro event objects.
    cam_times: Array of N camera times, one for each frame.

  Returns:
    Array of N-1 gyro rotation measurements (rads/s).

  Raises:
    AssertionError: if the gyro samples do not fully bracket the camera times.
  """
  gyro_times = np.array([e['time'] for e in gyro_events])
  all_gyro_rots = np.array([e['z'] for e in gyro_events])  # z-axis rate only
  gyro_rots = []
  if gyro_times[0] > cam_times[0] or gyro_times[-1] < cam_times[-1]:
    raise AssertionError('Gyro times do not bound camera times! '
                         f'gyro: {gyro_times[0]:.0f} -> {gyro_times[-1]:.0f} '
                         f'cam: {cam_times[0]} -> {cam_times[-1]} (ns).')

  # Integrate the gyro data between each pair of camera frame times.
  for i_cam in range(len(cam_times)-1):
    # Get the window of gyro samples within the current pair of frames.
    # Note: bisect always picks first gyro index after camera time.
    t_cam0 = cam_times[i_cam]
    t_cam1 = cam_times[i_cam+1]
    i_gyro_window0 = bisect.bisect(gyro_times, t_cam0)
    i_gyro_window1 = bisect.bisect(gyro_times, t_cam1)
    gyro_sum = 0

    # Integrate samples within the window.
    # NOTE(review): each interval [t_gyro0, t_gyro1] is weighted by the rate
    # sample at its right edge (i_gyro+1) — confirm this matches the gyro
    # driver's timestamp convention.
    for i_gyro in range(i_gyro_window0, i_gyro_window1):
      gyro_val = all_gyro_rots[i_gyro+1]
      t_gyro0 = gyro_times[i_gyro]
      t_gyro1 = gyro_times[i_gyro+1]
      t_gyro_delta = (t_gyro1 - t_gyro0) * _NSEC_TO_SEC  # ns -> s
      gyro_sum += gyro_val * t_gyro_delta

    # Handle the fractional intervals at the sides of the window.
    for side, i_gyro in enumerate([i_gyro_window0-1, i_gyro_window1]):
      gyro_val = all_gyro_rots[i_gyro+1]
      t_gyro0 = gyro_times[i_gyro]
      t_gyro1 = gyro_times[i_gyro+1]
      t_gyro_delta = (t_gyro1 - t_gyro0) * _NSEC_TO_SEC
      if side == 0:
        # Left edge: count only the fraction of the interval after t_cam0.
        f = (t_cam0 - t_gyro0) / (t_gyro1 - t_gyro0)
        frac_correction = gyro_val * t_gyro_delta * (1.0 - f)
        gyro_sum += frac_correction
      else:
        # Right edge: count only the fraction of the interval before t_cam1.
        f = (t_cam1 - t_gyro0) / (t_gyro1 - t_gyro0)
        frac_correction = gyro_val * t_gyro_delta * f
        gyro_sum += frac_correction
    gyro_rots.append(gyro_sum)
  gyro_rots = np.array(gyro_rots)
  return gyro_rots
458
459
def procrustes_rotation(x, y):
  """Performs a Procrustes analysis to conform points in x to y.

  Procrustes analysis determines a linear transformation (translation,
  reflection, orthogonal rotation and scaling) of the points in y to best
  conform them to the points in matrix x, using the sum of squared errors
  as the metric for fit criterion.

  Args:
    x: Target coordinate matrix
    y: Input coordinate matrix

  Returns:
    The rotation component of the transformation that maps x to y.
  """
  # Center each point set, then scale to unit Frobenius norm.
  x_centered = x - x.mean(0)
  y_centered = y - y.mean(0)
  x_normalized = x_centered / np.sqrt((x_centered**2.0).sum())
  y_normalized = y_centered / np.sqrt((y_centered**2.0).sum())
  # The rotation comes from the SVD of the cross-covariance matrix.
  u, _, vt = np.linalg.svd(
      np.dot(x_normalized.T, y_normalized), full_matrices=False)
  return np.dot(vt.T, u.T)
479
480
def read_frame_from_file(file, file_path):
  """Read an image from file to memory.

  Args:
    file: File name string
    file_path: The path of the file

  Returns:
    The cv2 RGB frame, normalized.
  """
  full_name = os.path.join(file_path, file)
  bgr_frame = cv2.imread(full_name)
  # Convert BGR -> RGB and scale pixel values from [0, 255] to [0, 1].
  return cv2.cvtColor(bgr_frame, cv2.COLOR_BGR2RGB) / 255
494
495
def derive_rotation(
    masking, pre_mask, frame0, gframe0, gframe1, idx,
    start_frame, ymin, ymax, facing, file_name_stem):
  """Derive scene rotation based on 2 frames.

  Derive angular displacement corresponding to the rotation
  between a pair of frames, in radians. Only takes feature points from
  center so that rotation measured has less rolling shutter effect.
  Requires FEATURE_PTS_MIN to have enough data points for accurate measurements.
  Uses FEATURE_PARAMS for cv2 to identify features in checkerboard images.

  Args:
    masking: Either "pre" or "post"
    pre_mask: Mask before any features are detected
    frame0: Frame 0 represented as normalized rgb
    gframe0: Frame 0 represented as a cv2 greyscale
    gframe1: Frame 1 represented as a cv2 greyscale
    idx: Index of frame0 in the video clip
    start_frame: The index of the starting frame of the video
    ymin: Min y coordinate of the area to find features from
    ymax: Max y coordinate of the area to find features from
    facing: Camera facing
    file_name_stem: File name stem including location for data
  Returns:
    Rotation of the scene from frame0 to frame1 in rad, or None when
    post-masking finds too few features (caller falls back to pre-masking).
  Raises:
    AssertionError: if pre-masking also finds too few features, or if
      facing is not a recognized lens facing.
  """
  j = idx - 1  # frame0's index relative to the clip start
  if masking == _POST_MASKING:
    # Find features over the whole frame, then keep only those inside the
    # [ymin, ymax] band to limit rolling-shutter effects.
    p0 = cv2.goodFeaturesToTrack(
        gframe0, mask=None, **_CV2_FEATURE_PARAMS_POSTMASK)
    post_mask = (p0[:, 0, 1] >= ymin) & (p0[:, 0, 1] <= ymax)
    p0_filtered = p0[post_mask]
  else:
    # Pre-masking: search only inside the band from the start.
    p0_filtered = cv2.goodFeaturesToTrack(
        gframe0, mask=pre_mask, **_CV2_FEATURE_PARAMS_PREMASK)
  num_features = len(p0_filtered)
  if num_features < _FEATURE_PTS_MIN:
    # Save a debug image with the (insufficient) features marked.
    for pt in np.rint(p0_filtered).astype(int):
      x, y = pt[0][0], pt[0][1]
      cv2.circle(frame0, (x, y), 3, (100, 255, 255), -1)
    image_processing_utils.write_image(
        frame0, f'{file_name_stem}_features{j+start_frame:03d}.png')
    msg = (f'Not enough features in frame {j+start_frame}. Need at least '
           f'{_FEATURE_PTS_MIN} features, got {num_features}.')
    if masking == _PRE_MASKING:
      raise AssertionError(msg)
    else:
      logging.debug(msg)
      return None

  logging.debug('Number of features in frame %s is %d',
                str(j+start_frame).zfill(3), num_features)
  p1, st, _ = cv2.calcOpticalFlowPyrLK(gframe0, gframe1, p0_filtered, None,
                                       **_CV2_LK_PARAMS)
  # Keep only the points that were successfully tracked (status == 1).
  tform = procrustes_rotation(p0_filtered[st == 1], p1[st == 1])
  # Sign of the rotation depends on which way the camera faces.
  if facing == camera_properties_utils.LENS_FACING['BACK']:
    rotation = -math.atan2(tform[0, 1], tform[0, 0])
  elif facing == camera_properties_utils.LENS_FACING['FRONT']:
    rotation = math.atan2(tform[0, 1], tform[0, 0])
  else:
    raise AssertionError(f'Unknown lens facing: {facing}.')
  if idx == 1:
    # Save debug visualization of features that are being
    # tracked in the first frame.
    for x, y in np.rint(p0_filtered[st == 1]).astype(int):
      cv2.circle(frame0, (x, y), 3, (100, 255, 255), -1)
    image_processing_utils.write_image(
        frame0, f'{file_name_stem}_features{j+start_frame:03d}.png')

  return rotation
566
567
def rotations_in_numpy(rotations, stabilized_video):
  """Return rotations in numpy and verify them.

  Args:
    rotations: Array of scene rotations in rad
    stabilized_video: Whether video is stabilized

  Returns:
    Numpy array of N-1 camera rotation measurements (rad).

  Raises:
    AssertionError: if the device did not move enough (non-stabilized only).
  """
  rotations = np.array(rotations)
  rot_per_frame_max = max(abs(rotations))
  logging.debug('Max rotation in frame: %.2f degrees',
                rot_per_frame_max*_RADS_TO_DEGS)
  if stabilized_video:
    logging.debug('Skipped camera rotation check due to stabilized video.')
    return rotations
  if rot_per_frame_max < _ROTATION_PER_FRAME_MIN:
    raise AssertionError(f'Device not moved enough: {rot_per_frame_max:.3f} '
                         f'movement. THRESH: {_ROTATION_PER_FRAME_MIN} rads.')
  logging.debug('Device movement exceeds %.2f degrees',
                _ROTATION_PER_FRAME_MIN*_RADS_TO_DEGS)
  return rotations
592
593
def get_cam_rotations_from_files(
    files, facing, file_name_stem, file_path,
    start_frame, stabilized_video=False):
  """Get the rotations of the camera between each pair of frames.

  Takes N image files and returns N-1 angular displacements corresponding
  to the rotations between adjacent pairs of frames, in radians.
  Ensures camera rotates enough if not calling with stabilized video.

  Args:
    files: List of N images saved in file
    facing: Direction camera is facing
    file_name_stem: File name stem including location for data
    file_path: Path where image files are saved
    start_frame: int; index to start at
    stabilized_video: Boolean; if called with stabilized video
  Returns:
    numpy array of N-1 camera rotation measurements (rad).
  """
  num_frames = len(files)
  logging.debug('num_frames: %d', num_frames)
  # NOTE(review): the loop variable i is read after the loops below; if
  # num_frames < 2 it is never bound and this raises NameError — confirm
  # callers always pass at least two files.

  # Do post-masking (original) method 1st
  for masking in [_POST_MASKING, _PRE_MASKING]:
    logging.debug('Using %s masking method', masking)
    rotations = []

    frame1 = read_frame_from_file(files[0], file_path)
    gframe1 = cv2.cvtColor((frame1 * 255).astype(np.uint8), cv2.COLOR_RGB2GRAY)
    frame_h, frame_w, _ = frame1.shape
    logging.debug('Frame size %d x %d', frame_w, frame_h)
    pre_mask = np.zeros_like(gframe1)

    # create mask
    ymin = int(frame_h * (1 - _FEATURE_MARGIN) / 2)
    ymax = int(frame_h * (1 + _FEATURE_MARGIN) / 2)

    pre_mask[ymin:ymax, :] = 255
    for i in range(1, num_frames):
      frame0 = frame1
      gframe0 = gframe1
      frame1 = read_frame_from_file(files[i], file_path)
      gframe1 = cv2.cvtColor(
          (frame1 * 255.0).astype(np.uint8), cv2.COLOR_RGB2GRAY)

      rotation = derive_rotation(
          masking, pre_mask, frame0, gframe0, gframe1, i,
          start_frame, ymin, ymax, facing, file_name_stem)
      if rotation is None:
        break  # too few features; fall through to the next masking method
      rotations.append(rotation)
    if i == num_frames-1:
      logging.debug('Correct num of frames found: %d', i)
      break  # exit if enough features in all frames
  if i != num_frames-1:
    raise AssertionError('Neither method found enough features in all frames')

  return rotations_in_numpy(rotations, stabilized_video)
652
653
def get_cam_rotations_from_frames(
    frames, facing, h, file_name_stem,
    start_frame, stabilized_video=False):
  """Get the rotations of the camera between each pair of frames.

  Takes N frames and returns N-1 angular displacements corresponding to the
  rotations between adjacent pairs of frames, in radians.
  Ensures camera rotates enough if not calling with stabilized video.

  Args:
    frames: List of N images (as RGB numpy arrays).
    facing: Direction camera is facing.
    h: Pixel height of each frame.
    file_name_stem: file name stem including location for data.
    start_frame: int; index to start at
    stabilized_video: Boolean; if called with stabilized video

  Returns:
    numpy array of N-1 camera rotation measurements (rad).
  """
  gframes = []
  for frame in frames:
    frame = (frame * 255.0).astype(np.uint8)  # cv2 uses [0, 255]
    gframes.append(cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY))
  num_frames = len(gframes)
  logging.debug('num_frames: %d', num_frames)
  # NOTE(review): the loop variable i is read after the loops below; if
  # num_frames < 2 it is never bound and this raises NameError — confirm
  # callers always pass at least two frames.
  # create mask
  ymin = int(h * (1 - _FEATURE_MARGIN) / 2)
  ymax = int(h * (1 + _FEATURE_MARGIN) / 2)
  pre_mask = np.zeros_like(gframes[0])
  pre_mask[ymin:ymax, :] = 255

  # Do post-masking (original) method 1st
  for masking in [_POST_MASKING, _PRE_MASKING]:
    logging.debug('Using %s masking method', masking)
    rotations = []
    for i in range(1, num_frames):
      rotation = derive_rotation(
          masking, pre_mask, frames[i-1], gframes[i-1], gframes[i],
          i, start_frame, ymin, ymax, facing, file_name_stem)
      if rotation is None:
        break  # too few features; fall through to the next masking method
      rotations.append(rotation)
    if i == num_frames-1:
      logging.debug('Correct num of frames found: %d', i)
      break  # exit if enough features in all frames
  if i != num_frames-1:
    raise AssertionError('Neither method found enough features in all frames')

  return rotations_in_numpy(rotations, stabilized_video)
704
705
def get_best_alignment_offset(cam_times, cam_rots, gyro_events, degree=2):
  """Find the best offset to align the camera and gyro motion traces.

  This function integrates the shifted gyro data between camera samples
  for a range of candidate shift values, and returns the shift that
  result in the best correlation.

  Uses a correlation distance metric between the curves, where a smaller
  value means that the curves are better-correlated.

  Fits a curve to the correlation distance data to measure the minima more
  accurately, by looking at the correlation distances within a range of
  +/- 10ms from the measured best score; note that this will use fewer
  than the full +/- 10 range for the curve fit if the measured score
  (which is used as the center of the fit) is within 10ms of the edge of
  the +/- 50ms candidate range.

  Args:
    cam_times: Array of N camera times, one for each frame.
    cam_rots: Array of N-1 camera rotation displacements (rad).
    gyro_events: List of gyro event objects.
    degree: Degree of polynomial

  Returns:
    Best alignment offset(ms), fit coefficients, candidates, and distances;
    or None when the polynomial fit near the best shift is too poor.

  Raises:
    AssertionError: if the fitted minimum is far from the coarse minimum.
  """
  # Measure the correlation distance over defined shift
  shift_candidates = np.arange(-_CORR_TIME_OFFSET_MAX,
                               _CORR_TIME_OFFSET_MAX+_CORR_TIME_OFFSET_STEP,
                               _CORR_TIME_OFFSET_STEP).tolist()
  spatial_distances = []
  for shift in shift_candidates:
    shifted_cam_times = cam_times + shift*_MSEC_TO_NSEC  # shift is in ms
    gyro_rots = get_gyro_rotations(gyro_events, shifted_cam_times)
    spatial_distance = scipy.spatial.distance.correlation(cam_rots, gyro_rots)
    logging.debug('shift %.1fms spatial distance: %.5f', shift,
                  spatial_distance)
    spatial_distances.append(spatial_distance)

  # Smaller correlation distance means better alignment.
  best_corr_dist = min(spatial_distances)
  coarse_best_shift = shift_candidates[spatial_distances.index(best_corr_dist)]
  logging.debug('Best shift without fitting is %.4f ms', coarse_best_shift)

  # Fit a polynomial around coarse_best_shift to extract best fit
  i = spatial_distances.index(best_corr_dist)
  # NOTE(review): if the coarse best is within _COARSE_FIT_RANGE entries of
  # the start of the list, i - _COARSE_FIT_RANGE goes negative and the slice
  # below wraps around — confirm this cannot occur for expected data.
  i_poly_fit_min = i - _COARSE_FIT_RANGE
  i_poly_fit_max = i + _COARSE_FIT_RANGE + 1
  shift_candidates = shift_candidates[i_poly_fit_min:i_poly_fit_max]
  spatial_distances = spatial_distances[i_poly_fit_min:i_poly_fit_max]
  logging.debug('Polynomial degree: %d', degree)
  fit_coeffs, residuals, _, _, _ = np.polyfit(
      shift_candidates, spatial_distances, degree, full=True
  )
  logging.debug('Fit coefficients: %s', fit_coeffs)
  logging.debug('Residuals: %s', residuals)
  total_sum_of_squares = np.sum(
      (spatial_distances - np.mean(spatial_distances)) ** 2
  )
  # Calculate r-squared on the entire domain for debugging
  r_squared = 1 - residuals[0] / total_sum_of_squares
  logging.debug('r^2 on the entire domain: %f', r_squared)

  # Calculate r-squared near the best shift
  domain_around_best_shift = [coarse_best_shift - _SHIFT_DOMAIN_RADIUS,
                              coarse_best_shift + _SHIFT_DOMAIN_RADIUS]
  logging.debug('Calculating r^2 on the limited domain of [%f, %f]',
                domain_around_best_shift[0], domain_around_best_shift[1])
  small_shifts_and_distances = [
      (x, y)
      for x, y in zip(shift_candidates, spatial_distances)
      if domain_around_best_shift[0] <= x <= domain_around_best_shift[1]
  ]
  small_shift_candidates, small_spatial_distances = zip(
      *small_shifts_and_distances
  )
  logging.debug('Shift candidates on limited domain: %s',
                small_shift_candidates)
  logging.debug('Spatial distances on limited domain: %s',
                small_spatial_distances)
  limited_residuals = np.sum(
      (np.polyval(fit_coeffs, small_shift_candidates) - small_spatial_distances)
      ** 2
  )
  logging.debug('Residuals on limited domain: %s', limited_residuals)
  limited_total_sum_of_squares = np.sum(
      (small_spatial_distances - np.mean(small_spatial_distances)) ** 2
  )
  limited_r_squared = 1 - limited_residuals / limited_total_sum_of_squares
  logging.debug('r^2 on limited domain: %f', limited_r_squared)

  # Calculate exact_best_shift (x where y is minimum of parabola)
  exact_best_shift = smallest_absolute_minimum_of_polynomial(fit_coeffs)

  # The fitted minimum should land close to the coarse minimum.
  if abs(coarse_best_shift - exact_best_shift) > 2.0:
    raise AssertionError(
        f'Test failed. Bad fit to time-shift curve. Coarse best shift: '
        f'{coarse_best_shift}, Exact best shift: {exact_best_shift}.')

  # Check fit of polynomial near the best shift
  if not math.isclose(limited_r_squared, 1, abs_tol=_R_SQUARED_TOLERANCE):
    logging.debug('r-squared on domain [%f, %f] was %f, expected 1.0, '
                  'ATOL: %f',
                  domain_around_best_shift[0], domain_around_best_shift[1],
                  limited_r_squared, _R_SQUARED_TOLERANCE)
    return None

  return exact_best_shift, fit_coeffs, shift_candidates, spatial_distances
813
814
def plot_camera_rotations(cam_rots, start_frame, video_quality,
                          plot_name_stem):
  """Plot per-frame camera rotation angles and save the figure as a PNG.

  Rotations are converted from radians to degrees before plotting. The
  output file is named '<plot_name_stem>_cam_rots.png'.

  Args:
   cam_rots: np array of camera rotations angle per frame
   start_frame: int value of start frame
   video_quality: str for video quality identifier
   plot_name_stem: str (with path) of what to call plot
  """

  plt.figure(video_quality)
  num_frames = len(cam_rots)
  frame_numbers = range(start_frame, start_frame + num_frames)
  plt.title(f'Camera rotation vs frame {video_quality}')
  plt.plot(frame_numbers, cam_rots*_RADS_TO_DEGS, '-ro', label='x')
  plt.xlabel('frame #')
  plt.ylabel('camera rotation (degrees)')
  plt.savefig(f'{plot_name_stem}_cam_rots.png')
  plt.close(video_quality)
834
835
def plot_gyro_events(gyro_events, plot_name, log_path):
  """Plot smoothed x, y, and z traces of the gyroscope events.

  Samples are grouped into _NUM_GYRO_PTS_TO_AVG-sized bins and each bin is
  averaged to suppress random spikes in the data. x and y share the upper
  axes (zoomed 4x relative to z's range); z is drawn on the lower axes.

  Args:
    gyro_events: List of gyroscope events.
    plot_name:  name of plot(s).
    log_path: location to save data.

  Raises:
    AssertionError: if the peak |z| rotation rate exceeds
      _GYRO_ROTATION_PER_SEC_MAX.
  """

  group_size = _NUM_GYRO_PTS_TO_AVG
  # Truncate to a whole number of averaging groups.
  num_usable = (len(gyro_events) // group_size) * group_size
  gyro_events = gyro_events[:num_usable]
  t0 = gyro_events[0]['time']
  times = np.array([(e['time'] - t0) * _NSEC_TO_SEC for e in gyro_events])
  traces = {axis: np.array([e[axis] for e in gyro_events])
            for axis in ('x', 'y', 'z')}

  # Downsample: keep the center timestamp of each group and average the
  # group's samples together to minimize random spikes in data.
  times = times[group_size//2::group_size]
  for axis in ('x', 'y', 'z'):
    traces[axis] = traces[axis].reshape(
        num_usable//group_size, group_size).mean(1)
  x, y, z = traces['x'], traces['y'], traces['z']

  plt.figure(plot_name)
  # x & y on same axes
  plt.subplot(2, 1, 1)
  plt.title(f'{plot_name}(mean of {_NUM_GYRO_PTS_TO_AVG} pts)')
  plt.plot(times, x, 'r', label='x')
  plt.plot(times, y, 'g', label='y')
  plt.ylim([np.amin(z)/4, np.amax(z)/4])  # zoom in 4x from z axis
  plt.ylabel('gyro x,y movement (rads/s)')
  plt.legend()

  # z on separate axes
  plt.subplot(2, 1, 2)
  plt.plot(times, z, 'b', label='z')
  plt.ylim([np.amin(z), np.amax(z)])
  plt.xlabel('time (seconds)')
  plt.ylabel('gyro z movement (rads/s)')
  plt.legend()
  file_name = os.path.join(log_path, plot_name)
  plt.savefig(f'{file_name}_gyro_events.png')
  plt.close(plot_name)

  # Guard against runaway rig rotation (e.g. bad controller firmware).
  z_max = max(abs(z))
  logging.debug('z_max: %.3f', z_max)
  if z_max > _GYRO_ROTATION_PER_SEC_MAX:
    raise AssertionError(
        f'Phone moved too rapidly! Please confirm controller firmware. '
        f'Max: {z_max:.3f}, TOL: {_GYRO_ROTATION_PER_SEC_MAX} rads/s')
890
891
def conv_acceleration_to_movement(gyro_events, video_delay_time):
  """Convert gyro_events time and speed to movement during video time.

  The video window is assumed to start video_delay_time seconds after the
  first gyro sample and last video_delay_time seconds. Within that window,
  each sample's angular speed is multiplied by the elapsed time since the
  previous sample to approximate rotation per interval.

  Args:
    gyro_events: sorted dict of entries with 'time', 'x', 'y', and 'z'
    video_delay_time: time at which video starts (and the video's duration)

  Returns:
    'z' acceleration converted to movement for times around VIDEO playing.
  """
  timestamps = np.array([e['time'] for e in gyro_events])
  z_speeds = np.array([e['z'] for e in gyro_events])
  start_time = timestamps[0]
  logging.debug('gyro start time: %dns', start_time)
  logging.debug('gyro stop time: %dns', timestamps[-1])
  video_time_start = start_time + video_delay_time *_SEC_TO_NSEC
  video_time_stop = video_time_start + video_delay_time *_SEC_TO_NSEC
  logging.debug('video start time: %dns', video_time_start)
  logging.debug('video stop time: %dns', video_time_stop)

  # NOTE(review): at index 0 the i-1 reference wraps to the last sample;
  # in practice index 0 falls before video_time_start so it is filtered out,
  # but that holds only while video_delay_time > 0 — confirm with callers.
  movements = [
      (timestamps[i] - timestamps[i-1]) / _SEC_TO_NSEC * z_speeds[i]
      for i, t in enumerate(timestamps)
      if video_time_start <= t <= video_time_stop
  ]
  return np.array(movements)
918