1 package com.android.cts.verifier.sensors; 2 3 /* 4 * Copyright (C) 2014 The Android Open Source Project 5 * 6 * Licensed under the Apache License, Version 2.0 (the "License"); 7 * you may not use this file except in compliance with the License. 8 * You may obtain a copy of the License at 9 * 10 * http://www.apache.org/licenses/LICENSE-2.0 11 * 12 * Unless required by applicable law or agreed to in writing, software 13 * distributed under the License is distributed on an "AS IS" BASIS, 14 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 * See the License for the specific language governing permissions and 16 * limitations under the License. 17 */ 18 import android.media.MediaCodec; 19 import android.media.MediaExtractor; 20 import android.media.MediaFormat; 21 import android.os.Debug; 22 import android.os.Environment; 23 import android.os.PowerManager; 24 import android.util.JsonWriter; 25 import android.util.Log; 26 27 import org.opencv.core.Mat; 28 import org.opencv.core.CvType; 29 import org.opencv.core.MatOfDouble; 30 import org.opencv.core.MatOfFloat; 31 import org.opencv.core.MatOfPoint2f; 32 import org.opencv.core.MatOfPoint3f; 33 import org.opencv.core.Point; 34 import org.opencv.core.Size; 35 import org.opencv.imgcodecs.Imgcodecs; 36 import org.opencv.imgproc.Imgproc; 37 import org.opencv.calib3d.Calib3d; 38 import org.opencv.core.Core; 39 40 import org.json.JSONObject; 41 import org.json.JSONException; 42 43 import java.io.BufferedReader; 44 import java.io.ByteArrayOutputStream; 45 import java.io.File; 46 import java.io.FileNotFoundException; 47 import java.io.FileOutputStream; 48 import java.io.FileReader; 49 import java.io.IOException; 50 import java.io.OutputStream; 51 import java.io.OutputStreamWriter; 52 import java.nio.ByteBuffer; 53 import java.util.ArrayList; 54 55 import android.opengl.GLES20; 56 import javax.microedition.khronos.opengles.GL10; 57 58 /** 59 * This class does analysis on the recorded RVCVCXCheck data 
sets. 60 */ 61 public class RVCVXCheckAnalyzer { 62 private static final String TAG = "RVCVXAnalysis"; 63 private static final boolean LOCAL_LOGV = false; 64 private static final boolean LOCAL_LOGD = true; 65 private final String mPath; 66 67 private static final boolean OUTPUT_DEBUG_IMAGE = false; 68 private static final double VALID_FRAME_THRESHOLD = 0.8; 69 private static final double REPROJECTION_THRESHOLD_RATIO = 0.01; 70 private static final boolean FORCE_CV_ANALYSIS = false; 71 private static final boolean TRACE_VIDEO_ANALYSIS = false; 72 private static final double DECIMATION_FPS_TARGET = 15.0; 73 private static final double MIN_VIDEO_LENGTH_SEC = 10; 74 75 private final boolean mHaveInvertedImu; 76 RVCVXCheckAnalyzer(String path, boolean haveInvertedImu)77 RVCVXCheckAnalyzer(String path, boolean haveInvertedImu) 78 { 79 mPath = path; 80 mHaveInvertedImu = haveInvertedImu; 81 } 82 83 /** 84 * A class that contains the analysis results 85 * 86 */ 87 class AnalyzeReport { 88 public boolean error=true; 89 public String reason = "incomplete"; 90 91 // roll pitch yaw RMS error ( \sqrt{\frac{1}{n} \sum e_i^2 }) 92 // unit in rad 93 public double roll_rms_error; 94 public double pitch_rms_error; 95 public double yaw_rms_error; 96 97 // roll pitch yaw max error 98 // unit in rad 99 public double roll_max_error; 100 public double pitch_max_error; 101 public double yaw_max_error; 102 103 // optimal t delta between sensor and camera data set to make best match 104 public double optimal_delta_t; 105 // the associate yaw offset based on initial values 106 public double yaw_offset; 107 108 public int n_of_frame; 109 public int n_of_valid_frame; 110 111 // both data below are in [sec] 112 public double sensor_period_avg; 113 public double sensor_period_stdev; 114 115 /** 116 * write Json format serialization to a file in case future processing need the data 117 */ writeToFile(File file)118 public void writeToFile(File file) { 119 try { 120 writeJSONToStream(new 
FileOutputStream(file)); 121 } catch (FileNotFoundException e) { 122 e.printStackTrace(); 123 Log.e(TAG, "Cannot create analyze report file."); 124 } 125 } 126 127 /** 128 * Get the JSON format serialization 129 *@return Json format serialization as String 130 */ 131 @Override toString()132 public String toString() { 133 ByteArrayOutputStream s = new ByteArrayOutputStream(); 134 writeJSONToStream(s); 135 return new String(s.toByteArray(), java.nio.charset.StandardCharsets.UTF_8); 136 } 137 writeJSONToStream(OutputStream s)138 private void writeJSONToStream(OutputStream s) { 139 try{ 140 JsonWriter writer = 141 new JsonWriter( 142 new OutputStreamWriter( s ) 143 ); 144 writer.beginObject(); 145 writer.setLenient(true); 146 147 writer.name("roll_rms_error").value(roll_rms_error); 148 writer.name("pitch_rms_error").value(pitch_rms_error); 149 writer.name("yaw_rms_error").value(yaw_rms_error); 150 writer.name("roll_max_error").value(roll_max_error); 151 writer.name("pitch_max_error").value(pitch_max_error); 152 writer.name("yaw_max_error").value(yaw_max_error); 153 writer.name("optimal_delta_t").value(optimal_delta_t); 154 writer.name("yaw_offset").value(yaw_offset); 155 writer.name("n_of_frame").value(n_of_frame); 156 writer.name("n_of_valid_frame").value(n_of_valid_frame); 157 writer.name("sensor_period_avg").value(sensor_period_avg); 158 writer.name("sensor_period_stdev").value(sensor_period_stdev); 159 160 writer.endObject(); 161 162 writer.close(); 163 } catch (IOException e) { 164 // do nothing 165 Log.e(TAG, "Error in serialize analyze report to JSON"); 166 } catch (IllegalArgumentException e) { 167 e.printStackTrace(); 168 Log.e(TAG, "Invalid parameter to write into JSON format"); 169 } 170 } 171 } 172 173 /** 174 * Process data set stored in the path specified in constructor 175 * and return an analyze report to caller 176 * 177 * @return An AnalyzeReport that contains detailed information about analysis 178 */ processDataSet()179 public AnalyzeReport 
processDataSet() {
        int nframe;// number of frames in video
        int nslog; // number of sensor log
        int nvlog; // number of video generated log


        AnalyzeReport report = new AnalyzeReport();

        ArrayList<AttitudeRec> srecs = new ArrayList<>();   // sensor-derived attitudes
        ArrayList<AttitudeRec> vrecs = new ArrayList<>();   // vision-derived attitudes
        ArrayList<AttitudeRec> srecs2 = new ArrayList<>();  // sensor attitudes resampled to vision times


        // Reuse previously solved vision results if present, to skip the slow CV pass.
        final boolean use_solved = new File(mPath, "vision_rpy.log").exists() && !FORCE_CV_ANALYSIS;

        if (use_solved) {
            nframe = nvlog = loadAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
            nslog = loadAttitudeRecs(new File(mPath, "sensor_rpy.log"),srecs);
        }else {
            nframe = analyzeVideo(vrecs);
            nvlog = vrecs.size();

            if (LOCAL_LOGV) {
                Log.v(TAG, "Post video analysis nvlog = " + nvlog + " nframe=" + nframe);
            }
            if (nvlog <= 0 || nframe <= 0) {
                // invalid results
                report.reason = "Unable to load recorded video.";
                return report;
            }
            if (nframe < MIN_VIDEO_LENGTH_SEC*VALID_FRAME_THRESHOLD) {
                // video is too short
                Log.w(TAG, "Video record is to short, n frame = " + nframe);
                report.reason = "Video too short.";
                return report;
            }
            if ((double) nvlog / nframe < VALID_FRAME_THRESHOLD) {
                // too many invalid frames
                Log.w(TAG, "Too many invalid frames, n valid frame = " + nvlog +
                        ", n total frame = " + nframe);
                report.reason = "Too many invalid frames.";
                return report;
            }

            fixFlippedAxis(vrecs);

            nslog = loadSensorLog(srecs);
        }

        // Gradient descent will have faster performance than this simple search,
        // but the performance is dominated by the vision part, so it is not very necessary.
        double delta_t;
        double min_rms = Double.MAX_VALUE;
        double min_delta_t =0.;
        double min_yaw_offset =0.;

        // pre-allocation: srecs2 needs one slot per vision record before resampling
        for (AttitudeRec i: vrecs) {
            srecs2.add(new AttitudeRec(0,0,0,0));
        }

        // find optimal offset: brute-force scan of time offset in [-2s, 2s) at 10ms steps,
        // scoring by combined roll+pitch RMS error (yaw is excluded here since its offset
        // is solved separately from the first sample).
        for (delta_t = -2.0; delta_t<2.0; delta_t +=0.01) {
            double rms;
            resampleSensorLog(srecs, vrecs, delta_t, 0.0, srecs2);
            rms = Math.sqrt(calcSqrErr(vrecs, srecs2, 0)+ calcSqrErr(vrecs, srecs2, 1));
            if (rms < min_rms) {
                min_rms = rms;
                min_delta_t = delta_t;
                min_yaw_offset = vrecs.get(0).yaw - srecs2.get(0).yaw;
            }
        }
        // sample at optimal offset
        resampleSensorLog(srecs, vrecs, min_delta_t, min_yaw_offset, srecs2);

        // cache solved vision/sensor logs so future runs can take the use_solved path
        if (!use_solved) {
            dumpAttitudeRecs(new File(mPath, "vision_rpy.log"), vrecs);
            dumpAttitudeRecs(new File(mPath, "sensor_rpy.log"), srecs);
        }
        dumpAttitudeRecs(new File(mPath, "sensor_rpy_resampled.log"), srecs2);
        dumpAttitudeError(new File(mPath, "attitude_error.log"), vrecs, srecs2);

        // fill report fields
        report.roll_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 0));
        report.pitch_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 1));
        report.yaw_rms_error = Math.sqrt(calcSqrErr(vrecs, srecs2, 2));

        report.roll_max_error = calcMaxErr(vrecs, srecs2, 0);
        report.pitch_max_error = calcMaxErr(vrecs, srecs2, 1);
        report.yaw_max_error = calcMaxErr(vrecs, srecs2, 2);

        report.optimal_delta_t = min_delta_t;
        report.yaw_offset = (min_yaw_offset);

        report.n_of_frame = nframe;
        report.n_of_valid_frame = nvlog;

        double [] sensor_period_stat = calcSensorPeriodStat(srecs);
        report.sensor_period_avg = sensor_period_stat[0];
        report.sensor_period_stdev = sensor_period_stat[1];

        // output report to file and log in JSON format as well
        report.writeToFile(new File(mPath, "report.json"));
        if (LOCAL_LOGV) Log.v(TAG, "Report in JSON:" + report.toString());

        report.reason = "Completed";
        report.error = false;
        return report;
    }

    /**
     * Generate pattern geometry like this one
     * http://docs.opencv.org/trunk/_downloads/acircles_pattern.png
     *
     * @return Array of 3D points
     */
    private MatOfPoint3f asymmetricalCircleGrid(Size size) {
        final int cn = 3;   // 3 coordinates (x, y, z) per point

        int n = (int)(size.width * size.height);
        float positions[] = new float[n * cn];
        float unit=0.02f;   // physical spacing unit of the circle grid, in meters
        MatOfPoint3f grid = new MatOfPoint3f();

        for (int i = 0; i < size.height; i++) {
            for (int j = 0; j < size.width * cn; j += cn) {
                // x: every other row is shifted by one unit (asymmetric grid)
                positions[(int) (i * size.width * cn + j + 0)] =
                        (2 * (j / cn) + i % 2) * (float) unit;
                // y: row index scaled by unit
                positions[(int) (i * size.width * cn + j + 1)] =
                        i * unit;
                // z: the pattern is planar
                positions[(int) (i * size.width * cn + j + 2)] = 0;
            }
        }
        grid.create(n, 1, CvType.CV_32FC3);
        grid.put(0, 0, positions);
        return grid;
    }

    /**
     * Create a camera intrinsic matrix using input parameters
     *
     * The camera intrinsic matrix will be like:
     *
     *       +-                       -+
     *       |  f   0    center.width  |
     *   A = |  0   f    center.height |
     *       |  0   0         1        |
     *       +-                       -+
     *
     * @return An approximated (not actually calibrated) camera matrix
     */
    private static Mat cameraMatrix(float f, Size center)  {
        final double [] data = {f, 0, center.width, 0, f, center.height, 0, 0, 1f};
        Mat m = new Mat(3,3, CvType.CV_64F);
        m.put(0, 0, data);
        return m;
    }

    /**
     * Attitude record in time roll pitch yaw format.
339 * 340 */ 341 private class AttitudeRec { 342 public double time; 343 public double roll; 344 public double pitch; 345 public double yaw; 346 347 // ctor AttitudeRec(double atime, double aroll, double apitch, double ayaw)348 AttitudeRec(double atime, double aroll, double apitch, double ayaw) { 349 time = atime; 350 roll = aroll; 351 pitch = apitch; 352 yaw = ayaw; 353 } 354 355 // ctor AttitudeRec(double atime, double [] rpy)356 AttitudeRec(double atime, double [] rpy) { 357 time = atime; 358 roll = rpy[0]; 359 pitch = rpy[1]; 360 yaw = rpy[2]; 361 } 362 363 // copy value of another to this assign(AttitudeRec rec)364 void assign(AttitudeRec rec) { 365 time = rec.time; 366 roll = rec.time; 367 pitch = rec.pitch; 368 yaw = rec.yaw; 369 } 370 371 // copy roll-pitch-yaw value but leave the time specified by atime assign(AttitudeRec rec, double atime)372 void assign(AttitudeRec rec, double atime) { 373 time = atime; 374 roll = rec.time; 375 pitch = rec.pitch; 376 yaw = rec.yaw; 377 } 378 379 // set each field separately set(double atime, double aroll, double apitch, double ayaw)380 void set(double atime, double aroll, double apitch, double ayaw) { 381 time = atime; 382 roll = aroll; 383 pitch = apitch; 384 yaw = ayaw; 385 } 386 } 387 388 389 /** 390 * Load the sensor log in (time Roll-pitch-yaw) format to a ArrayList<AttitudeRec> 391 * 392 * @return the number of sensor log items 393 */ loadSensorLog(ArrayList<AttitudeRec> recs)394 private int loadSensorLog(ArrayList<AttitudeRec> recs) { 395 //ArrayList<AttitudeRec> recs = new ArrayList<AttitudeRec>(); 396 File csvFile = new File(mPath, "sensor.log"); 397 BufferedReader br=null; 398 String line; 399 400 // preallocate and reuse 401 double [] quat = new double[4]; 402 double [] rpy = new double[3]; 403 404 double t0 = -1; 405 406 Log.i(TAG, "Converting sensor log; inverted IMU adjustment: " + mHaveInvertedImu); 407 try { 408 br = new BufferedReader(new FileReader(csvFile)); 409 while ((line = br.readLine()) != null) { 
                //space separator
                String[] items = line.split(" ");

                // expected format: <timestamp_ns> <qw?> <qx?> <qy?> <qz?> (5 fields);
                // anything else invalidates the whole log
                if (items.length != 5) {
                    recs.clear();
                    return -1;
                }

                quat[0] = Double.parseDouble(items[1]);
                quat[1] = Double.parseDouble(items[2]);
                quat[2] = Double.parseDouble(items[3]);
                quat[3] = Double.parseDouble(items[4]);

                // convert quaternion to roll-pitch-yaw
                quat2rpy(quat, rpy);

                if (mHaveInvertedImu) {
                    // Compensate for front-facing camera rotated around hinge along
                    // Y-axis with IMU on same panel (so IMU X & Z axes are inverted
                    // compared to what we expect): offset roll by 180 degrees and
                    // invert pitch and roll directions
                    rpy[0] -= Math.PI;
                    if (rpy[0] <= -Math.PI) {
                        rpy[0] += 2 * Math.PI;
                    }
                    rpy[0] *= -1;
                    rpy[1] *= -1;
                }

                // first timestamp becomes the time origin (timestamps are ns -> s)
                if (t0 < 0) {
                    t0 = Long.parseLong(items[0])/1e9;
                }
                recs.add(new AttitudeRec(Long.parseLong(items[0])/1e9-t0, rpy));
            }

        } catch (FileNotFoundException e) {
            e.printStackTrace();
            Log.e(TAG, "Cannot find sensor logging data");
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "Cannot read sensor logging data");
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        return recs.size();
    }

    /**
     * Read video meta info
     */
    private class VideoMetaInfo {
        public double fps;
        public int frameWidth;
        public int frameHeight;
        public double fovWidth;    // field of view, radians (converted from degrees below)
        public double fovHeight;   // field of view, radians (converted from degrees below)
        public boolean valid = false;   // true only after all JSON fields parse successfully

        VideoMetaInfo(File file) {

            BufferedReader br=null;
            String line;
            String content="";
            // slurp the whole file into one string for JSON parsing
            try {
                br = new BufferedReader(new FileReader(file));
                while ((line = br.readLine()) != null) {
                    content = content +line;
                }

            } catch (FileNotFoundException e) {
                e.printStackTrace();
                Log.e(TAG, "Cannot find video meta info file");
            } catch (IOException e) {
                e.printStackTrace();
                Log.e(TAG, "Cannot read video meta info file");
            } finally {
                if (br != null) {
                    try {
                        br.close();
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
            }

            // file missing or unreadable: leave valid == false
            if (content.isEmpty()) {
                return;
            }

            try {
                JSONObject json = new JSONObject(content);
                frameWidth = json.getInt("width");
                frameHeight = json.getInt("height");
                fps = json.getDouble("frameRate");
                // fov fields are stored in degrees; convert to radians
                fovWidth = json.getDouble("fovW")*Math.PI/180.0;
                fovHeight = json.getDouble("fovH")*Math.PI/180.0;
            } catch (JSONException e) {
                // malformed / incomplete JSON: leave valid == false
                return;
            }

            valid = true;

        }
    }



    /**
     * Debugging helper function, load ArrayList<AttitudeRec> from a file dumped out by
     * dumpAttitudeRecs
     *
     * @return number of records loaded, or -1 if a malformed line was found
     */
    private int loadAttitudeRecs(File file, ArrayList<AttitudeRec> recs) {
        BufferedReader br=null;
        String line;
        double time;
        double [] rpy = new double[3];

        try {
            br = new BufferedReader(new FileReader(file));
            while ((line = br.readLine()) != null) {
                //space separator
                String[] items = line.split(" ");

                // expected format: <time> <roll> <pitch> <yaw>
                if (items.length != 4) {
                    recs.clear();
                    return -1;
                }

                time = Double.parseDouble(items[0]);
                rpy[0] = Double.parseDouble(items[1]);
                rpy[1] = Double.parseDouble(items[2]);
                rpy[2] = Double.parseDouble(items[3]);

                recs.add(new AttitudeRec(time, rpy));
            }

        } catch (FileNotFoundException e) {
            e.printStackTrace();
            Log.e(TAG, "Cannot find AttitudeRecs file specified.");
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "Read AttitudeRecs file failure");
        } finally {
            if (br != null) {
                try {
                    br.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        return recs.size();
    }
    /**
     * Debugging helper function, Dump an ArrayList<AttitudeRec> to a file
     */
private void dumpAttitudeRecs(File file, ArrayList<AttitudeRec> recs) { 575 OutputStreamWriter w=null; 576 try { 577 w = new OutputStreamWriter(new FileOutputStream(file)); 578 579 for (AttitudeRec r : recs) { 580 w.write(String.format("%f %f %f %f\r\n", r.time, r.roll, r.pitch, r.yaw)); 581 } 582 w.close(); 583 } catch(FileNotFoundException e) { 584 e.printStackTrace(); 585 Log.e(TAG, "Cannot create AttitudeRecs file."); 586 } catch (IOException e) { 587 Log.e(TAG, "Write AttitudeRecs file failure"); 588 } finally { 589 if (w!=null) { 590 try { 591 w.close(); 592 } catch (IOException e) { 593 e.printStackTrace(); 594 } 595 } 596 } 597 } 598 599 /** 600 * Read the sensor log in ArrayList<AttitudeRec> format and find out the sensor sample time 601 * statistics: mean and standard deviation. 602 * 603 * @return The returned value will be a double array with exact 2 items, first [0] will be 604 * mean and the second [1] will be the standard deviation. 605 * 606 */ calcSensorPeriodStat(ArrayList<AttitudeRec> srec)607 private double [] calcSensorPeriodStat(ArrayList<AttitudeRec> srec) { 608 double tp = srec.get(0).time; 609 int i; 610 double sum = 0.0; 611 double sumsq = 0.0; 612 for(i=1; i<srec.size(); ++i) { 613 double dt; 614 dt = srec.get(i).time - tp; 615 sum += dt; 616 sumsq += dt*dt; 617 tp += dt; 618 } 619 double [] ret = new double[2]; 620 ret[0] = sum/srec.size(); 621 ret[1] = Math.sqrt(sumsq/srec.size() - ret[0]*ret[0]); 622 return ret; 623 } 624 625 /** 626 * Flipping the axis as the image are flipped upside down in OpenGL frames 627 */ fixFlippedAxis(ArrayList<AttitudeRec> vrecs)628 private void fixFlippedAxis(ArrayList<AttitudeRec> vrecs) { 629 for (AttitudeRec i: vrecs) { 630 i.yaw = -i.yaw; 631 } 632 } 633 634 /** 635 * Calculate the maximum error on the specified axis between two time aligned (resampled) 636 * ArrayList<AttitudeRec>. 
Yaw axis needs special treatment as 0 and 2pi error are same thing 637 * 638 * @param ra one ArrayList of AttitudeRec 639 * @param rb the other ArrayList of AttitudeRec 640 * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw) 641 * @return Maximum error 642 */ calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)643 private double calcMaxErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) { 644 // check if they are valid and comparable data 645 if (ra.size() != rb.size()) { 646 throw new ArrayIndexOutOfBoundsException("Two array has to be the same"); 647 } 648 // check input parameter validity 649 if (axis<0 || axis > 2) { 650 throw new IllegalArgumentException("Invalid data axis."); 651 } 652 653 int i; 654 double max = 0.0; 655 double diff = 0.0; 656 for(i=0; i<ra.size(); ++i) { 657 // make sure they are aligned data 658 if (ra.get(i).time != rb.get(i).time) { 659 throw new IllegalArgumentException("Element "+i+ 660 " of two inputs has different time."); 661 } 662 switch(axis) { 663 case 0: 664 diff = ra.get(i).roll - rb.get(i).roll; // they always opposite of each other.. 665 break; 666 case 1: 667 diff = ra.get(i).pitch - rb.get(i).pitch; 668 break; 669 case 2: 670 diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI)) 671 -Math.PI)-Math.PI; 672 break; 673 } 674 diff = Math.abs(diff); 675 if (diff>max) { 676 max = diff; 677 } 678 } 679 return max; 680 } 681 682 /** 683 * Calculate the RMS error on the specified axis between two time aligned (resampled) 684 * ArrayList<AttitudeRec>. 
Yaw axis needs special treatment as 0 and 2pi error are same thing 685 * 686 * @param ra one ArrayList of AttitudeRec 687 * @param rb the other ArrayList of AttitudeRec 688 * @param axis axis id for the comparison (0 = roll, 1 = pitch, 2 = yaw) 689 * @return Mean square error 690 */ calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis)691 private double calcSqrErr(ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb, int axis) { 692 // check if they are valid and comparable data 693 if (ra.size() != rb.size()) { 694 throw new ArrayIndexOutOfBoundsException("Two array has to be the same"); 695 } 696 // check input parameter validity 697 if (axis<0 || axis > 2) { 698 throw new IllegalArgumentException("Invalid data axis."); 699 } 700 701 int i; 702 double sum = 0.0; 703 double diff = 0.0; 704 for(i=0; i<ra.size(); ++i) { 705 // check input data validity 706 if (ra.get(i).time != rb.get(i).time) { 707 throw new IllegalArgumentException("Element "+i+ 708 " of two inputs has different time."); 709 } 710 711 switch(axis) { 712 case 0: 713 diff = ra.get(i).roll - rb.get(i).roll; 714 break; 715 case 1: 716 diff = ra.get(i).pitch - rb.get(i).pitch; 717 break; 718 case 2: 719 diff = Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))- 720 Math.PI)-Math.PI; 721 break; 722 } 723 724 sum += diff*diff; 725 } 726 return sum/ra.size(); 727 } 728 729 /** 730 * Debugging helper function. 
Dump the error between two time aligned ArrayList<AttitudeRec>'s
     *
     * @param file File to write to
     * @param ra one ArrayList of AttitudeRec
     * @param rb the other ArrayList of AttitudeRec
     */
    private void dumpAttitudeError(File file, ArrayList<AttitudeRec> ra, ArrayList<AttitudeRec> rb){
        // inputs must be the same length and time aligned element by element
        if (ra.size() != rb.size()) {
            throw new ArrayIndexOutOfBoundsException("Two array has to be the same");
        }

        int i;

        ArrayList<AttitudeRec> rerr = new ArrayList<>();
        for(i=0; i<ra.size(); ++i) {
            if (ra.get(i).time != rb.get(i).time) {
                throw new IllegalArgumentException("Element "+ i
                        + " of two inputs has different time.");
            }

            // per-axis differences; yaw is wrapped so 0 and 2*pi count as the same angle
            rerr.add(new AttitudeRec(ra.get(i).time, ra.get(i).roll - rb.get(i).roll,
                    ra.get(i).pitch - rb.get(i).pitch,
                    (Math.abs(((4*Math.PI + ra.get(i).yaw - rb.get(i).yaw)%(2*Math.PI))
                            -Math.PI)-Math.PI)));

        }
        dumpAttitudeRecs(file, rerr);
    }

    /**
     * Resample one ArrayList<AttitudeRec> with respect to another ArrayList<AttitudeRec>
     *
     * @param rec the ArrayList of AttitudeRec to be sampled
     * @param timebase the other ArrayList of AttitudeRec that serves as time base
     * @param delta_t offset in time before resample
     * @param yaw_offset offset in yaw axis
     * @param resampled output ArrayList of AttitudeRec
     */

    private void resampleSensorLog(ArrayList<AttitudeRec> rec, ArrayList<AttitudeRec> timebase,
            double delta_t, double yaw_offset, ArrayList<AttitudeRec> resampled)    {
        int i;
        int j = -1;
        for(i=0; i<timebase.size(); i++) {
            double time = timebase.get(i).time + delta_t;

            // advance j so rec[j] is the last record strictly before 'time'
            while(j<rec.size()-1 && rec.get(j+1).time < time) j++;

            if (j == -1) {
                //use first
                resampled.get(i).assign(rec.get(0), timebase.get(i).time);
            } else if (j == rec.size()-1) {
                // use last
                resampled.get(i).assign(rec.get(j), timebase.get(i).time);
            } else {
                // do linear resample
                double alpha = (time - rec.get(j).time)/((rec.get(j+1).time - rec.get(j).time));
                double roll = (1-alpha) * rec.get(j).roll + alpha * rec.get(j+1).roll;
                double pitch = (1-alpha) * rec.get(j).pitch + alpha * rec.get(j+1).pitch;
                // NOTE(review): yaw_offset is only applied on this interpolation path,
                // not on the boundary (first/last) paths above — confirm intended
                double yaw = (1-alpha) * rec.get(j).yaw + alpha * rec.get(j+1).yaw + yaw_offset;
                resampled.get(i).set(timebase.get(i).time, roll, pitch, yaw);
            }
        }
    }

    /**
     * Analyze video frames using computer vision approach and generate a ArrayList<AttitudeRec>
     *
     * @param recs output ArrayList of AttitudeRec
     * @return total number of frame of the video
     */
    private int analyzeVideo(ArrayList<AttitudeRec> recs) {
        VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

        int decimation = 1;
        boolean use_timestamp = true;

        // roughly determine if decimation is necessary
        if (meta.fps > DECIMATION_FPS_TARGET) {
            decimation = (int)(meta.fps / DECIMATION_FPS_TARGET);
            meta.fps /=decimation;
        }

        VideoDecoderForOpenCV videoDecoder = new VideoDecoderForOpenCV(
                new File(mPath, "video.mp4"), decimation);


        Mat frame;
        Mat gray = new Mat();
        int i = -1;   // frame index; pre-incremented at the top of the decode loop

        Size frameSize = videoDecoder.getSize();

        // decoded frame size must agree with the recorded meta info
        if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
            // this is very unlikely
            return -1;
        }

        if (TRACE_VIDEO_ANALYSIS) {
            Debug.startMethodTracing("cvprocess");
        }

        // 4x11 asymmetric circle grid target
        final int patternWidth = 4;
        final int patternHeight = 11;
        Size patternSize = new Size(patternWidth, patternHeight);

        // approximate focal length (pixels) from the horizontal field of view
        float fc = (float)(meta.frameWidth/2.0/Math.tan(meta.fovWidth/2.0));
        Mat camMat = cameraMatrix(fc, new
                Size(frameSize.width/2, frameSize.height/2));
        MatOfDouble coeff = new MatOfDouble(); // dummy

        MatOfPoint2f centers = new MatOfPoint2f();
        MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
        Mat rvec = new MatOfFloat();
        Mat tvec = new MatOfFloat();

        MatOfPoint2f reprojCenters = new MatOfPoint2f();

        if (LOCAL_LOGV) {
            Log.v(TAG, "Camera Mat = \n" + camMat.dump());
        }

        long startTime = System.nanoTime();
        long [] ts = new long[1];   // out-param for per-frame timestamp from the decoder

        while ((frame = videoDecoder.getFrame(ts)) !=null) {
            if (LOCAL_LOGV) {
                Log.v(TAG, "got a frame " + i);
            }

            // a -1 timestamp means the decoder cannot supply timestamps; fall back
            // to frame-index-based timing for the rest of the video
            if (use_timestamp && ts[0] == -1) {
                use_timestamp = false;
            }

            // has to be in front, as there are cases where execution
            // will skip the later part of this while
            i++;

            // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
            Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

            boolean foundPattern = Calib3d.findCirclesGrid(
                    gray, patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);

            if (!foundPattern) {
                // skip to next frame
                continue;
            }

            if (OUTPUT_DEBUG_IMAGE) {
                Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
            }

            // figure out the extrinsic parameters using real ground truth 3D points and the pixel
            // position of blobs found in findCircleGrid, an estimated camera matrix and
            // no-distortion are assumed.
            boolean foundSolution =
                    Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec,
                            false, Calib3d.CV_ITERATIVE);

            if (!foundSolution) {
                // skip to next frame
                if (LOCAL_LOGV) {
                    Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
                }
                continue;
            }

            // reproject points to for evaluation of result accuracy of solvePnP
            Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

            // Calculate the average distance between opposite corners of the pattern in pixels
            Point[] centerPoints = centers.toArray();
            Point bottomLeftPos = centerPoints[0];
            Point bottomRightPos = centerPoints[patternWidth - 1];
            Point topLeftPos = centerPoints[(patternHeight * patternWidth) - patternWidth];
            Point topRightPos = centerPoints[(patternHeight * patternWidth) - 1];
            double avgPixelDist = (getDistanceBetweenPoints(bottomLeftPos, topRightPos)
                    + getDistanceBetweenPoints(bottomRightPos, topLeftPos)) / 2;

            // Calculate the average pixel error between the circle centers from the video and the
            // reprojected circle centers based on the estimated camera position. The error provides
            // a way to estimate how accurate the assumed test device's position is. If the error
            // is high, then the frame should be discarded to prevent an inaccurate test device's
            // position from being compared against the rotation vector sample at that time.
            Point[] reprojectedPointsArray = reprojCenters.toArray();
            double avgCenterError = 0.0;
            for (int curCenter = 0; curCenter < reprojectedPointsArray.length; curCenter++) {
                avgCenterError += getDistanceBetweenPoints(
                        reprojectedPointsArray[curCenter], centerPoints[curCenter]);
            }
            avgCenterError /= reprojectedPointsArray.length;

            if (LOCAL_LOGV) {
                Log.v(TAG, "Found attitude, re-projection error = " + avgCenterError);
            }

            // if error is reasonable, add it into the results.
Use a dynamic threshold based on
            // the pixel distance of opposite corners of the pattern to prevent higher resolution
            // video or the distance between the camera and the test pattern from impacting the test
            if (avgCenterError < REPROJECTION_THRESHOLD_RATIO * avgPixelDist) {
                double [] rv = new double[3];
                double timestamp;

                // rvec holds the Rodrigues rotation vector solved by solvePnP
                rvec.get(0,0, rv);
                if (use_timestamp) {
                    // decoder timestamps are in microseconds -> seconds
                    timestamp = (double)ts[0] / 1e6;
                } else {
                    // fall back to frame index over (decimated) frame rate
                    timestamp = (double) i / meta.fps;
                }
                if (LOCAL_LOGV) Log.v(TAG, String.format("Added frame %d ts = %f", i, timestamp));
                recs.add(new AttitudeRec(timestamp, rodr2rpy(rv)));
            }

            if (OUTPUT_DEBUG_IMAGE) {
                Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
                Imgcodecs.imwrite(mPath + "/RVCVRecData/DebugCV/img" + i + ".png", frame);
            }
        }

        if (LOCAL_LOGV) {
            Log.v(TAG, "Finished decoding");
        }

        if (TRACE_VIDEO_ANALYSIS) {
            Debug.stopMethodTracing();
        }

        if (LOCAL_LOGV) {
            // time analysis
            double totalTime = (System.nanoTime()-startTime)/1e9;
            Log.i(TAG, "Total time: "+totalTime +"s, Per frame time: "+totalTime/i );
        }
        return i;
    }

    /**
     * OpenCV for Android have not support the VideoCapture from file
     * This is a make shift solution before it is supported.
     * One issue right now is that the glReadPixels is quite slow ..
around 6.5ms for a 720p frame
     */
    private class VideoDecoderForOpenCV implements Runnable {
        static final String TAG = "VideoDecoderForOpenCV";

        private MediaExtractor extractor=null;
        private MediaCodec decoder=null;
        private CtsMediaOutputSurface surface=null;

        // buffer handing decoded frames over to the consumer thread
        private MatBuffer mMatBuffer;

        private final File mVideoFile;

        private boolean valid;
        // monitor used to block the caller until the decoder thread finishes setup
        private Object setupSignal;

        private Thread mThread;
        private int mDecimation;

        /**
         * Constructor
         * @param file video file
         * @param decimation process every "decimation" number of frame
         */
        VideoDecoderForOpenCV(File file, int decimation) {
            mVideoFile = file;
            mDecimation = decimation;
            valid = false;

            start();
        }

        /**
         * Constructor
         * @param file video file
         */
        VideoDecoderForOpenCV(File file)   {
            this(file, 1);
        }

        /**
         * Test if video decoder is in valid states ready to output video.
         * @return true if the decoder is set up and ready to output frames.
         */
        public boolean isValid() {
            return valid;
        }

        // Spawn the decoder thread and block until it signals setup completion.
        private void start() {
            setupSignal = new Object();
            mThread = new Thread(this);
            mThread.start();

            synchronized (setupSignal) {
                try {
                    setupSignal.wait();
                } catch (InterruptedException e) {
                    Log.e(TAG, "Interrupted when waiting for video decoder setup ready");
                }
            }
        }
        // Interrupt the decoder thread, join it, then stop the codec.
        private void stop() {
            if (mThread != null) {
                mThread.interrupt();
                try {
                    mThread.join();
                } catch (InterruptedException e) {
                    Log.e(TAG, "Interrupted when waiting for video decoder thread to stop");
                }
                try {
                    decoder.stop();
                }catch (IllegalStateException e) {
                    Log.e(TAG, "Video decoder is not in a state that can be stopped");
                }
            }
            mThread = null;
        }

        // Release codec, surface and extractor resources (idempotent).
        void teardown() {
            if (decoder!=null) {
                decoder.release();
                decoder = null;
            }
            if (surface!=null) {
                surface.release();
                surface = null;
            }
            if (extractor!=null) {
                extractor.release();
                extractor = null;
            }
        }

        // Open the video file, pick the first video track and configure the codec.
        void setup() {
            int width=0, height=0;

            extractor = new MediaExtractor();

            try {
                extractor.setDataSource(mVideoFile.getPath());
            } catch (IOException e) {
                return;
            }

            for (int i = 0; i < extractor.getTrackCount(); i++) {
                MediaFormat format = extractor.getTrackFormat(i);
                String mime = format.getString(MediaFormat.KEY_MIME);

                if (mime.startsWith("video/")) {
                    width = format.getInteger(MediaFormat.KEY_WIDTH);
                    height = format.getInteger(MediaFormat.KEY_HEIGHT);
                    extractor.selectTrack(i);
                    try {
                        decoder = MediaCodec.createDecoderByType(mime);
                    }catch (IOException e) {
                        continue;
                    }
                    // Decode to surface
                    //decoder.configure(format, surface, null, 0);

                    // Decode to offscreen surface
                    surface = new CtsMediaOutputSurface(width,
height); 1090 mMatBuffer = new MatBuffer(width, height); 1091 1092 decoder.configure(format, surface.getSurface(), null, 0); 1093 break; 1094 } 1095 } 1096 1097 if (decoder == null) { 1098 Log.e(TAG, "Can't find video info!"); 1099 return; 1100 } 1101 valid = true; 1102 } 1103 1104 @Override run()1105 public void run() { 1106 setup(); 1107 1108 synchronized (setupSignal) { 1109 setupSignal.notify(); 1110 } 1111 1112 if (!valid) { 1113 return; 1114 } 1115 1116 decoder.start(); 1117 1118 ByteBuffer[] inputBuffers = decoder.getInputBuffers(); 1119 ByteBuffer[] outputBuffers = decoder.getOutputBuffers(); 1120 MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); 1121 1122 boolean isEOS = false; 1123 long startMs = System.currentTimeMillis(); 1124 long timeoutUs = 10000; 1125 1126 int iframe = 0; 1127 long frameTimestamp = 0; 1128 1129 while (!Thread.interrupted()) { 1130 if (!isEOS) { 1131 int inIndex = decoder.dequeueInputBuffer(10000); 1132 if (inIndex >= 0) { 1133 ByteBuffer buffer = inputBuffers[inIndex]; 1134 int sampleSize = extractor.readSampleData(buffer, 0); 1135 if (sampleSize < 0) { 1136 if (LOCAL_LOGD) { 1137 Log.d("VideoDecoderForOpenCV", 1138 "InputBuffer BUFFER_FLAG_END_OF_STREAM"); 1139 } 1140 decoder.queueInputBuffer(inIndex, 0, 0, 0, 1141 MediaCodec.BUFFER_FLAG_END_OF_STREAM); 1142 isEOS = true; 1143 } else { 1144 frameTimestamp = extractor.getSampleTime(); 1145 decoder.queueInputBuffer(inIndex, 0, sampleSize, frameTimestamp, 0); 1146 if (LOCAL_LOGD) { 1147 Log.d(TAG, String.format("Frame %d sample time %f s", 1148 iframe, (double)frameTimestamp/1e6)); 1149 } 1150 extractor.advance(); 1151 } 1152 } 1153 } 1154 1155 int outIndex = decoder.dequeueOutputBuffer(info, 10000); 1156 MediaFormat outFormat; 1157 switch (outIndex) { 1158 case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 1159 if (LOCAL_LOGD) { 1160 Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED"); 1161 } 1162 outputBuffers = decoder.getOutputBuffers(); 1163 break; 1164 case 
MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 1165 outFormat = decoder.getOutputFormat(); 1166 if (LOCAL_LOGD) { 1167 Log.d(TAG, "New format " + outFormat); 1168 } 1169 break; 1170 case MediaCodec.INFO_TRY_AGAIN_LATER: 1171 if (LOCAL_LOGD) { 1172 Log.d(TAG, "dequeueOutputBuffer timed out!"); 1173 } 1174 break; 1175 default: 1176 1177 ByteBuffer buffer = outputBuffers[outIndex]; 1178 boolean doRender = (info.size != 0); 1179 1180 // As soon as we call releaseOutputBuffer, the buffer will be forwarded 1181 // to SurfaceTexture to convert to a texture. The API doesn't 1182 // guarantee that the texture will be available before the call 1183 // returns, so we need to wait for the onFrameAvailable callback to 1184 // fire. If we don't wait, we risk rendering from the previous frame. 1185 decoder.releaseOutputBuffer(outIndex, doRender); 1186 1187 if (doRender) { 1188 surface.awaitNewImage(); 1189 surface.drawImage(); 1190 if (LOCAL_LOGV) { 1191 Log.v(TAG, "Finish drawing a frame!"); 1192 } 1193 if ((iframe++ % mDecimation) == 0) { 1194 //Send the frame for processing 1195 mMatBuffer.put(frameTimestamp); 1196 } 1197 } 1198 break; 1199 } 1200 1201 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 1202 if (LOCAL_LOGD) { 1203 Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM"); 1204 } 1205 break; 1206 } 1207 } 1208 mMatBuffer.invalidate(); 1209 1210 decoder.stop(); 1211 1212 teardown(); 1213 mThread = null; 1214 } 1215 1216 1217 /** 1218 * Get next valid frame 1219 * @return Frame in OpenCV mat 1220 */ getFrame(long ts[])1221 public Mat getFrame(long ts[]) { 1222 return mMatBuffer.get(ts); 1223 } 1224 1225 /** 1226 * Get the size of the frame 1227 * @return size of the frame 1228 */ getSize()1229 Size getSize() { 1230 return mMatBuffer.getSize(); 1231 } 1232 1233 /** 1234 * A synchronized buffer 1235 */ 1236 class MatBuffer { 1237 private Mat mat; 1238 private byte[] bytes; 1239 private ByteBuffer buf; 1240 private long timestamp; 1241 private 
boolean full; 1242 1243 private int mWidth, mHeight; 1244 private boolean mValid = false; 1245 MatBuffer(int width, int height)1246 MatBuffer(int width, int height) { 1247 mWidth = width; 1248 mHeight = height; 1249 1250 mat = new Mat(height, width, CvType.CV_8UC4); //RGBA 1251 buf = ByteBuffer.allocateDirect(width*height*4); 1252 bytes = new byte[width*height*4]; 1253 timestamp = -1; 1254 1255 mValid = true; 1256 full = false; 1257 } 1258 invalidate()1259 public synchronized void invalidate() { 1260 mValid = false; 1261 notifyAll(); 1262 } 1263 get(long ts[])1264 public synchronized Mat get(long ts[]) { 1265 1266 if (!mValid) return null; 1267 while (full == false) { 1268 try { 1269 wait(); 1270 if (!mValid) return null; 1271 } catch (InterruptedException e) { 1272 return null; 1273 } 1274 } 1275 mat.put(0,0, bytes); 1276 full = false; 1277 notifyAll(); 1278 ts[0] = timestamp; 1279 return mat; 1280 } 1281 put(long ts)1282 public synchronized void put(long ts) { 1283 while (full) { 1284 try { 1285 wait(); 1286 } catch (InterruptedException e) { 1287 Log.e(TAG, "Interrupted when waiting for space in buffer"); 1288 } 1289 } 1290 GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, 1291 GL10.GL_UNSIGNED_BYTE, buf); 1292 buf.get(bytes); 1293 buf.rewind(); 1294 1295 timestamp = ts; 1296 full = true; 1297 notifyAll(); 1298 } 1299 getSize()1300 public Size getSize() { 1301 if (valid) { 1302 return mat.size(); 1303 } 1304 return new Size(); 1305 } 1306 } 1307 } 1308 1309 1310 /* a small set of math functions */ quat2rpy( double [] q)1311 private static double [] quat2rpy( double [] q) { 1312 double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])), 1313 Math.asin(2*(q[0]*q[2] - q[3]*q[1])), 1314 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))}; 1315 return rpy; 1316 } 1317 quat2rpy( double [] q, double[] rpy)1318 private static void quat2rpy( double [] q, double[] rpy) { 1319 rpy[0] = Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 
1-2*(q[1]*q[1]+q[2]*q[2])); 1320 rpy[1] = Math.asin(2*(q[0]*q[2] - q[3]*q[1])); 1321 rpy[2] = Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3])); 1322 } 1323 quat2rpy(Mat quat)1324 private static Mat quat2rpy(Mat quat) { 1325 double [] q = new double[4]; 1326 quat.get(0,0,q); 1327 1328 double [] rpy = {Math.atan2(2*(q[0]*q[1]+q[2]*q[3]), 1-2*(q[1]*q[1]+q[2]*q[2])), 1329 Math.asin(2*(q[0]*q[2] - q[3]*q[1])), 1330 Math.atan2(2*(q[0]*q[3]+q[1]*q[2]), 1-2*(q[2]*q[2]+q[3]*q[3]))}; 1331 1332 Mat rpym = new Mat(3,1, CvType.CV_64F); 1333 rpym.put(0,0, rpy); 1334 return rpym; 1335 } 1336 rodr2quat( double [] r)1337 private static double [] rodr2quat( double [] r) { 1338 double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]); 1339 double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t, 1340 Math.sin(t/2)*r[2]/t}; 1341 return quat; 1342 } 1343 rodr2quat( double [] r, double [] quat)1344 private static void rodr2quat( double [] r, double [] quat) { 1345 double t = Math.sqrt(r[0]*r[0]+r[1]*r[1]+r[2]*r[2]); 1346 quat[0] = Math.cos(t/2); 1347 quat[1] = Math.sin(t/2)*r[0]/t; 1348 quat[2] = Math.sin(t/2)*r[1]/t; 1349 quat[3] = Math.sin(t/2)*r[2]/t; 1350 } 1351 rodr2quat(Mat rodr)1352 private static Mat rodr2quat(Mat rodr) { 1353 double t = Core.norm(rodr); 1354 double [] r = new double[3]; 1355 rodr.get(0,0,r); 1356 1357 double [] quat = {Math.cos(t/2), Math.sin(t/2)*r[0]/t,Math.sin(t/2)*r[1]/t, 1358 Math.sin(t/2)*r[2]/t}; 1359 Mat quatm = new Mat(4,1, CvType.CV_64F); 1360 quatm.put(0, 0, quat); 1361 return quatm; 1362 } 1363 rodr2rpy( double [] r)1364 private static double [] rodr2rpy( double [] r) { 1365 return quat2rpy(rodr2quat(r)); 1366 } 1367 getDistanceBetweenPoints(Point a, Point b)1368 private double getDistanceBetweenPoints(Point a, Point b) { 1369 return Math.sqrt(Math.pow(a.x - b.x, 2) + Math.pow(a.y - b.y, 2)); 1370 } 1371 ////////////////// 1372 1373 } 1374