• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
2 
3 Licensed under the Apache License, Version 2.0 (the "License");
4 you may not use this file except in compliance with the License.
5 You may obtain a copy of the License at
6 
7     http://www.apache.org/licenses/LICENSE-2.0
8 
9 Unless required by applicable law or agreed to in writing, software
10 distributed under the License is distributed on an "AS IS" BASIS,
11 WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 See the License for the specific language governing permissions and
13 limitations under the License.
14 ==============================================================================*/
15 
16 #ifdef __RENDER_OPENGL__
17 #include <GLES/gl.h>
18 #include <GLES/glext.h>
19 #endif
20 
21 #include <string>
22 #include <map>
23 
24 #include "tensorflow/examples/android/jni/object_tracking/geom.h"
25 #include "tensorflow/examples/android/jni/object_tracking/image-inl.h"
26 #include "tensorflow/examples/android/jni/object_tracking/image.h"
27 #include "tensorflow/examples/android/jni/object_tracking/integral_image.h"
28 #include "tensorflow/examples/android/jni/object_tracking/logging.h"
29 #include "tensorflow/examples/android/jni/object_tracking/time_log.h"
30 #include "tensorflow/examples/android/jni/object_tracking/utils.h"
31 
32 #include "tensorflow/examples/android/jni/object_tracking/config.h"
33 #include "tensorflow/examples/android/jni/object_tracking/flow_cache.h"
34 #include "tensorflow/examples/android/jni/object_tracking/keypoint_detector.h"
35 #include "tensorflow/examples/android/jni/object_tracking/object_detector.h"
36 #include "tensorflow/examples/android/jni/object_tracking/object_tracker.h"
37 #include "tensorflow/examples/android/jni/object_tracking/optical_flow.h"
38 
39 namespace tf_tracking {
40 
ObjectTracker(const TrackerConfig * const config,ObjectDetectorBase * const detector)41 ObjectTracker::ObjectTracker(const TrackerConfig* const config,
42                              ObjectDetectorBase* const detector)
43     : config_(config),
44       frame_width_(config->image_size.width),
45       frame_height_(config->image_size.height),
46       curr_time_(0),
47       num_frames_(0),
48       flow_cache_(&config->flow_config),
49       keypoint_detector_(&config->keypoint_detector_config),
50       curr_num_frame_pairs_(0),
51       first_frame_index_(0),
52       frame1_(new ImageData(frame_width_, frame_height_)),
53       frame2_(new ImageData(frame_width_, frame_height_)),
54       detector_(detector),
55       num_detected_(0) {
56   for (int i = 0; i < kNumFrames; ++i) {
57     frame_pairs_[i].Init(-1, -1);
58   }
59 }
60 
61 
~ObjectTracker()62 ObjectTracker::~ObjectTracker() {
63   for (TrackedObjectMap::iterator iter = objects_.begin();
64        iter != objects_.end(); iter++) {
65     TrackedObject* object = iter->second;
66     SAFE_DELETE(object);
67   }
68 }
69 
70 
71 // Finds the correspondences for all the points in the current pair of frames.
72 // Stores the results in the given FramePair.
FindCorrespondences(FramePair * const frame_pair) const73 void ObjectTracker::FindCorrespondences(FramePair* const frame_pair) const {
74   // Keypoints aren't found until they're found.
75   memset(frame_pair->optical_flow_found_keypoint_, false,
76          sizeof(*frame_pair->optical_flow_found_keypoint_) * kMaxKeypoints);
77   TimeLog("Cleared old found keypoints");
78 
79   int num_keypoints_found = 0;
80 
81   // For every keypoint...
82   for (int i_feat = 0; i_feat < frame_pair->number_of_keypoints_; ++i_feat) {
83     Keypoint* const keypoint1 = frame_pair->frame1_keypoints_ + i_feat;
84     Keypoint* const keypoint2 = frame_pair->frame2_keypoints_ + i_feat;
85 
86     if (flow_cache_.FindNewPositionOfPoint(
87         keypoint1->pos_.x, keypoint1->pos_.y,
88         &keypoint2->pos_.x, &keypoint2->pos_.y)) {
89       frame_pair->optical_flow_found_keypoint_[i_feat] = true;
90       ++num_keypoints_found;
91     }
92   }
93 
94   TimeLog("Found correspondences");
95 
96   LOGV("Found %d of %d keypoint correspondences",
97        num_keypoints_found, frame_pair->number_of_keypoints_);
98 }
99 
// Ingests the next video frame.  |new_frame| is the luminance plane and
// |uv_frame| the chroma plane; |timestamp| must strictly increase between
// calls.  |alignment_matrix_2x3| is handed to the flow cache alongside the
// new frame.  Advances the frame-pair ring buffer, runs optical flow, tracks
// existing targets, and periodically triggers detection.
void ObjectTracker::NextFrame(const uint8_t* const new_frame,
                              const uint8_t* const uv_frame,
                              const int64_t timestamp,
                              const float* const alignment_matrix_2x3) {
  IncrementFrameIndex();
  LOGV("Received frame %d", num_frames_);

  // Claim the newest ring-buffer slot for the interval
  // [curr_time_, timestamp].
  FramePair* const curr_change = frame_pairs_ + GetNthIndexFromEnd(0);
  curr_change->Init(curr_time_, timestamp);

  CHECK_ALWAYS(curr_time_ < timestamp,
               "Timestamp must monotonically increase! Went from %lld to %lld"
               " on frame %d.",
               curr_time_, timestamp, num_frames_);
  curr_time_ = timestamp;

  // Swap the frames: frame2_ becomes the previous frame (frame1_) and its
  // old buffer is recycled to hold the incoming data.
  frame1_.swap(frame2_);

  frame2_->SetData(new_frame, uv_frame, frame_width_, timestamp, 1);

  if (detector_.get() != NULL) {
    detector_->SetImageData(frame2_.get());
  }

  flow_cache_.NextFrame(frame2_.get(), alignment_matrix_2x3);

  if (num_frames_ == 1) {
    // This must be the first frame, so abort.
    return;
  }

  if (config_->always_track || objects_.size() > 0) {
    LOGV("Tracking %zu targets", objects_.size());
    ComputeKeypoints(true);
    TimeLog("Keypoints computed!");

    FindCorrespondences(curr_change);
    TimeLog("Flow computed!");

    TrackObjects();
  }
  TimeLog("Targets tracked!");

  // Detection is expensive, so it only runs every kDetectEveryNFrames frames.
  if (detector_.get() != NULL && num_frames_ % kDetectEveryNFrames == 0) {
    DetectTargets();
  }
  TimeLog("Detected objects.");
}
149 
MaybeAddObject(const std::string & id,const Image<uint8_t> & source_image,const BoundingBox & bounding_box,const ObjectModelBase * object_model)150 TrackedObject* ObjectTracker::MaybeAddObject(
151     const std::string& id, const Image<uint8_t>& source_image,
152     const BoundingBox& bounding_box, const ObjectModelBase* object_model) {
153   // Train the detector if this is a new object.
154   if (objects_.find(id) != objects_.end()) {
155     return objects_[id];
156   }
157 
158   // Need to get a non-const version of the model, or create a new one if it
159   // wasn't given.
160   ObjectModelBase* model = NULL;
161   if (detector_ != NULL) {
162     // If a detector is registered, then this new object must have a model.
163     CHECK_ALWAYS(object_model != NULL, "No model given!");
164     model = detector_->CreateObjectModel(object_model->GetName());
165   }
166   TrackedObject* const object =
167       new TrackedObject(id, source_image, bounding_box, model);
168 
169   objects_[id] = object;
170   return object;
171 }
172 
RegisterNewObjectWithAppearance(const std::string & id,const uint8_t * const new_frame,const BoundingBox & bounding_box)173 void ObjectTracker::RegisterNewObjectWithAppearance(
174     const std::string& id, const uint8_t* const new_frame,
175     const BoundingBox& bounding_box) {
176   ObjectModelBase* object_model = NULL;
177 
178   Image<uint8_t> image(frame_width_, frame_height_);
179   image.FromArray(new_frame, frame_width_, 1);
180 
181   if (detector_ != NULL) {
182     object_model = detector_->CreateObjectModel(id);
183     CHECK_ALWAYS(object_model != NULL, "Null object model!");
184 
185     const IntegralImage integral_image(image);
186     object_model->TrackStep(bounding_box, image, integral_image, true);
187   }
188 
189   // Create an object at this position.
190   CHECK_ALWAYS(!HaveObject(id), "Already have this object!");
191   if (objects_.find(id) == objects_.end()) {
192     TrackedObject* const object =
193         MaybeAddObject(id, image, bounding_box, object_model);
194     CHECK_ALWAYS(object != NULL, "Object not created!");
195   }
196 }
197 
SetPreviousPositionOfObject(const std::string & id,const BoundingBox & bounding_box,const int64_t timestamp)198 void ObjectTracker::SetPreviousPositionOfObject(const std::string& id,
199                                                 const BoundingBox& bounding_box,
200                                                 const int64_t timestamp) {
201   CHECK_ALWAYS(timestamp > 0, "Timestamp too low! %lld", timestamp);
202   CHECK_ALWAYS(timestamp <= curr_time_,
203                "Timestamp too great! %lld vs %lld", timestamp, curr_time_);
204 
205   TrackedObject* const object = GetObject(id);
206 
207   // Track this bounding box from the past to the current time.
208   const BoundingBox current_position = TrackBox(bounding_box, timestamp);
209 
210   object->UpdatePosition(current_position, curr_time_, *frame2_, false);
211 
212   VLOG(2) << "Set tracked position for " << id << " to " << bounding_box
213           << std::endl;
214 }
215 
216 
SetCurrentPositionOfObject(const std::string & id,const BoundingBox & bounding_box)217 void ObjectTracker::SetCurrentPositionOfObject(
218     const std::string& id, const BoundingBox& bounding_box) {
219   SetPreviousPositionOfObject(id, bounding_box, curr_time_);
220 }
221 
222 
ForgetTarget(const std::string & id)223 void ObjectTracker::ForgetTarget(const std::string& id) {
224   LOGV("Forgetting object %s", id.c_str());
225   TrackedObject* const object = GetObject(id);
226   delete object;
227   objects_.erase(id);
228 
229   if (detector_ != NULL) {
230     detector_->DeleteObjectModel(id);
231   }
232 }
233 
GetKeypointsPacked(uint16_t * const out_data,const float scale) const234 int ObjectTracker::GetKeypointsPacked(uint16_t* const out_data,
235                                       const float scale) const {
236   const FramePair& change = frame_pairs_[GetNthIndexFromEnd(0)];
237   uint16_t* curr_data = out_data;
238   int num_keypoints = 0;
239 
240   for (int i = 0; i < change.number_of_keypoints_; ++i) {
241     if (change.optical_flow_found_keypoint_[i]) {
242       ++num_keypoints;
243       const Point2f& point1 = change.frame1_keypoints_[i].pos_;
244       *curr_data++ = RealToFixed115(point1.x * scale);
245       *curr_data++ = RealToFixed115(point1.y * scale);
246 
247       const Point2f& point2 = change.frame2_keypoints_[i].pos_;
248       *curr_data++ = RealToFixed115(point2.x * scale);
249       *curr_data++ = RealToFixed115(point2.y * scale);
250     }
251   }
252 
253   return num_keypoints;
254 }
255 
256 
GetKeypoints(const bool only_found,float * const out_data) const257 int ObjectTracker::GetKeypoints(const bool only_found,
258                                 float* const out_data) const {
259   int curr_keypoint = 0;
260   const FramePair& change = frame_pairs_[GetNthIndexFromEnd(0)];
261 
262   for (int i = 0; i < change.number_of_keypoints_; ++i) {
263     if (!only_found || change.optical_flow_found_keypoint_[i]) {
264       const int base = curr_keypoint * kKeypointStep;
265       out_data[base + 0] = change.frame1_keypoints_[i].pos_.x;
266       out_data[base + 1] = change.frame1_keypoints_[i].pos_.y;
267 
268       out_data[base + 2] =
269           change.optical_flow_found_keypoint_[i] ? 1.0f : -1.0f;
270       out_data[base + 3] = change.frame2_keypoints_[i].pos_.x;
271       out_data[base + 4] = change.frame2_keypoints_[i].pos_.y;
272 
273       out_data[base + 5] = change.frame1_keypoints_[i].score_;
274       out_data[base + 6] = change.frame1_keypoints_[i].type_;
275       ++curr_keypoint;
276     }
277   }
278 
279   LOGV("Got %d keypoints.", curr_keypoint);
280 
281   return curr_keypoint;
282 }
283 
284 
// Moves |region| through the motion described by one frame pair: shifts it
// by the estimated translation and, when well-defined, applies the estimated
// scale.  Returns the adjusted box.
BoundingBox ObjectTracker::TrackBox(const BoundingBox& region,
                                    const FramePair& frame_pair) const {
  float shift_x;
  float shift_y;
  float scale_x;
  float scale_y;

  BoundingBox result(region);
  frame_pair.AdjustBox(&result == NULL ? result : result,  // see below
                       &shift_x, &shift_y, &scale_x, &scale_y);

  result.Shift(Point2f(shift_x, shift_y));

  // Only scale when both axis factors are strictly positive.
  if (scale_x > 0 && scale_y > 0) {
    result.Scale(scale_x, scale_y);
  }
  return result;
}
304 
// Tracks |region| forward in time from |timestamp| to the present by
// composing the per-frame-pair motion stored in the ring buffer.
BoundingBox ObjectTracker::TrackBox(const BoundingBox& region,
                                    const int64_t timestamp) const {
  CHECK_ALWAYS(timestamp > 0, "Timestamp too low! %lld", timestamp);
  CHECK_ALWAYS(timestamp <= curr_time_, "Timestamp is in the future!");

  // Anything that ended before the requested timestamp is of no concern to us.
  bool found_it = false;
  int num_frames_back = -1;
  // Walk backwards (newest pair first) until reaching a pair that ended at
  // or before |timestamp|; tracking then starts from the pair just after it.
  for (int i = 0; i < curr_num_frame_pairs_; ++i) {
    const FramePair& frame_pair =
        frame_pairs_[GetNthIndexFromEnd(i)];

    if (frame_pair.end_time_ <= timestamp) {
      num_frames_back = i - 1;

      if (num_frames_back > 0) {
        LOGV("Went %d out of %d frames before finding frame. (index: %d)",
             num_frames_back, curr_num_frame_pairs_, GetNthIndexFromEnd(i));
      }

      found_it = true;
      break;
    }
  }

  if (!found_it) {
    // The ring buffer does not reach back to |timestamp|.  num_frames_back
    // stays -1, so the loop below is skipped and |region| is returned
    // unchanged.
    LOGW("History did not go back far enough! %lld vs %lld",
         frame_pairs_[GetNthIndexFromEnd(0)].end_time_ -
         frame_pairs_[GetNthIndexFromStart(0)].end_time_,
         frame_pairs_[GetNthIndexFromEnd(0)].end_time_ - timestamp);
  }

  // Loop over all the frames in the queue, tracking the accumulated delta
  // of the point from frame to frame.  It's possible the point could
  // go out of frame, but keep tracking as best we can, using points near
  // the edge of the screen where it went out of bounds.
  BoundingBox tracked_box(region);
  for (int i = num_frames_back; i >= 0; --i) {
    const FramePair& frame_pair = frame_pairs_[GetNthIndexFromEnd(i)];
    SCHECK(frame_pair.end_time_ >= timestamp, "Frame timestamp was too early!");
    tracked_box = TrackBox(tracked_box, frame_pair);
  }
  return tracked_box;
}
349 
350 
// Converts a row-major 3x3 2d transformation matrix to a column-major 4x4
// 3d transformation matrix.
inline void Convert3x3To4x4(
    const float* const in_matrix, float* const out_matrix) {
  // Start from all zeros, then fill in the non-zero entries.
  for (int i = 0; i < 16; ++i) {
    out_matrix[i] = 0.0f;
  }

  // X column.
  out_matrix[0] = in_matrix[0];
  out_matrix[1] = in_matrix[3];

  // Y column.
  out_matrix[4] = in_matrix[1];
  out_matrix[5] = in_matrix[4];

  // Z passes through unchanged.
  out_matrix[10] = 1.0f;

  // Translation column (homogeneous w stays 1).
  out_matrix[12] = in_matrix[2];
  out_matrix[13] = in_matrix[5];
  out_matrix[15] = 1.0f;
}
379 
380 
// Renders the tracker's debug visualization with OpenGL ES 1.x fixed
// pipeline: tracked object boxes, optional pyramid/derivative debug images,
// and the detector's own overlay.  |frame_to_canvas| is a row-major 3x3 2d
// transform mapping frame coordinates onto the canvas.  No-op unless
// compiled with __RENDER_OPENGL__.
void ObjectTracker::Draw(const int canvas_width, const int canvas_height,
                         const float* const frame_to_canvas) const {
#ifdef __RENDER_OPENGL__
  glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);

  glMatrixMode(GL_PROJECTION);
  glLoadIdentity();

  glOrthof(0.0f, canvas_width, 0.0f, canvas_height, 0.0f, 1.0f);

  // To make Y go the right direction (0 at top of frame).
  glScalef(1.0f, -1.0f, 1.0f);
  glTranslatef(0.0f, -canvas_height, 0.0f);

  glMatrixMode(GL_MODELVIEW);
  glLoadIdentity();

  glPushMatrix();

  // Apply the frame to canvas transformation.
  static GLfloat transformation[16];
  Convert3x3To4x4(frame_to_canvas, transformation);
  glMultMatrixf(transformation);

  // Draw tracked object bounding boxes.
  for (TrackedObjectMap::const_iterator iter = objects_.begin();
    iter != objects_.end(); ++iter) {
    TrackedObject* tracked_object = iter->second;
    tracked_object->Draw();
  }

  // Debug-only: render every level of the sqrt(2) image pyramid.
  static const bool kRenderDebugPyramid = false;
  if (kRenderDebugPyramid) {
    glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    for (int i = 0; i < kNumPyramidLevels * 2; ++i) {
      Sprite(*frame1_->GetPyramidSqrt2Level(i)).Draw();
    }
  }

  // Debug-only: render the spatial X derivatives as grayscale images.
  static const bool kRenderDebugDerivative = false;
  if (kRenderDebugDerivative) {
    glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
    for (int i = 0; i < kNumPyramidLevels; ++i) {
      const Image<int32_t>& dx = *frame1_->GetSpatialX(i);
      Image<uint8_t> render_image(dx.GetWidth(), dx.GetHeight());
      for (int y = 0; y < dx.GetHeight(); ++y) {
        const int32_t* dx_ptr = dx[y];
        uint8_t* dst_ptr = render_image[y];
        for (int x = 0; x < dx.GetWidth(); ++x) {
          // Negate and clip the derivative into the displayable [0, 255]
          // range.
          *dst_ptr++ = Clip(-(*dx_ptr++), 0, 255);
        }
      }

      Sprite(render_image).Draw();
    }
  }

  if (detector_ != NULL) {
    glDisable(GL_CULL_FACE);
    detector_->Draw();
  }
  glPopMatrix();
#endif
}
445 
AddQuadrants(const BoundingBox & box,std::vector<BoundingBox> * boxes)446 static void AddQuadrants(const BoundingBox& box,
447                          std::vector<BoundingBox>* boxes) {
448   const Point2f center = box.GetCenter();
449 
450   float x1 = box.left_;
451   float x2 = center.x;
452   float x3 = box.right_;
453 
454   float y1 = box.top_;
455   float y2 = center.y;
456   float y3 = box.bottom_;
457 
458   // Upper left.
459   boxes->push_back(BoundingBox(x1, y1, x2, y2));
460 
461   // Upper right.
462   boxes->push_back(BoundingBox(x2, y1, x3, y2));
463 
464   // Bottom left.
465   boxes->push_back(BoundingBox(x1, y2, x2, y3));
466 
467   // Bottom right.
468   boxes->push_back(BoundingBox(x2, y2, x3, y3));
469 
470   // Whole thing.
471   boxes->push_back(box);
472 }
473 
ComputeKeypoints(const bool cached_ok)474 void ObjectTracker::ComputeKeypoints(const bool cached_ok) {
475   const FramePair& prev_change = frame_pairs_[GetNthIndexFromEnd(1)];
476   FramePair* const curr_change = &frame_pairs_[GetNthIndexFromEnd(0)];
477 
478   std::vector<BoundingBox> boxes;
479 
480   for (TrackedObjectMap::iterator object_iter = objects_.begin();
481        object_iter != objects_.end(); ++object_iter) {
482     BoundingBox box = object_iter->second->GetPosition();
483     box.Scale(config_->object_box_scale_factor_for_features,
484               config_->object_box_scale_factor_for_features);
485     AddQuadrants(box, &boxes);
486   }
487 
488   AddQuadrants(frame1_->GetImage()->GetContainingBox(), &boxes);
489 
490   keypoint_detector_.FindKeypoints(*frame1_, boxes, prev_change, curr_change);
491 }
492 
493 
// Given a vector of detections and a model, simply returns the Detection for
// that model with the highest correlation.
//
// Returns true and sets *match (possibly to NULL) when the detection can be
// acted upon; returns false when the detection overlaps a tracked object it
// is not allowed to replace (a collision), in which case the caller should
// discard it.
bool ObjectTracker::GetBestObjectForDetection(
    const Detection& detection, TrackedObject** match) const {
  TrackedObject* best_match = NULL;
  float best_overlap = -FLT_MAX;

  LOGV("Looking for matches in %zu objects!", objects_.size());
  for (TrackedObjectMap::const_iterator object_iter = objects_.begin();
      object_iter != objects_.end(); ++object_iter) {
    TrackedObject* const tracked_object = object_iter->second;

    const float overlap = tracked_object->GetPosition().PascalScore(
        detection.GetObjectBoundingBox());

    // Without spontaneous detections, a detection may only match an object
    // built from the same model; overlapping a different-model object is a
    // collision.
    if (!detector_->AllowSpontaneousDetections() &&
        (detection.GetObjectModel() != tracked_object->GetModel())) {
      if (overlap > 0.0f) {
        return false;
      }
      continue;
    }

    const float jump_distance =
        (tracked_object->GetPosition().GetCenter() -
         detection.GetObjectBoundingBox().GetCenter()).LengthSquared();

    const float allowed_distance =
        tracked_object->GetAllowableDistanceSquared();

    LOGV("Distance: %.2f, Allowed distance %.2f, Overlap: %.2f",
         jump_distance, allowed_distance, overlap);

    // TODO(andrewharp): No need to do this verification twice, eliminate
    // one of the score checks (the other being in OnDetection).
    // Accept only if the detection is close enough, improves on the best
    // overlap so far, and beats the object's current match score by at least
    // kMatchScoreBuffer.
    if (jump_distance < allowed_distance &&
        overlap > best_overlap &&
        tracked_object->GetMatchScore() + kMatchScoreBuffer <
        detection.GetMatchScore()) {
      best_match = tracked_object;
      best_overlap = overlap;
    } else if (overlap > 0.0f) {
      // Overlaps an object it does not qualify to update: collision.
      return false;
    }
  }

  *match = best_match;
  return true;
}
543 
544 
ProcessDetections(std::vector<Detection> * const detections)545 void ObjectTracker::ProcessDetections(
546     std::vector<Detection>* const detections) {
547   LOGV("Initial detection done, iterating over %zu detections now.",
548        detections->size());
549 
550   const bool spontaneous_detections_allowed =
551       detector_->AllowSpontaneousDetections();
552   for (std::vector<Detection>::const_iterator it = detections->begin();
553       it != detections->end(); ++it) {
554     const Detection& detection = *it;
555     SCHECK(frame2_->GetImage()->Contains(detection.GetObjectBoundingBox()),
556           "Frame does not contain bounding box!");
557 
558     TrackedObject* best_match = NULL;
559 
560     const bool no_collisions =
561         GetBestObjectForDetection(detection, &best_match);
562 
563     // Need to get a non-const version of the model, or create a new one if it
564     // wasn't given.
565     ObjectModelBase* model =
566         const_cast<ObjectModelBase*>(detection.GetObjectModel());
567 
568     if (best_match != NULL) {
569       if (model != best_match->GetModel()) {
570         CHECK_ALWAYS(detector_->AllowSpontaneousDetections(),
571             "Model for object changed but spontaneous detections not allowed!");
572       }
573       best_match->OnDetection(model,
574                               detection.GetObjectBoundingBox(),
575                               detection.GetMatchScore(),
576                               curr_time_, *frame2_);
577     } else if (no_collisions && spontaneous_detections_allowed) {
578       if (detection.GetMatchScore() > kMinimumMatchScore) {
579         LOGV("No match, adding it!");
580         const ObjectModelBase* model = detection.GetObjectModel();
581         std::ostringstream ss;
582         // TODO(andrewharp): Generate this in a more general fashion.
583         ss << "hand_" << num_detected_++;
584         std::string object_name = ss.str();
585         MaybeAddObject(object_name, *frame2_->GetImage(),
586                        detection.GetObjectBoundingBox(), model);
587       }
588     }
589   }
590 }
591 
592 
DetectTargets()593 void ObjectTracker::DetectTargets() {
594   // Detect all object model types that we're currently tracking.
595   std::vector<const ObjectModelBase*> object_models;
596   detector_->GetObjectModels(&object_models);
597   if (object_models.size() == 0) {
598     LOGV("No objects to search for, aborting.");
599     return;
600   }
601 
602   LOGV("Trying to detect %zu models", object_models.size());
603 
604   LOGV("Creating test vector!");
605   std::vector<BoundingSquare> positions;
606 
607   for (TrackedObjectMap::iterator object_iter = objects_.begin();
608       object_iter != objects_.end(); ++object_iter) {
609     TrackedObject* const tracked_object = object_iter->second;
610 
611 #if DEBUG_PREDATOR
612   positions.push_back(GetCenteredSquare(
613       frame2_->GetImage()->GetContainingBox(), 32.0f));
614 #else
615     const BoundingBox& position = tracked_object->GetPosition();
616 
617     const float square_size = MAX(
618         kScanMinSquareSize / (kLastKnownPositionScaleFactor *
619         kLastKnownPositionScaleFactor),
620         MIN(position.GetWidth(),
621         position.GetHeight())) / kLastKnownPositionScaleFactor;
622 
623     FillWithSquares(frame2_->GetImage()->GetContainingBox(),
624                     tracked_object->GetPosition(),
625                     square_size,
626                     kScanMinSquareSize,
627                     kLastKnownPositionScaleFactor,
628                     &positions);
629   }
630 #endif
631 
632   LOGV("Created test vector!");
633 
634   std::vector<Detection> detections;
635   LOGV("Detecting!");
636   detector_->Detect(positions, &detections);
637   LOGV("Found %zu detections", detections.size());
638 
639   TimeLog("Finished detection.");
640 
641   ProcessDetections(&detections);
642 
643   TimeLog("iterated over detections");
644 
645   LOGV("Done detecting!");
646 }
647 
648 
TrackObjects()649 void ObjectTracker::TrackObjects() {
650   // TODO(andrewharp): Correlation should be allowed to remove objects too.
651   const bool automatic_removal_allowed = detector_.get() != NULL ?
652       detector_->AllowSpontaneousDetections() : false;
653 
654   LOGV("Tracking %zu objects!", objects_.size());
655   std::vector<std::string> dead_objects;
656   for (TrackedObjectMap::iterator iter = objects_.begin();
657        iter != objects_.end(); iter++) {
658     TrackedObject* object = iter->second;
659     const BoundingBox tracked_position = TrackBox(
660         object->GetPosition(), frame_pairs_[GetNthIndexFromEnd(0)]);
661     object->UpdatePosition(tracked_position, curr_time_, *frame2_, false);
662 
663     if (automatic_removal_allowed &&
664         object->GetNumConsecutiveFramesBelowThreshold() >
665         kMaxNumDetectionFailures * 5) {
666       dead_objects.push_back(iter->first);
667     }
668   }
669 
670   if (detector_ != NULL && automatic_removal_allowed) {
671     for (std::vector<std::string>::iterator iter = dead_objects.begin();
672          iter != dead_objects.end(); iter++) {
673       LOGE("Removing object! %s", iter->c_str());
674       ForgetTarget(*iter);
675     }
676   }
677   TimeLog("Tracked all objects.");
678 
679   LOGV("%zu objects tracked!", objects_.size());
680 }
681 
682 }  // namespace tf_tracking
683