// ViSP — Visual Servoing Platform, version 3.7.0 — vpMbGenericTracker.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * Generic model-based tracker.
32 */
33
34#include <visp3/mbt/vpMbGenericTracker.h>
35
36#include <visp3/core/vpDisplay.h>
37#include <visp3/core/vpExponentialMap.h>
38#include <visp3/core/vpTrackingException.h>
39#include <visp3/core/vpIoTools.h>
40#include <visp3/mbt/vpMbtXmlGenericParser.h>
41
42#ifdef VISP_HAVE_NLOHMANN_JSON
43#include VISP_NLOHMANN_JSON(json.hpp)
44using json = nlohmann::json;
45#endif
46
52{
53 m_mapOfTrackers["Camera"] = new TrackerWrapper(EDGE_TRACKER);
54
55 // Add default camera transformation matrix
57
58 // Add default ponderation between each feature type
60
61#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
63#endif
64
67}
68
/*!
  Construct a multi-camera generic tracker with \e nbCameras cameras, all
  using the same feature type(s) \e trackerType.

  A single camera is named "Camera"; several cameras are named "Camera1",
  "Camera2", ... Each camera gets an identity camera-to-reference
  transformation by default.

  \param nbCameras : Number of cameras to use (must be > 0).
  \param trackerType : Bitmask of feature types (EDGE_TRACKER, KLT_TRACKER, ...)
  applied to every camera.

  \exception vpTrackingException::fatalError : If \e nbCameras is 0.
*/
vpMbGenericTracker::vpMbGenericTracker(unsigned int nbCameras, int trackerType)
// NOTE(review): the member-initializer list appears to be elided in this excerpt — verify against upstream.
{
  if (nbCameras == 0) {
    throw vpException(vpTrackingException::fatalError, "Cannot use no camera!");
  }
  else if (nbCameras == 1) {
    m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerType);

    // Add default camera transformation matrix
    // NOTE(review): the assignment statement appears to be missing from this excerpt (extraction gap).
  }
  else {
    for (unsigned int i = 1; i <= nbCameras; i++) {
      std::stringstream ss;
      ss << "Camera" << i; // cameras are 1-indexed: "Camera1", "Camera2", ...
      m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerType);

      // Add default camera transformation matrix
      // NOTE(review): the assignment statement appears to be missing from this excerpt (extraction gap).
    }

    // Set by default the reference camera to the first one
    // NOTE(review): the assignment statement appears to be missing from this excerpt (extraction gap).
  }

  // Add default ponderation between each feature type
  // NOTE(review): feature-factor assignments appear to be missing from this excerpt (extraction gap).

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
107
/*!
  Construct a multi-camera generic tracker, one camera per entry of
  \e trackerTypes. A single entry creates a camera named "Camera"; several
  entries create "Camera1", "Camera2", ... Each camera gets an identity
  camera-to-reference transformation and each feature type a weight of 1.

  \param trackerTypes : One feature-type bitmask per camera (must not be empty).

  \exception vpException::badValue : If \e trackerTypes is empty.
*/
vpMbGenericTracker::vpMbGenericTracker(const std::vector<int> &trackerTypes)
// NOTE(review): the member-initializer list appears to be elided in this excerpt — verify against upstream.
{
  if (trackerTypes.empty()) {
    throw vpException(vpException::badValue, "There is no camera!");
  }

  if (trackerTypes.size() == 1) {
    m_mapOfTrackers["Camera"] = new TrackerWrapper(trackerTypes[0]);

    // Add default camera transformation matrix
    m_mapOfCameraTransformationMatrix["Camera"] = vpHomogeneousMatrix();
  }
  else {
    // Camera names are 1-indexed ("Camera1", ...) while trackerTypes is 0-indexed.
    for (size_t i = 1; i <= trackerTypes.size(); i++) {
      std::stringstream ss;
      ss << "Camera" << i;
      m_mapOfTrackers[ss.str()] = new TrackerWrapper(trackerTypes[i - 1]);

      // Add default camera transformation matrix
      m_mapOfCameraTransformationMatrix[ss.str()] = vpHomogeneousMatrix();
    }

    // Set by default the reference camera to the first one
    m_referenceCameraName = m_mapOfTrackers.begin()->first;
  }

  // Add default ponderation between each feature type
  m_mapOfFeatureFactors[EDGE_TRACKER] = 1.0;

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  m_mapOfFeatureFactors[KLT_TRACKER] = 1.0;
#endif

  m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER] = 1.0;
  m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER] = 1.0;
}
147
/*!
  Construct a multi-camera generic tracker with explicit camera names, one
  tracker type per camera.

  \param cameraNames : Name of each camera (must not be empty).
  \param trackerTypes : Feature-type bitmask per camera; must have the same
  size as \e cameraNames.

  \exception vpTrackingException::badValue : If the two vectors differ in size
  or are empty.
*/
vpMbGenericTracker::vpMbGenericTracker(const std::vector<std::string> &cameraNames,
                                       const std::vector<int> &trackerTypes)
// NOTE(review): the member-initializer list appears to be elided in this excerpt — verify against upstream.
{
  if (cameraNames.size() != trackerTypes.size() || cameraNames.empty()) {
    throw vpException(vpTrackingException::badValue,
                      "cameraNames.size() != trackerTypes.size() || cameraNames.empty()");
  }

  for (size_t i = 0; i < cameraNames.size(); i++) {
    m_mapOfTrackers[cameraNames[i]] = new TrackerWrapper(trackerTypes[i]);

    // Add default camera transformation matrix
    // NOTE(review): the assignment statement appears to be missing from this excerpt (extraction gap).
  }

  // Set by default the reference camera to the first one
  m_referenceCameraName = m_mapOfTrackers.begin()->first;

  // Add default ponderation between each feature type
  // NOTE(review): feature-factor assignments appear to be missing from this excerpt (extraction gap).

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
#endif

}
179
181{
182 for (std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.begin(); it != m_mapOfTrackers.end();
183 ++it) {
184 delete it->second;
185 it->second = nullptr;
186 }
187}
188
207 const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam)
208{
209 double rawTotalProjectionError = 0.0;
210 unsigned int nbTotalFeaturesUsed = 0;
211
212 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
213 it != m_mapOfTrackers.end(); ++it) {
214 TrackerWrapper *tracker = it->second;
215
216 unsigned int nbFeaturesUsed = 0;
217 double curProjError = tracker->computeProjectionErrorImpl(I, cMo, cam, nbFeaturesUsed);
218
219 if (nbFeaturesUsed > 0) {
220 nbTotalFeaturesUsed += nbFeaturesUsed;
221 rawTotalProjectionError += curProjError;
222 }
223 }
224
225 if (nbTotalFeaturesUsed > 0) {
226 return vpMath::deg(rawTotalProjectionError / static_cast<double>(nbTotalFeaturesUsed));
227 }
228
229 return 90.0;
230}
231
250 const vpHomogeneousMatrix &_cMo,
251 const vpCameraParameters &_cam)
252{
254 vpImageConvert::convert(I_color, I); // FS: Shouldn't we use here m_I that was converted in track() ?
255
256 return computeCurrentProjectionError(I, _cMo, _cam);
257}
258
260{
261 if (computeProjError) {
262 double rawTotalProjectionError = 0.0;
263 unsigned int nbTotalFeaturesUsed = 0;
264
265 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
266 it != m_mapOfTrackers.end(); ++it) {
267 TrackerWrapper *tracker = it->second;
268
269 double curProjError = tracker->getProjectionError();
270 unsigned int nbFeaturesUsed = tracker->nbFeaturesForProjErrorComputation;
271
272 if (nbFeaturesUsed > 0) {
273 nbTotalFeaturesUsed += nbFeaturesUsed;
274 rawTotalProjectionError += (vpMath::rad(curProjError) * nbFeaturesUsed);
275 }
276 }
277
278 if (nbTotalFeaturesUsed > 0) {
279 projectionError = vpMath::deg(rawTotalProjectionError / static_cast<double>(nbTotalFeaturesUsed));
280 }
281 else {
282 projectionError = 90.0;
283 }
284 }
285 else {
286 projectionError = 90.0;
287 }
288}
289
/*!
  Run the virtual visual servoing (VVS) loop that refines the object pose
  \e m_cMo by iteratively minimizing the stacked feature residual of all
  per-camera trackers (edge, KLT, depth-normal, depth-dense features).

  Each iteration: recompute the stacked interaction matrix and residual,
  apply a Levenberg-Marquardt safeguard (restart from the previous pose if
  the error grew), re-weight every residual row with its robust weight and
  the per-feature-type factor, solve for the velocity twist, and propagate
  the updated pose to every camera through its transformation matrix.

  \param mapOfImages : One grayscale image per camera name.

  \exception vpTrackingException::notEnoughPointError : If fewer than 4
  residual rows are available after initialization.
*/
void vpMbGenericTracker::computeVVS(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
{
  computeVVSInit(mapOfImages);

  // At least 4 residual rows are needed to constrain the pose estimation.
  if (m_error.getRows() < 4) {
    throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
  }

  double normRes = 0;
  double normRes_1 = -1;
  unsigned int iter = 0;

  vpMatrix LTL;
  vpColVector LTR, v;
  vpColVector error_prev;

  double mu = m_initialMu;
  vpHomogeneousMatrix cMo_prev;

  bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
  if (isoJoIdentity)
    oJo.eye();

  // Covariance
  vpColVector W_true(m_error.getRows());
  vpMatrix L_true, LVJ_true;

  // Create the map of VelocityTwistMatrices
  std::map<std::string, vpVelocityTwistMatrix> mapOfVelocityTwist;
  for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = m_mapOfCameraTransformationMatrix.begin();
       it != m_mapOfCameraTransformationMatrix.end(); ++it) {
    // NOTE(review): the declaration of cVo appears to be missing from this excerpt (extraction gap) — verify upstream.
    cVo.buildFrom(it->second);
    mapOfVelocityTwist[it->first] = cVo;
  }

  // Per-feature-type ponderation factors applied on top of the robust weights.
  double factorEdge = m_mapOfFeatureFactors[EDGE_TRACKER];
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
  double factorKlt = m_mapOfFeatureFactors[KLT_TRACKER];
#endif
  double factorDepth = m_mapOfFeatureFactors[DEPTH_NORMAL_TRACKER];
  double factorDepthDense = m_mapOfFeatureFactors[DEPTH_DENSE_TRACKER];

  m_nb_feat_edge = 0;
  m_nb_feat_klt = 0;
  // NOTE(review): resets of m_nb_feat_depthNormal / m_nb_feat_depthDense appear to be missing from this excerpt.

  // Iterate until the residual norm stabilizes or the iteration budget is spent.
  while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
    computeVVSInteractionMatrixAndResidu(mapOfImages, mapOfVelocityTwist);

    bool reStartFromLastIncrement = false;
    computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
    if (reStartFromLastIncrement) {
      // LM safeguard rejected the last increment: roll every camera back to cMo_prev.
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * cMo_prev;

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        // Keep the KLT current-to-initial transformation consistent with the rolled-back pose.
        vpHomogeneousMatrix c_curr_tTc_curr0 =
          m_mapOfCameraTransformationMatrix[it->first] * cMo_prev * tracker->c0Mo.inverse();
        tracker->ctTc0 = c_curr_tTc_curr0;
#endif
      }
    }

    if (!reStartFromLastIncrement) {
      // NOTE(review): a statement (presumably computeVVSWeights();) appears to be missing from this excerpt.

      if (computeCovariance) {
        // Keep unweighted copies of L (and of L*V*J when oJo is not identity) for the covariance computation.
        L_true = m_L;
        if (!isoJoIdentity) {
          // NOTE(review): the declaration of cVo appears to be missing from this excerpt (extraction gap).
          cVo.buildFrom(m_cMo);
          LVJ_true = (m_L * (cVo * oJo));
        }
      }

      // NOTE(review): a declaration (presumably vpVelocityTwistMatrix cVo;) appears to be missing from this excerpt.
      if (iter == 0) {
        // If all the 6 dof should be estimated, we check if the interaction
        // matrix is full rank. If not we remove automatically the dof that
        // cannot be estimated. This is particularly useful when considering
        // circles (rank 5) and cylinders (rank 4)
        if (isoJoIdentity) {
          cVo.buildFrom(m_cMo);

          vpMatrix K; // kernel
          unsigned int rank = (m_L * cVo).kernel(K);
          if (rank == 0) {
            throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
          }

          if (rank != 6) {
            // Project out the unobservable dof: oJo spans the orthogonal complement of the kernel.
            vpMatrix I; // Identity
            I.eye(6);
            oJo = I - K.AtA();
            isoJoIdentity = false;
          }
        }
      }

      // Weighting: scale each residual row (and the matching L row) by its
      // robust weight times the per-feature-type factor, accumulating the
      // weighted squared error for the stop criterion.
      double num = 0;
      double den = 0;

      unsigned int start_index = 0;
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        if (tracker->m_trackerType & EDGE_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_edge.getRows(); i++) {
            // Edge rows additionally carry the per-sample m_factor.
            double wi = tracker->m_w_edge[i] * tracker->m_factor[i] * factorEdge;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_edge.getRows();
        }

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
        if (tracker->m_trackerType & KLT_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_klt.getRows(); i++) {
            double wi = tracker->m_w_klt[i] * factorKlt;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_klt.getRows();
        }
#endif

        if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_depthNormal.getRows(); i++) {
            double wi = tracker->m_w_depthNormal[i] * factorDepth;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_depthNormal.getRows();
        }

        if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
          for (unsigned int i = 0; i < tracker->m_error_depthDense.getRows(); i++) {
            double wi = tracker->m_w_depthDense[i] * factorDepthDense;
            W_true[start_index + i] = wi;
            m_weightedError[start_index + i] = wi * m_error[start_index + i];

            num += wi * vpMath::sqr(m_error[start_index + i]);
            den += wi;

            for (unsigned int j = 0; j < m_L.getCols(); j++) {
              m_L[start_index + i][j] *= wi;
            }
          }

          start_index += tracker->m_error_depthDense.getRows();
        }
      }

      normRes_1 = normRes;
      normRes = sqrt(num / den); // weighted RMS of the residual, used by the stop criterion

      computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);

      cMo_prev = m_cMo;

      // NOTE(review): the pose update from the velocity twist v (exponential map) appears
      // to be missing from this excerpt (extraction gap) — verify against upstream.

#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;

        vpHomogeneousMatrix c_curr_tTc_curr0 =
          m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
        tracker->ctTc0 = c_curr_tTc_curr0;
      }
#endif
      // Update cMo
      for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
           it != m_mapOfTrackers.end(); ++it) {
        TrackerWrapper *tracker = it->second;
        tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
      }
    }

    iter++;
  }

  // Update features number
  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;
    if (tracker->m_trackerType & EDGE_TRACKER) {
      m_nb_feat_edge += tracker->m_error_edge.size();
    }
#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (tracker->m_trackerType & KLT_TRACKER) {
      m_nb_feat_klt += tracker->m_error_klt.size();
    }
#endif
    if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
      m_nb_feat_depthNormal += tracker->m_error_depthNormal.size();
    }
    if (tracker->m_trackerType & DEPTH_DENSE_TRACKER) {
      m_nb_feat_depthDense += tracker->m_error_depthDense.size();
    }
  }

  computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);

  for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
       it != m_mapOfTrackers.end(); ++it) {
    TrackerWrapper *tracker = it->second;

    if (tracker->m_trackerType & EDGE_TRACKER) {
      tracker->updateMovingEdgeWeights();
    }
  }
}
535
537{
538 throw vpException(vpException::fatalError, "vpMbGenericTracker::computeVVSInit() should not be called!");
539}
540
541void vpMbGenericTracker::computeVVSInit(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
542{
543 unsigned int nbFeatures = 0;
544
545 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
546 it != m_mapOfTrackers.end(); ++it) {
547 TrackerWrapper *tracker = it->second;
548 tracker->computeVVSInit(mapOfImages[it->first]);
549
550 nbFeatures += tracker->m_error.getRows();
551 }
552
553 m_L.resize(nbFeatures, 6, false, false);
554 m_error.resize(nbFeatures, false);
555
556 m_weightedError.resize(nbFeatures, false);
557 m_w.resize(nbFeatures, false);
558 m_w = 1;
559}
560
562{
563 throw vpException(vpException::fatalError, "vpMbGenericTracker::"
564 "computeVVSInteractionMatrixAndR"
565 "esidu() should not be called");
566}
567
569 std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
570 std::map<std::string, vpVelocityTwistMatrix> &mapOfVelocityTwist)
571{
572 unsigned int start_index = 0;
573
574 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
575 it != m_mapOfTrackers.end(); ++it) {
576 TrackerWrapper *tracker = it->second;
577
578 tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
579#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
580 vpHomogeneousMatrix c_curr_tTc_curr0 =
581 m_mapOfCameraTransformationMatrix[it->first] * m_cMo * tracker->c0Mo.inverse();
582 tracker->ctTc0 = c_curr_tTc_curr0;
583#endif
584
585 tracker->computeVVSInteractionMatrixAndResidu(mapOfImages[it->first]);
586
587 if (tracker->m_L.getRows() > 0) {
588 m_L.insert(tracker->m_L * mapOfVelocityTwist[it->first], start_index, 0);
589 m_error.insert(start_index, tracker->m_error);
590
591 start_index += tracker->m_error.getRows();
592 }
593 }
594}
595
597{
598 unsigned int start_index = 0;
599
600 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
601 it != m_mapOfTrackers.end(); ++it) {
602 TrackerWrapper *tracker = it->second;
603 tracker->computeVVSWeights();
604
605 m_w.insert(start_index, tracker->m_w);
606 start_index += tracker->m_w.getRows();
607 }
608}
609
624 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
625 bool displayFullModel)
626{
627 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
628 if (it != m_mapOfTrackers.end()) {
629 TrackerWrapper *tracker = it->second;
630 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
631 }
632 else {
633 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
634 }
635}
636
651 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
652 bool displayFullModel)
653{
654 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
655 if (it != m_mapOfTrackers.end()) {
656 TrackerWrapper *tracker = it->second;
657 tracker->display(I, cMo, cam, col, thickness, displayFullModel);
658 }
659 else {
660 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
661 }
662}
663
681 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo,
682 const vpCameraParameters &cam1, const vpCameraParameters &cam2, const vpColor &color,
683 unsigned int thickness, bool displayFullModel)
684{
685 if (m_mapOfTrackers.size() == 2) {
686 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
687 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
688 ++it;
689
690 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
691 }
692 else {
693 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
694 << std::endl;
695 }
696}
697
715 const vpHomogeneousMatrix &c2Mo, const vpCameraParameters &cam1,
716 const vpCameraParameters &cam2, const vpColor &color, unsigned int thickness,
717 bool displayFullModel)
718{
719 if (m_mapOfTrackers.size() == 2) {
720 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
721 it->second->display(I1, c1Mo, cam1, color, thickness, displayFullModel);
722 ++it;
723
724 it->second->display(I2, c2Mo, cam2, color, thickness, displayFullModel);
725 }
726 else {
727 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
728 << std::endl;
729 }
730}
731
743void vpMbGenericTracker::display(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
744 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
745 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
746 const vpColor &col, unsigned int thickness, bool displayFullModel)
747{
748 // Display only for the given images
749 for (std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img = mapOfImages.begin();
750 it_img != mapOfImages.end(); ++it_img) {
751 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
752 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
753 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
754
755 if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
756 it_cam != mapOfCameraParameters.end()) {
757 TrackerWrapper *tracker = it_tracker->second;
758 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
759 }
760 else {
761 std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
762 }
763 }
764}
765
777void vpMbGenericTracker::display(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfImages,
778 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses,
779 const std::map<std::string, vpCameraParameters> &mapOfCameraParameters,
780 const vpColor &col, unsigned int thickness, bool displayFullModel)
781{
782 // Display only for the given images
783 for (std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfImages.begin();
784 it_img != mapOfImages.end(); ++it_img) {
785 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it_img->first);
786 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(it_img->first);
787 std::map<std::string, vpCameraParameters>::const_iterator it_cam = mapOfCameraParameters.find(it_img->first);
788
789 if (it_tracker != m_mapOfTrackers.end() && it_camPose != mapOfCameraPoses.end() &&
790 it_cam != mapOfCameraParameters.end()) {
791 TrackerWrapper *tracker = it_tracker->second;
792 tracker->display(*it_img->second, it_camPose->second, it_cam->second, col, thickness, displayFullModel);
793 }
794 else {
795 std::cerr << "Missing elements for image:" << it_img->first << "!" << std::endl;
796 }
797 }
798}
799
805std::vector<std::string> vpMbGenericTracker::getCameraNames() const
806{
807 std::vector<std::string> cameraNames;
808
809 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
810 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
811 cameraNames.push_back(it_tracker->first);
812 }
813
814 return cameraNames;
815}
816
821
831{
832 if (m_mapOfTrackers.size() == 2) {
833 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
834 it->second->getCameraParameters(cam1);
835 ++it;
836
837 it->second->getCameraParameters(cam2);
838 }
839 else {
840 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
841 << std::endl;
842 }
843}
844
850void vpMbGenericTracker::getCameraParameters(std::map<std::string, vpCameraParameters> &mapOfCameraParameters) const
851{
852 // Clear the input map
853 mapOfCameraParameters.clear();
854
855 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
856 it != m_mapOfTrackers.end(); ++it) {
858 it->second->getCameraParameters(cam_);
859 mapOfCameraParameters[it->first] = cam_;
860 }
861}
862
869std::map<std::string, int> vpMbGenericTracker::getCameraTrackerTypes() const
870{
871 std::map<std::string, int> trackingTypes;
872
873 TrackerWrapper *traker;
874 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
875 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
876 traker = it_tracker->second;
877 trackingTypes[it_tracker->first] = traker->getTrackerType();
878 }
879
880 return trackingTypes;
881}
882
891void vpMbGenericTracker::getClipping(unsigned int &clippingFlag1, unsigned int &clippingFlag2) const
892{
893 if (m_mapOfTrackers.size() == 2) {
894 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
895 clippingFlag1 = it->second->getClipping();
896 ++it;
897
898 clippingFlag2 = it->second->getClipping();
899 }
900 else {
901 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
902 << std::endl;
903 }
904}
905
911void vpMbGenericTracker::getClipping(std::map<std::string, unsigned int> &mapOfClippingFlags) const
912{
913 mapOfClippingFlags.clear();
914
915 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
916 it != m_mapOfTrackers.end(); ++it) {
917 TrackerWrapper *tracker = it->second;
918 mapOfClippingFlags[it->first] = tracker->getClipping();
919 }
920}
921
928{
929 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
930 if (it != m_mapOfTrackers.end()) {
931 return it->second->getFaces();
932 }
933
934 std::cerr << "The reference camera: " << m_referenceCameraName << " cannot be found!" << std::endl;
935 return faces;
936}
937
944{
945 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
946 if (it != m_mapOfTrackers.end()) {
947 return it->second->getFaces();
948 }
949
950 std::cerr << "The camera: " << cameraName << " cannot be found!" << std::endl;
951 return faces;
952}
953
954#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
958std::list<vpMbtDistanceCircle *> &vpMbGenericTracker::getFeaturesCircle()
959{
960 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
961 if (it != m_mapOfTrackers.end()) {
962 TrackerWrapper *tracker = it->second;
963 return tracker->getFeaturesCircle();
964 }
965 else {
966 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
967 m_referenceCameraName.c_str());
968 }
969}
970
974std::list<vpMbtDistanceKltCylinder *> &vpMbGenericTracker::getFeaturesKltCylinder()
975{
976 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
977 if (it != m_mapOfTrackers.end()) {
978 TrackerWrapper *tracker = it->second;
979 return tracker->getFeaturesKltCylinder();
980 }
981 else {
982 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
983 m_referenceCameraName.c_str());
984 }
985}
986
990std::list<vpMbtDistanceKltPoints *> &vpMbGenericTracker::getFeaturesKlt()
991{
992 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
993 if (it != m_mapOfTrackers.end()) {
994 TrackerWrapper *tracker = it->second;
995 return tracker->getFeaturesKlt();
996 }
997 else {
998 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
999 m_referenceCameraName.c_str());
1000 }
1001}
1002#endif
1003
1030std::vector<std::vector<double> > vpMbGenericTracker::getFeaturesForDisplay()
1031{
1032 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1033
1034 if (it != m_mapOfTrackers.end()) {
1035 return it->second->getFeaturesForDisplay();
1036 }
1037 else {
1038 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1039 }
1040
1041 return std::vector<std::vector<double> >();
1042}
1043
1069void vpMbGenericTracker::getFeaturesForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfFeatures)
1070{
1071 // Clear the input map
1072 mapOfFeatures.clear();
1073
1074 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1075 it != m_mapOfTrackers.end(); ++it) {
1076 mapOfFeatures[it->first] = it->second->getFeaturesForDisplay();
1077 }
1078}
1079
1090
1091#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
1100std::vector<vpImagePoint> vpMbGenericTracker::getKltImagePoints() const
1101{
1102 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1103 if (it != m_mapOfTrackers.end()) {
1104 TrackerWrapper *tracker = it->second;
1105 return tracker->getKltImagePoints();
1106 }
1107 else {
1108 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1109 }
1110
1111 return std::vector<vpImagePoint>();
1112}
1113
1122std::map<int, vpImagePoint> vpMbGenericTracker::getKltImagePointsWithId() const
1123{
1124 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1125 if (it != m_mapOfTrackers.end()) {
1126 TrackerWrapper *tracker = it->second;
1127 return tracker->getKltImagePointsWithId();
1128 }
1129 else {
1130 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1131 }
1132
1133 return std::map<int, vpImagePoint>();
1134}
1135
1142{
1143 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1144 if (it != m_mapOfTrackers.end()) {
1145 TrackerWrapper *tracker = it->second;
1146 return tracker->getKltMaskBorder();
1147 }
1148 else {
1149 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1150 }
1151
1152 return 0;
1153}
1154
1161{
1162 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1163 if (it != m_mapOfTrackers.end()) {
1164 TrackerWrapper *tracker = it->second;
1165 return tracker->getKltNbPoints();
1166 }
1167 else {
1168 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1169 }
1170
1171 return 0;
1172}
1173
1180{
1181 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
1182
1183 if (it_tracker != m_mapOfTrackers.end()) {
1184 TrackerWrapper *tracker;
1185 tracker = it_tracker->second;
1186 return tracker->getKltOpencv();
1187 }
1188 else {
1189 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1190 }
1191
1192 return vpKltOpencv();
1193}
1194
1204{
1205 if (m_mapOfTrackers.size() == 2) {
1206 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1207 klt1 = it->second->getKltOpencv();
1208 ++it;
1209
1210 klt2 = it->second->getKltOpencv();
1211 }
1212 else {
1213 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1214 << std::endl;
1215 }
1216}
1217
1223void vpMbGenericTracker::getKltOpencv(std::map<std::string, vpKltOpencv> &mapOfKlts) const
1224{
1225 mapOfKlts.clear();
1226
1227 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1228 it != m_mapOfTrackers.end(); ++it) {
1229 TrackerWrapper *tracker = it->second;
1230 mapOfKlts[it->first] = tracker->getKltOpencv();
1231 }
1232}
1233
1239std::vector<cv::Point2f> vpMbGenericTracker::getKltPoints() const
1240{
1241 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1242 if (it != m_mapOfTrackers.end()) {
1243 TrackerWrapper *tracker = it->second;
1244 return tracker->getKltPoints();
1245 }
1246 else {
1247 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1248 }
1249
1250 return std::vector<cv::Point2f>();
1251}
1252
1260#endif
1261
1274void vpMbGenericTracker::getLcircle(std::list<vpMbtDistanceCircle *> &circlesList, unsigned int level) const
1275{
1276 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1277 if (it != m_mapOfTrackers.end()) {
1278 it->second->getLcircle(circlesList, level);
1279 }
1280 else {
1281 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1282 }
1283}
1284
1298void vpMbGenericTracker::getLcircle(const std::string &cameraName, std::list<vpMbtDistanceCircle *> &circlesList,
1299 unsigned int level) const
1300{
1301 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1302 if (it != m_mapOfTrackers.end()) {
1303 it->second->getLcircle(circlesList, level);
1304 }
1305 else {
1306 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1307 }
1308}
1309
1322void vpMbGenericTracker::getLcylinder(std::list<vpMbtDistanceCylinder *> &cylindersList, unsigned int level) const
1323{
1324 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1325 if (it != m_mapOfTrackers.end()) {
1326 it->second->getLcylinder(cylindersList, level);
1327 }
1328 else {
1329 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1330 }
1331}
1332
1346void vpMbGenericTracker::getLcylinder(const std::string &cameraName, std::list<vpMbtDistanceCylinder *> &cylindersList,
1347 unsigned int level) const
1348{
1349 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1350 if (it != m_mapOfTrackers.end()) {
1351 it->second->getLcylinder(cylindersList, level);
1352 }
1353 else {
1354 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1355 }
1356}
1357
1370void vpMbGenericTracker::getLline(std::list<vpMbtDistanceLine *> &linesList, unsigned int level) const
1371{
1372 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1373
1374 if (it != m_mapOfTrackers.end()) {
1375 it->second->getLline(linesList, level);
1376 }
1377 else {
1378 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1379 }
1380}
1381
1395void vpMbGenericTracker::getLline(const std::string &cameraName, std::list<vpMbtDistanceLine *> &linesList,
1396 unsigned int level) const
1397{
1398 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1399 if (it != m_mapOfTrackers.end()) {
1400 it->second->getLline(linesList, level);
1401 }
1402 else {
1403 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1404 }
1405}
1406
1436std::vector<std::vector<double> > vpMbGenericTracker::getModelForDisplay(unsigned int width, unsigned int height,
1437 const vpHomogeneousMatrix &cMo,
1438 const vpCameraParameters &cam,
1439 bool displayFullModel)
1440{
1441 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1442
1443 if (it != m_mapOfTrackers.end()) {
1444 return it->second->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1445 }
1446 else {
1447 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1448 }
1449
1450 return std::vector<std::vector<double> >();
1451}
1452
1478void vpMbGenericTracker::getModelForDisplay(std::map<std::string, std::vector<std::vector<double> > > &mapOfModels,
1479 const std::map<std::string, unsigned int> &mapOfwidths,
1480 const std::map<std::string, unsigned int> &mapOfheights,
1481 const std::map<std::string, vpHomogeneousMatrix> &mapOfcMos,
1482 const std::map<std::string, vpCameraParameters> &mapOfCams,
1483 bool displayFullModel)
1484{
1485 // Clear the input map
1486 mapOfModels.clear();
1487
1488 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1489 it != m_mapOfTrackers.end(); ++it) {
1490 std::map<std::string, unsigned int>::const_iterator findWidth = mapOfwidths.find(it->first);
1491 std::map<std::string, unsigned int>::const_iterator findHeight = mapOfheights.find(it->first);
1492 std::map<std::string, vpHomogeneousMatrix>::const_iterator findcMo = mapOfcMos.find(it->first);
1493 std::map<std::string, vpCameraParameters>::const_iterator findCam = mapOfCams.find(it->first);
1494
1495 if (findWidth != mapOfwidths.end() && findHeight != mapOfheights.end() && findcMo != mapOfcMos.end() &&
1496 findCam != mapOfCams.end()) {
1497 mapOfModels[it->first] = it->second->getModelForDisplay(findWidth->second, findHeight->second, findcMo->second,
1498 findCam->second, displayFullModel);
1499 }
1500 }
1501}
1502
1509{
1510 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1511
1512 if (it != m_mapOfTrackers.end()) {
1513 return it->second->getMovingEdge();
1514 }
1515 else {
1516 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1517 }
1518
1519 return vpMe();
1520}
1521
1531{
1532 if (m_mapOfTrackers.size() == 2) {
1533 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1534 it->second->getMovingEdge(me1);
1535 ++it;
1536
1537 it->second->getMovingEdge(me2);
1538 }
1539 else {
1540 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1541 << std::endl;
1542 }
1543}
1544
1550void vpMbGenericTracker::getMovingEdge(std::map<std::string, vpMe> &mapOfMovingEdges) const
1551{
1552 mapOfMovingEdges.clear();
1553
1554 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1555 it != m_mapOfTrackers.end(); ++it) {
1556 TrackerWrapper *tracker = it->second;
1557 mapOfMovingEdges[it->first] = tracker->getMovingEdge();
1558 }
1559}
1560
1578unsigned int vpMbGenericTracker::getNbPoints(unsigned int level) const
1579{
1580 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1581
1582 unsigned int nbGoodPoints = 0;
1583 if (it != m_mapOfTrackers.end()) {
1584
1585 nbGoodPoints = it->second->getNbPoints(level);
1586 }
1587 else {
1588 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1589 }
1590
1591 return nbGoodPoints;
1592}
1593
1608void vpMbGenericTracker::getNbPoints(std::map<std::string, unsigned int> &mapOfNbPoints, unsigned int level) const
1609{
1610 mapOfNbPoints.clear();
1611
1612 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1613 it != m_mapOfTrackers.end(); ++it) {
1614 TrackerWrapper *tracker = it->second;
1615 mapOfNbPoints[it->first] = tracker->getNbPoints(level);
1616 }
1617}
1618
1625{
1626 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1627 if (it != m_mapOfTrackers.end()) {
1628 return it->second->getNbPolygon();
1629 }
1630
1631 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1632 return 0;
1633}
1634
1640void vpMbGenericTracker::getNbPolygon(std::map<std::string, unsigned int> &mapOfNbPolygons) const
1641{
1642 mapOfNbPolygons.clear();
1643
1644 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1645 it != m_mapOfTrackers.end(); ++it) {
1646 TrackerWrapper *tracker = it->second;
1647 mapOfNbPolygons[it->first] = tracker->getNbPolygon();
1648 }
1649}
1650
1662{
1663 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1664 if (it != m_mapOfTrackers.end()) {
1665 return it->second->getPolygon(index);
1666 }
1667
1668 std::cerr << "The reference camera: " << m_referenceCameraName << " does not exist!" << std::endl;
1669 return nullptr;
1670}
1671
1683vpMbtPolygon *vpMbGenericTracker::getPolygon(const std::string &cameraName, unsigned int index)
1684{
1685 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(cameraName);
1686 if (it != m_mapOfTrackers.end()) {
1687 return it->second->getPolygon(index);
1688 }
1689
1690 std::cerr << "The camera: " << cameraName << " does not exist!" << std::endl;
1691 return nullptr;
1692}
1693
1709std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > >
1710vpMbGenericTracker::getPolygonFaces(bool orderPolygons, bool useVisibility, bool clipPolygon)
1711{
1712 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces;
1713
1714 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1715 if (it != m_mapOfTrackers.end()) {
1716 TrackerWrapper *tracker = it->second;
1717 polygonFaces = tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1718 }
1719 else {
1720 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
1721 }
1722
1723 return polygonFaces;
1724}
1725
1743void vpMbGenericTracker::getPolygonFaces(std::map<std::string, std::vector<vpPolygon> > &mapOfPolygons,
1744 std::map<std::string, std::vector<std::vector<vpPoint> > > &mapOfPoints,
1745 bool orderPolygons, bool useVisibility, bool clipPolygon)
1746{
1747 mapOfPolygons.clear();
1748 mapOfPoints.clear();
1749
1750 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1751 it != m_mapOfTrackers.end(); ++it) {
1752 TrackerWrapper *tracker = it->second;
1753 std::pair<std::vector<vpPolygon>, std::vector<std::vector<vpPoint> > > polygonFaces =
1754 tracker->getPolygonFaces(orderPolygons, useVisibility, clipPolygon);
1755
1756 mapOfPolygons[it->first] = polygonFaces.first;
1757 mapOfPoints[it->first] = polygonFaces.second;
1758 }
1759}
1760
1762
1772{
1773 if (m_mapOfTrackers.size() == 2) {
1774 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1775 it->second->getPose(c1Mo);
1776 ++it;
1777
1778 it->second->getPose(c2Mo);
1779 }
1780 else {
1781 std::cerr << "The tracker is not set as a stereo configuration! There are " << m_mapOfTrackers.size() << " cameras!"
1782 << std::endl;
1783 }
1784}
1785
1791void vpMbGenericTracker::getPose(std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses) const
1792{
1793 // Clear the map
1794 mapOfCameraPoses.clear();
1795
1796 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1797 it != m_mapOfTrackers.end(); ++it) {
1798 TrackerWrapper *tracker = it->second;
1799 tracker->getPose(mapOfCameraPoses[it->first]);
1800 }
1801}
1802
1807
1812{
1813 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
1814 if (it != m_mapOfTrackers.end()) {
1815 TrackerWrapper *tracker = it->second;
1816 return tracker->getTrackerType();
1817 }
1818 else {
1819 throw vpException(vpTrackingException::badValue, "Cannot find the reference camera: %s!",
1820 m_referenceCameraName.c_str());
1821 }
1822}
1823
1825{
1826 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1827 it != m_mapOfTrackers.end(); ++it) {
1828 TrackerWrapper *tracker = it->second;
1829 tracker->m_cMo = m_mapOfCameraTransformationMatrix[it->first] * m_cMo;
1830 tracker->init(I);
1831 }
1832}
1833
// Part of the vpMbTracker model-loading interface. On the generic
// front-end this entry point must never be reached: it always throws.
void vpMbGenericTracker::initCircle(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const vpPoint & /*p3*/,
                                    double /*radius*/, int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCircle() should not be called!");
}
1839
1840#ifdef VISP_HAVE_MODULE_GUI
1841
1885 const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1886 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1887{
1888 if (m_mapOfTrackers.size() == 2) {
1889 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1890 TrackerWrapper *tracker = it->second;
1891 tracker->initClick(I1, initFile1, displayHelp, T1);
1892
1893 ++it;
1894
1895 tracker = it->second;
1896 tracker->initClick(I2, initFile2, displayHelp, T2);
1897
1899 if (it != m_mapOfTrackers.end()) {
1900 tracker = it->second;
1901
1902 // Set the reference cMo
1903 tracker->getPose(m_cMo);
1904 }
1905 }
1906 else {
1908 "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1909 }
1910}
1911
1955 const std::string &initFile1, const std::string &initFile2, bool displayHelp,
1956 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
1957{
1958 if (m_mapOfTrackers.size() == 2) {
1959 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
1960 TrackerWrapper *tracker = it->second;
1961 tracker->initClick(I_color1, initFile1, displayHelp, T1);
1962
1963 ++it;
1964
1965 tracker = it->second;
1966 tracker->initClick(I_color2, initFile2, displayHelp, T2);
1967
1969 if (it != m_mapOfTrackers.end()) {
1970 tracker = it->second;
1971
1972 // Set the reference cMo
1973 tracker->getPose(m_cMo);
1974 }
1975 }
1976 else {
1978 "Cannot initClick()! Require two cameras but there are %d cameras!", m_mapOfTrackers.size());
1979 }
1980}
1981
2024void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2025 const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2026 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2027{
2028 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2029 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2030 mapOfImages.find(m_referenceCameraName);
2031 std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2032
2033 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2034 TrackerWrapper *tracker = it_tracker->second;
2035 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2036 if (it_T != mapOfT.end())
2037 tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2038 else
2039 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2040 tracker->getPose(m_cMo);
2041 }
2042 else {
2043 throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2044 }
2045
2046 // Vector of missing initFile for cameras
2047 std::vector<std::string> vectorOfMissingCameraPoses;
2048
2049 // Set pose for the specified cameras
2050 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2051 if (it_tracker->first != m_referenceCameraName) {
2052 it_img = mapOfImages.find(it_tracker->first);
2053 it_initFile = mapOfInitFiles.find(it_tracker->first);
2054
2055 if (it_img != mapOfImages.end() && it_initFile != mapOfInitFiles.end()) {
2056 // InitClick for the current camera
2057 TrackerWrapper *tracker = it_tracker->second;
2058 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2059 }
2060 else {
2061 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2062 }
2063 }
2064 }
2065
2066 // Init for cameras that do not have an initFile
2067 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2068 it != vectorOfMissingCameraPoses.end(); ++it) {
2069 it_img = mapOfImages.find(*it);
2070 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2072
2073 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2074 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2075 m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2076 m_mapOfTrackers[*it]->init(*it_img->second);
2077 }
2078 else {
2080 "Missing image or missing camera transformation "
2081 "matrix! Cannot set the pose for camera: %s!",
2082 it->c_str());
2083 }
2084 }
2085}
2086
2129void vpMbGenericTracker::initClick(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2130 const std::map<std::string, std::string> &mapOfInitFiles, bool displayHelp,
2131 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
2132{
2133 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2134 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2135 std::map<std::string, std::string>::const_iterator it_initFile = mapOfInitFiles.find(m_referenceCameraName);
2136
2137 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2138 TrackerWrapper *tracker = it_tracker->second;
2139 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
2140 if (it_T != mapOfT.end())
2141 tracker->initClick(*it_img->second, it_initFile->second, displayHelp, it_T->second);
2142 else
2143 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2144 tracker->getPose(m_cMo);
2145 }
2146 else {
2147 throw vpException(vpTrackingException::initializationError, "Cannot initClick for the reference camera!");
2148 }
2149
2150 // Vector of missing initFile for cameras
2151 std::vector<std::string> vectorOfMissingCameraPoses;
2152
2153 // Set pose for the specified cameras
2154 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2155 if (it_tracker->first != m_referenceCameraName) {
2156 it_img = mapOfColorImages.find(it_tracker->first);
2157 it_initFile = mapOfInitFiles.find(it_tracker->first);
2158
2159 if (it_img != mapOfColorImages.end() && it_initFile != mapOfInitFiles.end()) {
2160 // InitClick for the current camera
2161 TrackerWrapper *tracker = it_tracker->second;
2162 tracker->initClick(*it_img->second, it_initFile->second, displayHelp);
2163 }
2164 else {
2165 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2166 }
2167 }
2168 }
2169
2170 // Init for cameras that do not have an initFile
2171 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2172 it != vectorOfMissingCameraPoses.end(); ++it) {
2173 it_img = mapOfColorImages.find(*it);
2174 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2176
2177 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2178 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2179 m_mapOfTrackers[*it]->m_cMo = cCurrentMo;
2180 vpImageConvert::convert(*it_img->second, m_mapOfTrackers[*it]->m_I);
2181 m_mapOfTrackers[*it]->init(m_mapOfTrackers[*it]->m_I);
2182 }
2183 else {
2185 "Missing image or missing camera transformation "
2186 "matrix! Cannot set the pose for camera: %s!",
2187 it->c_str());
2188 }
2189 }
2190}
2191#endif
2192
// Part of the vpMbTracker model-loading interface. On the generic
// front-end this entry point must never be reached: it always throws.
void vpMbGenericTracker::initCylinder(const vpPoint & /*p1*/, const vpPoint & /*p2*/, const double /*radius*/,
                                      const int /*idFace*/, const std::string & /*name*/)
{
  throw vpException(vpException::fatalError, "vpMbGenericTracker::initCylinder() should not be called!");
}
2198
2200{
2201 throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromCorners() should not be called!");
2202}
2203
2205{
2206 throw vpException(vpException::fatalError, "vpMbGenericTracker::initFaceFromLines() should not be called!");
2207}
2208
2239 const std::string &initFile1, const std::string &initFile2)
2240{
2241 if (m_mapOfTrackers.size() == 2) {
2242 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2243 TrackerWrapper *tracker = it->second;
2244 tracker->initFromPoints(I1, initFile1);
2245
2246 ++it;
2247
2248 tracker = it->second;
2249 tracker->initFromPoints(I2, initFile2);
2250
2252 if (it != m_mapOfTrackers.end()) {
2253 tracker = it->second;
2254
2255 // Set the reference cMo
2256 tracker->getPose(m_cMo);
2257
2258 // Set the reference camera parameters
2259 tracker->getCameraParameters(m_cam);
2260 }
2261 }
2262 else {
2264 "Cannot initFromPoints()! Require two cameras but "
2265 "there are %d cameras!",
2266 m_mapOfTrackers.size());
2267 }
2268}
2269
2300 const std::string &initFile1, const std::string &initFile2)
2301{
2302 if (m_mapOfTrackers.size() == 2) {
2303 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2304 TrackerWrapper *tracker = it->second;
2305 tracker->initFromPoints(I_color1, initFile1);
2306
2307 ++it;
2308
2309 tracker = it->second;
2310 tracker->initFromPoints(I_color2, initFile2);
2311
2313 if (it != m_mapOfTrackers.end()) {
2314 tracker = it->second;
2315
2316 // Set the reference cMo
2317 tracker->getPose(m_cMo);
2318
2319 // Set the reference camera parameters
2320 tracker->getCameraParameters(m_cam);
2321 }
2322 }
2323 else {
2325 "Cannot initFromPoints()! Require two cameras but "
2326 "there are %d cameras!",
2327 m_mapOfTrackers.size());
2328 }
2329}
2330
2331void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2332 const std::map<std::string, std::string> &mapOfInitPoints)
2333{
2334 // Set the reference cMo
2335 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2336 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2337 mapOfImages.find(m_referenceCameraName);
2338 std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2339
2340 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2341 TrackerWrapper *tracker = it_tracker->second;
2342 tracker->initFromPoints(*it_img->second, it_initPoints->second);
2343 tracker->getPose(m_cMo);
2344 }
2345 else {
2346 throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2347 }
2348
2349 // Vector of missing initPoints for cameras
2350 std::vector<std::string> vectorOfMissingCameraPoints;
2351
2352 // Set pose for the specified cameras
2353 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2354 it_img = mapOfImages.find(it_tracker->first);
2355 it_initPoints = mapOfInitPoints.find(it_tracker->first);
2356
2357 if (it_img != mapOfImages.end() && it_initPoints != mapOfInitPoints.end()) {
2358 // Set pose
2359 it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2360 }
2361 else {
2362 vectorOfMissingCameraPoints.push_back(it_tracker->first);
2363 }
2364 }
2365
2366 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2367 it != vectorOfMissingCameraPoints.end(); ++it) {
2368 it_img = mapOfImages.find(*it);
2369 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2371
2372 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2373 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2374 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2375 }
2376 else {
2378 "Missing image or missing camera transformation "
2379 "matrix! Cannot init the pose for camera: %s!",
2380 it->c_str());
2381 }
2382 }
2383}
2384
2385void vpMbGenericTracker::initFromPoints(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2386 const std::map<std::string, std::string> &mapOfInitPoints)
2387{
2388 // Set the reference cMo
2389 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2390 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2391 std::map<std::string, std::string>::const_iterator it_initPoints = mapOfInitPoints.find(m_referenceCameraName);
2392
2393 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() &&
2394 it_initPoints != mapOfInitPoints.end()) {
2395 TrackerWrapper *tracker = it_tracker->second;
2396 tracker->initFromPoints(*it_img->second, it_initPoints->second);
2397 tracker->getPose(m_cMo);
2398 }
2399 else {
2400 throw vpException(vpTrackingException::initializationError, "Cannot initFromPoints() for the reference camera!");
2401 }
2402
2403 // Vector of missing initPoints for cameras
2404 std::vector<std::string> vectorOfMissingCameraPoints;
2405
2406 // Set pose for the specified cameras
2407 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2408 it_img = mapOfColorImages.find(it_tracker->first);
2409 it_initPoints = mapOfInitPoints.find(it_tracker->first);
2410
2411 if (it_img != mapOfColorImages.end() && it_initPoints != mapOfInitPoints.end()) {
2412 // Set pose
2413 it_tracker->second->initFromPoints(*it_img->second, it_initPoints->second);
2414 }
2415 else {
2416 vectorOfMissingCameraPoints.push_back(it_tracker->first);
2417 }
2418 }
2419
2420 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoints.begin();
2421 it != vectorOfMissingCameraPoints.end(); ++it) {
2422 it_img = mapOfColorImages.find(*it);
2423 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2425
2426 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2427 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2428 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2429 }
2430 else {
2432 "Missing image or missing camera transformation "
2433 "matrix! Cannot init the pose for camera: %s!",
2434 it->c_str());
2435 }
2436 }
2437}
2438
2443
2456 const std::string &initFile1, const std::string &initFile2)
2457{
2458 if (m_mapOfTrackers.size() == 2) {
2459 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2460 TrackerWrapper *tracker = it->second;
2461 tracker->initFromPose(I1, initFile1);
2462
2463 ++it;
2464
2465 tracker = it->second;
2466 tracker->initFromPose(I2, initFile2);
2467
2469 if (it != m_mapOfTrackers.end()) {
2470 tracker = it->second;
2471
2472 // Set the reference cMo
2473 tracker->getPose(m_cMo);
2474
2475 // Set the reference camera parameters
2476 tracker->getCameraParameters(m_cam);
2477 }
2478 }
2479 else {
2481 "Cannot initFromPose()! Require two cameras but there "
2482 "are %d cameras!",
2483 m_mapOfTrackers.size());
2484 }
2485}
2486
2499 const std::string &initFile1, const std::string &initFile2)
2500{
2501 if (m_mapOfTrackers.size() == 2) {
2502 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2503 TrackerWrapper *tracker = it->second;
2504 tracker->initFromPose(I_color1, initFile1);
2505
2506 ++it;
2507
2508 tracker = it->second;
2509 tracker->initFromPose(I_color2, initFile2);
2510
2512 if (it != m_mapOfTrackers.end()) {
2513 tracker = it->second;
2514
2515 // Set the reference cMo
2516 tracker->getPose(m_cMo);
2517
2518 // Set the reference camera parameters
2519 tracker->getCameraParameters(m_cam);
2520 }
2521 }
2522 else {
2524 "Cannot initFromPose()! Require two cameras but there "
2525 "are %d cameras!",
2526 m_mapOfTrackers.size());
2527 }
2528}
2529
2544void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2545 const std::map<std::string, std::string> &mapOfInitPoses)
2546{
2547 // Set the reference cMo
2548 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2549 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2550 mapOfImages.find(m_referenceCameraName);
2551 std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2552
2553 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2554 TrackerWrapper *tracker = it_tracker->second;
2555 tracker->initFromPose(*it_img->second, it_initPose->second);
2556 tracker->getPose(m_cMo);
2557 }
2558 else {
2559 throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2560 }
2561
2562 // Vector of missing pose matrices for cameras
2563 std::vector<std::string> vectorOfMissingCameraPoses;
2564
2565 // Set pose for the specified cameras
2566 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2567 it_img = mapOfImages.find(it_tracker->first);
2568 it_initPose = mapOfInitPoses.find(it_tracker->first);
2569
2570 if (it_img != mapOfImages.end() && it_initPose != mapOfInitPoses.end()) {
2571 // Set pose
2572 it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2573 }
2574 else {
2575 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2576 }
2577 }
2578
2579 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2580 it != vectorOfMissingCameraPoses.end(); ++it) {
2581 it_img = mapOfImages.find(*it);
2582 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2584
2585 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2586 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2587 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2588 }
2589 else {
2591 "Missing image or missing camera transformation "
2592 "matrix! Cannot init the pose for camera: %s!",
2593 it->c_str());
2594 }
2595 }
2596}
2597
2612void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2613 const std::map<std::string, std::string> &mapOfInitPoses)
2614{
2615 // Set the reference cMo
2616 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2617 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2618 std::map<std::string, std::string>::const_iterator it_initPose = mapOfInitPoses.find(m_referenceCameraName);
2619
2620 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2621 TrackerWrapper *tracker = it_tracker->second;
2622 tracker->initFromPose(*it_img->second, it_initPose->second);
2623 tracker->getPose(m_cMo);
2624 }
2625 else {
2626 throw vpException(vpTrackingException::initializationError, "Cannot initFromPose() for the reference camera!");
2627 }
2628
2629 // Vector of missing pose matrices for cameras
2630 std::vector<std::string> vectorOfMissingCameraPoses;
2631
2632 // Set pose for the specified cameras
2633 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2634 it_img = mapOfColorImages.find(it_tracker->first);
2635 it_initPose = mapOfInitPoses.find(it_tracker->first);
2636
2637 if (it_img != mapOfColorImages.end() && it_initPose != mapOfInitPoses.end()) {
2638 // Set pose
2639 it_tracker->second->initFromPose(*it_img->second, it_initPose->second);
2640 }
2641 else {
2642 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2643 }
2644 }
2645
2646 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2647 it != vectorOfMissingCameraPoses.end(); ++it) {
2648 it_img = mapOfColorImages.find(*it);
2649 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2651
2652 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2653 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2654 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2655 }
2656 else {
2658 "Missing image or missing camera transformation "
2659 "matrix! Cannot init the pose for camera: %s!",
2660 it->c_str());
2661 }
2662 }
2663}
2664
2676 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2677{
2678 if (m_mapOfTrackers.size() == 2) {
2679 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2680 it->second->initFromPose(I1, c1Mo);
2681
2682 ++it;
2683
2684 it->second->initFromPose(I2, c2Mo);
2685
2686 m_cMo = c1Mo;
2687 }
2688 else {
2690 "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2691 }
2692}
2693
2705 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
2706{
2707 if (m_mapOfTrackers.size() == 2) {
2708 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2709 it->second->initFromPose(I_color1, c1Mo);
2710
2711 ++it;
2712
2713 it->second->initFromPose(I_color2, c2Mo);
2714
2715 m_cMo = c1Mo;
2716 }
2717 else {
2719 "This method requires 2 cameras but there are %d cameras!", m_mapOfTrackers.size());
2720 }
2721}
2722
2736void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
2737 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2738{
2739 // Set the reference cMo
2740 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2741 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
2742 mapOfImages.find(m_referenceCameraName);
2743 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2744
2745 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2746 TrackerWrapper *tracker = it_tracker->second;
2747 tracker->initFromPose(*it_img->second, it_camPose->second);
2748 tracker->getPose(m_cMo);
2749 }
2750 else {
2751 throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2752 }
2753
2754 // Vector of missing pose matrices for cameras
2755 std::vector<std::string> vectorOfMissingCameraPoses;
2756
2757 // Set pose for the specified cameras
2758 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2759 it_img = mapOfImages.find(it_tracker->first);
2760 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2761
2762 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
2763 // Set pose
2764 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2765 }
2766 else {
2767 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2768 }
2769 }
2770
2771 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2772 it != vectorOfMissingCameraPoses.end(); ++it) {
2773 it_img = mapOfImages.find(*it);
2774 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2776
2777 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2778 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2779 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2780 }
2781 else {
2783 "Missing image or missing camera transformation "
2784 "matrix! Cannot set the pose for camera: %s!",
2785 it->c_str());
2786 }
2787 }
2788}
2789
2803void vpMbGenericTracker::initFromPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
2804 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
2805{
2806 // Set the reference cMo
2807 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
2808 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
2809 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
2810
2811 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2812 TrackerWrapper *tracker = it_tracker->second;
2813 tracker->initFromPose(*it_img->second, it_camPose->second);
2814 tracker->getPose(m_cMo);
2815 }
2816 else {
2817 throw vpException(vpTrackingException::initializationError, "Cannot set pose for the reference camera!");
2818 }
2819
2820 // Vector of missing pose matrices for cameras
2821 std::vector<std::string> vectorOfMissingCameraPoses;
2822
2823 // Set pose for the specified cameras
2824 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
2825 it_img = mapOfColorImages.find(it_tracker->first);
2826 it_camPose = mapOfCameraPoses.find(it_tracker->first);
2827
2828 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
2829 // Set pose
2830 it_tracker->second->initFromPose(*it_img->second, it_camPose->second);
2831 }
2832 else {
2833 vectorOfMissingCameraPoses.push_back(it_tracker->first);
2834 }
2835 }
2836
2837 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
2838 it != vectorOfMissingCameraPoses.end(); ++it) {
2839 it_img = mapOfColorImages.find(*it);
2840 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
2842
2843 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
2844 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
2845 m_mapOfTrackers[*it]->initFromPose(*it_img->second, cCurrentMo);
2846 }
2847 else {
2849 "Missing image or missing camera transformation "
2850 "matrix! Cannot set the pose for camera: %s!",
2851 it->c_str());
2852 }
2853 }
2854}
2855
2867void vpMbGenericTracker::loadConfigFile(const std::string &configFile, bool verbose)
2868{
2869 const std::string extension = vpIoTools::getFileExtension(configFile);
2870 if (extension == ".xml") {
2871 loadConfigFileXML(configFile, verbose);
2872 }
2873#ifdef VISP_HAVE_NLOHMANN_JSON
2874 else if (extension == ".json") {
2875 loadConfigFileJSON(configFile, verbose);
2876 }
2877#endif
2878 else {
2879 throw vpException(vpException::ioError, "MBT config parsing: File format " + extension + "for file " + configFile + " is not supported.");
2880 }
2881}
2882
2894void vpMbGenericTracker::loadConfigFileXML(const std::string &configFile, bool verbose)
2895{
2896 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
2897 it != m_mapOfTrackers.end(); ++it) {
2898 TrackerWrapper *tracker = it->second;
2899 tracker->loadConfigFile(configFile, verbose);
2900 }
2901
2903 throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
2904 }
2905
2906 m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
2907 this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
2908 this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
2909 this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
2910}
2911
2912#ifdef VISP_HAVE_NLOHMANN_JSON
// Load a JSON settings file: creates/updates one TrackerWrapper per camera
// entry, removes cameras absent from the file, then applies global display,
// visibility and VVS settings and (optionally) loads the 3D model(s).
2921void vpMbGenericTracker::loadConfigFileJSON(const std::string &settingsFile, bool verbose)
2922{
 //Read file
 std::ifstream jsonFile(settingsFile);
 if (!jsonFile.good()) {
 throw vpException(vpException::ioError, "Could not read from settings file " + settingsFile + " to initialise the vpMbGenericTracker");
 }
 json settings;
 try {
 settings = json::parse(jsonFile);
 }
 catch (json::parse_error &e) {
 std::stringstream msg;
 msg << "Could not parse JSON file : \n";

 msg << e.what() << std::endl;
 msg << "Byte position of error: " << e.byte;
 throw vpException(vpException::ioError, msg.str());
 }
 jsonFile.close();

// Refuse files without a version tag or with a version different from the
// compiled-in MBT_JSON_SETTINGS_VERSION.
 if (!settings.contains("version")) {
 throw vpException(vpException::notInitialized, "JSON configuration does not contain versioning information");
 }
 else if (settings["version"].get<std::string>() != MBT_JSON_SETTINGS_VERSION) {
 throw vpException(vpException::badValue, "Trying to load an old configuration file");
 }

 //Load Basic settings
 settings.at("referenceCameraName").get_to(m_referenceCameraName);
 json trackersJson;
 trackersJson = settings.at("trackers");

 //Find camera that are already present in the tracker but not in the config file: they will be removed
 std::vector<std::string> unusedCameraNames = getCameraNames();
 std::map<std::string, std::string> map_models;

 bool refCameraFound = false;
 //Foreach camera
 for (const auto &it : trackersJson.items()) {
 const std::string cameraName = it.key();
 const json trackerJson = it.value();
 refCameraFound = refCameraFound || cameraName == m_referenceCameraName;

 //Load transformation between current camera and reference camera, if it exists
 if (trackerJson.contains("camTref")) {
 m_mapOfCameraTransformationMatrix[cameraName] = trackerJson["camTref"].get<vpHomogeneousMatrix>();
 }
 else if (cameraName != m_referenceCameraName) { // No transformation to reference and its not the reference itself
 throw vpException(vpException::notInitialized, "Camera " + cameraName + " has no transformation to the reference camera");
 }
// Optional per-camera model file; collected in map_models and loaded at the end.
 if (trackerJson.contains("model")) {
 std::string model_filename = trackerJson["model"].get<std::string>();
 map_models[cameraName] = model_filename;
 std::cout << "Set the model to '" << model_filename << "' for camera '" << cameraName << "'" << std::endl;
 }
 if (verbose) {
 std::cout << "Loading tracker " << cameraName << std::endl << " with settings: " << std::endl << trackerJson.dump(2);
 }
// Existing camera: update it in place. Unknown camera: create a new wrapper.
 if (m_mapOfTrackers.count(cameraName)) {
 if (verbose) {
 std::cout << "Updating an already existing tracker with JSON configuration." << std::endl;
 }
 from_json(trackerJson, *(m_mapOfTrackers[cameraName]));
 }
 else {
 if (verbose) {
 std::cout << "Creating a new tracker from JSON configuration." << std::endl;
 }
 TrackerWrapper *tw = new TrackerWrapper(); //vpMBTracker is responsible for deleting trackers
 *tw = trackerJson;
 m_mapOfTrackers[cameraName] = tw;
 }
 const auto unusedCamIt = std::remove(unusedCameraNames.begin(), unusedCameraNames.end(), cameraName); // Mark this camera name as used
 unusedCameraNames.erase(unusedCamIt, unusedCameraNames.end());
 }
 if (!refCameraFound) {
 throw vpException(vpException::badValue, "Reference camera not found in trackers");
 }

 // All cameras that were defined in the tracker but not in the config file are removed
 for (const std::string &oldCameraName : unusedCameraNames) {
 m_mapOfCameraTransformationMatrix.erase(oldCameraName);
 TrackerWrapper *tw = m_mapOfTrackers[oldCameraName];
 m_mapOfTrackers.erase(oldCameraName);
 delete tw;
 }

// Mirror the reference camera settings into this generic tracker.
 const TrackerWrapper *refTracker = m_mapOfTrackers[m_referenceCameraName];
 refTracker->getCameraParameters(m_cam);
 this->angleAppears = refTracker->getAngleAppear();
 this->angleDisappears = refTracker->getAngleDisappear();
 this->clippingFlag = refTracker->getClipping();
 this->distNearClip = refTracker->getNearClippingDistance();
 this->distFarClip = refTracker->getFarClippingDistance();

 // These settings can be set in each tracker or globally. Global value overrides local ones.
 if (settings.contains("display")) {
 const json displayJson = settings["display"];
 setDisplayFeatures(displayJson.value("features", displayFeatures));
 setProjectionErrorDisplay(displayJson.value("projectionError", m_projectionErrorDisplay));
 }
 if (settings.contains("visibilityTest")) {
 const json visJson = settings["visibilityTest"];
 setOgreVisibilityTest(visJson.value("ogre", useOgre));
 setScanLineVisibilityTest(visJson.value("scanline", useScanLine));
 }

 // VVS global settings
 if (settings.contains("vvs")) {
 const json vvsJson = settings["vvs"];
 setLambda(vvsJson.value("lambda", this->m_lambda));
 setMaxIter(vvsJson.value("maxIter", this->m_maxIter));
 setInitialMu(vvsJson.value("initialMu", this->m_initialMu));
 }

 //If a 3D model is defined, load it
// Either one global model, or one model per camera — mixing both is an error,
// as is providing models for only some of the cameras.
 if (settings.contains("model") && (map_models.size() == 0)) {
 loadModel(settings.at("model").get<std::string>(), verbose);
 }
 else if (map_models.size() != 0) {
 if (settings.contains("model")) {
 std::stringstream ss;
 ss << "Both a model common to all the cameras and at least one model for a dedicated camera has been set." << std::endl;
 ss << "Please either set a model for each camera or a single model common to each camera";
 throw(vpException(vpException::ioError, ss.str()));
 }
 else if (map_models.size() == trackersJson.size()) {
 loadModel(map_models, verbose);
 }
 else {
 std::stringstream ss;
 ss << "The model has not been defined for each camera individually" << std::endl;
 ss << "Please either set a model for each camera or a single model common to each camera";
 throw(vpException(vpException::ioError, ss.str()));
 }
 }
}
3059
3067void vpMbGenericTracker::saveConfigFile(const std::string &settingsFile) const
3068{
3069 json j;
3070 j["referenceCameraName"] = m_referenceCameraName;
3071 j["version"] = MBT_JSON_SETTINGS_VERSION;
3072 // j["thresholdOutlier"] = m_thresholdOutlier;
3073 json trackers;
3074 for (const auto &kv : m_mapOfTrackers) {
3075 trackers[kv.first] = *(kv.second);
3076 const auto itTransformation = m_mapOfCameraTransformationMatrix.find(kv.first);
3077 if (itTransformation != m_mapOfCameraTransformationMatrix.end()) {
3078 trackers[kv.first]["camTref"] = itTransformation->second;
3079 }
3080 }
3081 j["trackers"] = trackers;
3082 j["vvs"] = json {
3083 {"lambda", m_lambda},
3084 {"maxIter", m_maxIter},
3085 {"initialMu", m_initialMu}
3086 };
3087
3088 std::ofstream f(settingsFile);
3089 if (f.good()) {
3090 const unsigned indentLevel = 4;
3091 f << j.dump(indentLevel);
3092 f.close();
3093 }
3094 else {
3095 throw vpException(vpException::ioError, "Could not save tracker configuration to JSON file: " + settingsFile);
3096 }
3097}
3098
3099#endif
3100
3115void vpMbGenericTracker::loadConfigFile(const std::string &configFile1, const std::string &configFile2, bool verbose)
3116{
3117 if (m_mapOfTrackers.size() != 2) {
3118 throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3119 }
3120
3121 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3122 TrackerWrapper *tracker = it_tracker->second;
3123 tracker->loadConfigFile(configFile1, verbose);
3124
3125 ++it_tracker;
3126 tracker = it_tracker->second;
3127 tracker->loadConfigFile(configFile2, verbose);
3128
3130 throw vpException(vpException::fatalError, "Cannot find the reference camera: %s!", m_referenceCameraName.c_str());
3131 }
3132
3133 m_mapOfTrackers[m_referenceCameraName]->getCameraParameters(m_cam);
3134 this->angleAppears = m_mapOfTrackers[m_referenceCameraName]->getAngleAppear();
3135 this->angleDisappears = m_mapOfTrackers[m_referenceCameraName]->getAngleDisappear();
3136 this->clippingFlag = m_mapOfTrackers[m_referenceCameraName]->getClipping();
3137}
3138
3152void vpMbGenericTracker::loadConfigFile(const std::map<std::string, std::string> &mapOfConfigFiles, bool verbose)
3153{
3154 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3155 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3156 TrackerWrapper *tracker = it_tracker->second;
3157
3158 std::map<std::string, std::string>::const_iterator it_config = mapOfConfigFiles.find(it_tracker->first);
3159 if (it_config != mapOfConfigFiles.end()) {
3160 tracker->loadConfigFile(it_config->second, verbose);
3161 }
3162 else {
3163 throw vpException(vpTrackingException::initializationError, "Missing configuration file for camera: %s!",
3164 it_tracker->first.c_str());
3165 }
3166 }
3167
3168 // Set the reference camera parameters
3169 std::map<std::string, TrackerWrapper *>::iterator it = m_mapOfTrackers.find(m_referenceCameraName);
3170 if (it != m_mapOfTrackers.end()) {
3171 TrackerWrapper *tracker = it->second;
3172 tracker->getCameraParameters(m_cam);
3173
3174 // Set clipping
3175 this->clippingFlag = tracker->getClipping();
3176 this->angleAppears = tracker->getAngleAppear();
3177 this->angleDisappears = tracker->getAngleDisappear();
3178 }
3179 else {
3180 throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
3181 m_referenceCameraName.c_str());
3182 }
3183}
3184
3203void vpMbGenericTracker::loadModel(const std::string &modelFile, bool verbose, const vpHomogeneousMatrix &T)
3204{
3205 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3206 it != m_mapOfTrackers.end(); ++it) {
3207 TrackerWrapper *tracker = it->second;
3208 tracker->loadModel(modelFile, verbose, T);
3209 }
3210}
3211
3234void vpMbGenericTracker::loadModel(const std::string &modelFile1, const std::string &modelFile2, bool verbose,
3235 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3236{
3237 if (m_mapOfTrackers.size() != 2) {
3238 throw vpException(vpException::fatalError, "The tracker is not set in a stereo configuration!");
3239 }
3240
3241 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3242 TrackerWrapper *tracker = it_tracker->second;
3243 tracker->loadModel(modelFile1, verbose, T1);
3244
3245 ++it_tracker;
3246 tracker = it_tracker->second;
3247 tracker->loadModel(modelFile2, verbose, T2);
3248}
3249
3269void vpMbGenericTracker::loadModel(const std::map<std::string, std::string> &mapOfModelFiles, bool verbose,
3270 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3271{
3272 for (std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3273 it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3274 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(it_tracker->first);
3275
3276 if (it_model != mapOfModelFiles.end()) {
3277 TrackerWrapper *tracker = it_tracker->second;
3278 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3279
3280 if (it_T != mapOfT.end())
3281 tracker->loadModel(it_model->second, verbose, it_T->second);
3282 else
3283 tracker->loadModel(it_model->second, verbose);
3284 }
3285 else {
3286 throw vpException(vpTrackingException::initializationError, "Cannot load model for camera: %s",
3287 it_tracker->first.c_str());
3288 }
3289 }
3290}
3291
3292#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_SEGMENTATION) && defined(VISP_HAVE_PCL_FILTERS) && defined(VISP_HAVE_PCL_COMMON)
3293void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3294 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
3295{
3296 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3297 it != m_mapOfTrackers.end(); ++it) {
3298 TrackerWrapper *tracker = it->second;
3299 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
3300 }
3301}
3302#endif
3303
3304void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3305 std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
3306 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3307 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3308{
3309 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3310 it != m_mapOfTrackers.end(); ++it) {
3311 TrackerWrapper *tracker = it->second;
3312 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3313 mapOfPointCloudHeights[it->first]);
3314 }
3315}
3316
3317void vpMbGenericTracker::preTracking(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3318 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
3319 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
3320 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
3321{
3322 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3323 it != m_mapOfTrackers.end(); ++it) {
3324 TrackerWrapper *tracker = it->second;
3325 tracker->preTracking(mapOfImages[it->first], mapOfPointClouds[it->first], mapOfPointCloudWidths[it->first],
3326 mapOfPointCloudHeights[it->first]);
3327 }
3328}
3329
3341void vpMbGenericTracker::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
3342 const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3343{
3344 if (m_mapOfTrackers.size() != 1) {
3345 throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3346 m_mapOfTrackers.size());
3347 }
3348
3349 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3350 if (it_tracker != m_mapOfTrackers.end()) {
3351 TrackerWrapper *tracker = it_tracker->second;
3352 tracker->reInitModel(I, cad_name, cMo, verbose, T);
3353
3354 // Set reference pose
3355 tracker->getPose(m_cMo);
3356 }
3357 else {
3358 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3359 }
3360
3361 modelInitialised = true;
3362}
3363
3375void vpMbGenericTracker::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
3376 const vpHomogeneousMatrix &cMo, bool verbose, const vpHomogeneousMatrix &T)
3377{
3378 if (m_mapOfTrackers.size() != 1) {
3379 throw vpException(vpTrackingException::fatalError, "This method requires exactly one camera, there are %d cameras!",
3380 m_mapOfTrackers.size());
3381 }
3382
3383 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3384 if (it_tracker != m_mapOfTrackers.end()) {
3385 TrackerWrapper *tracker = it_tracker->second;
3386 tracker->reInitModel(I_color, cad_name, cMo, verbose, T);
3387
3388 // Set reference pose
3389 tracker->getPose(m_cMo);
3390 }
3391 else {
3392 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() the reference camera!");
3393 }
3394
3395 modelInitialised = true;
3396}
3397
3419 const std::string &cad_name1, const std::string &cad_name2,
3420 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3421 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3422{
3423 if (m_mapOfTrackers.size() == 2) {
3424 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3425
3426 it_tracker->second->reInitModel(I1, cad_name1, c1Mo, verbose, T1);
3427
3428 ++it_tracker;
3429
3430 it_tracker->second->reInitModel(I2, cad_name2, c2Mo, verbose, T2);
3431
3432 it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3433 if (it_tracker != m_mapOfTrackers.end()) {
3434 // Set reference pose
3435 it_tracker->second->getPose(m_cMo);
3436 }
3437 }
3438 else {
3439 throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3440 }
3441
3442 modelInitialised = true;
3443}
3444
3466 const std::string &cad_name1, const std::string &cad_name2,
3467 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo, bool verbose,
3468 const vpHomogeneousMatrix &T1, const vpHomogeneousMatrix &T2)
3469{
3470 if (m_mapOfTrackers.size() == 2) {
3471 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.begin();
3472
3473 it_tracker->second->reInitModel(I_color1, cad_name1, c1Mo, verbose, T1);
3474
3475 ++it_tracker;
3476
3477 it_tracker->second->reInitModel(I_color2, cad_name2, c2Mo, verbose, T2);
3478
3479 it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3480 if (it_tracker != m_mapOfTrackers.end()) {
3481 // Set reference pose
3482 it_tracker->second->getPose(m_cMo);
3483 }
3484 }
3485 else {
3486 throw vpException(vpTrackingException::fatalError, "This method requires exactly two cameras!");
3487 }
3488
3489 modelInitialised = true;
3490}
3491
3506void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
3507 const std::map<std::string, std::string> &mapOfModelFiles,
3508 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3509 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3510{
3511 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3512 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
3513 mapOfImages.find(m_referenceCameraName);
3514 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3515 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3516
3517 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3518 it_camPose != mapOfCameraPoses.end()) {
3519 TrackerWrapper *tracker = it_tracker->second;
3520 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3521 if (it_T != mapOfT.end())
3522 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3523 else
3524 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3525
3526 // Set reference pose
3527 tracker->getPose(m_cMo);
3528 }
3529 else {
3530 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3531 }
3532
3533 std::vector<std::string> vectorOfMissingCameras;
3534 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3535 if (it_tracker->first != m_referenceCameraName) {
3536 it_img = mapOfImages.find(it_tracker->first);
3537 it_model = mapOfModelFiles.find(it_tracker->first);
3538 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3539
3540 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() && it_camPose != mapOfCameraPoses.end()) {
3541 TrackerWrapper *tracker = it_tracker->second;
3542 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3543 }
3544 else {
3545 vectorOfMissingCameras.push_back(it_tracker->first);
3546 }
3547 }
3548 }
3549
3550 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3551 ++it) {
3552 it_img = mapOfImages.find(*it);
3553 it_model = mapOfModelFiles.find(*it);
3554 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3556
3557 if (it_img != mapOfImages.end() && it_model != mapOfModelFiles.end() &&
3558 it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3559 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3560 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3561 }
3562 }
3563
3564 modelInitialised = true;
3565}
3566
3581void vpMbGenericTracker::reInitModel(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
3582 const std::map<std::string, std::string> &mapOfModelFiles,
3583 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses, bool verbose,
3584 const std::map<std::string, vpHomogeneousMatrix> &mapOfT)
3585{
3586 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
3587 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
3588 std::map<std::string, std::string>::const_iterator it_model = mapOfModelFiles.find(m_referenceCameraName);
3589 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
3590
3591 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3592 it_camPose != mapOfCameraPoses.end()) {
3593 TrackerWrapper *tracker = it_tracker->second;
3594 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_T = mapOfT.find(it_tracker->first);
3595 if (it_T != mapOfT.end())
3596 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose, it_T->second);
3597 else
3598 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3599
3600 // Set reference pose
3601 tracker->getPose(m_cMo);
3602 }
3603 else {
3604 throw vpException(vpTrackingException::fatalError, "Cannot reInitModel() for reference camera!");
3605 }
3606
3607 std::vector<std::string> vectorOfMissingCameras;
3608 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
3609 if (it_tracker->first != m_referenceCameraName) {
3610 it_img = mapOfColorImages.find(it_tracker->first);
3611 it_model = mapOfModelFiles.find(it_tracker->first);
3612 it_camPose = mapOfCameraPoses.find(it_tracker->first);
3613
3614 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3615 it_camPose != mapOfCameraPoses.end()) {
3616 TrackerWrapper *tracker = it_tracker->second;
3617 tracker->reInitModel(*it_img->second, it_model->second, it_camPose->second, verbose);
3618 }
3619 else {
3620 vectorOfMissingCameras.push_back(it_tracker->first);
3621 }
3622 }
3623 }
3624
3625 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameras.begin(); it != vectorOfMissingCameras.end();
3626 ++it) {
3627 it_img = mapOfColorImages.find(*it);
3628 it_model = mapOfModelFiles.find(*it);
3629 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
3631
3632 if (it_img != mapOfColorImages.end() && it_model != mapOfModelFiles.end() &&
3633 it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3634 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
3635 m_mapOfTrackers[*it]->reInitModel(*it_img->second, it_model->second, cCurrentMo, verbose);
3636 }
3637 }
3638
3639 modelInitialised = true;
3640}
3641
3647{
3648 m_cMo.eye();
3649
3650 useScanLine = false;
3651
3652#ifdef VISP_HAVE_OGRE
3653 useOgre = false;
3654#endif
3655
3656 m_computeInteraction = true;
3657 m_lambda = 1.0;
3658
3662 distNearClip = 0.001;
3663 distFarClip = 100;
3664
3666 m_maxIter = 30;
3667 m_stopCriteriaEpsilon = 1e-8;
3668 m_initialMu = 0.01;
3669
3670 // Only for Edge
3671 m_percentageGdPt = 0.4;
3672
3673 // Only for KLT
3674 m_thresholdOutlier = 0.5;
3675
3676 // Reset default ponderation between each feature type
3678
3679#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
3681#endif
3682
3685
3686 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3687 it != m_mapOfTrackers.end(); ++it) {
3688 TrackerWrapper *tracker = it->second;
3689 tracker->resetTracker();
3690 }
3691}
3692
3703{
3705
3706 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3707 it != m_mapOfTrackers.end(); ++it) {
3708 TrackerWrapper *tracker = it->second;
3709 tracker->setAngleAppear(a);
3710 }
3711}
3712
3725void vpMbGenericTracker::setAngleAppear(const double &a1, const double &a2)
3726{
3727 if (m_mapOfTrackers.size() == 2) {
3728 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3729 it->second->setAngleAppear(a1);
3730
3731 ++it;
3732 it->second->setAngleAppear(a2);
3733
3735 if (it != m_mapOfTrackers.end()) {
3736 angleAppears = it->second->getAngleAppear();
3737 }
3738 else {
3739 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3740 }
3741 }
3742 else {
3743 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3744 m_mapOfTrackers.size());
3745 }
3746}
3747
3757void vpMbGenericTracker::setAngleAppear(const std::map<std::string, double> &mapOfAngles)
3758{
3759 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3760 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3761
3762 if (it_tracker != m_mapOfTrackers.end()) {
3763 TrackerWrapper *tracker = it_tracker->second;
3764 tracker->setAngleAppear(it->second);
3765
3766 if (it->first == m_referenceCameraName) {
3767 angleAppears = it->second;
3768 }
3769 }
3770 }
3771}
3772
3783{
3785
3786 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3787 it != m_mapOfTrackers.end(); ++it) {
3788 TrackerWrapper *tracker = it->second;
3789 tracker->setAngleDisappear(a);
3790 }
3791}
3792
3805void vpMbGenericTracker::setAngleDisappear(const double &a1, const double &a2)
3806{
3807 if (m_mapOfTrackers.size() == 2) {
3808 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3809 it->second->setAngleDisappear(a1);
3810
3811 ++it;
3812 it->second->setAngleDisappear(a2);
3813
3815 if (it != m_mapOfTrackers.end()) {
3816 angleDisappears = it->second->getAngleDisappear();
3817 }
3818 else {
3819 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3820 }
3821 }
3822 else {
3823 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3824 m_mapOfTrackers.size());
3825 }
3826}
3827
3837void vpMbGenericTracker::setAngleDisappear(const std::map<std::string, double> &mapOfAngles)
3838{
3839 for (std::map<std::string, double>::const_iterator it = mapOfAngles.begin(); it != mapOfAngles.end(); ++it) {
3840 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3841
3842 if (it_tracker != m_mapOfTrackers.end()) {
3843 TrackerWrapper *tracker = it_tracker->second;
3844 tracker->setAngleDisappear(it->second);
3845
3846 if (it->first == m_referenceCameraName) {
3847 angleDisappears = it->second;
3848 }
3849 }
3850 }
3851}
3852
3859{
3861
3862 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3863 it != m_mapOfTrackers.end(); ++it) {
3864 TrackerWrapper *tracker = it->second;
3865 tracker->setCameraParameters(camera);
3866 }
3867}
3868
3878{
3879 if (m_mapOfTrackers.size() == 2) {
3880 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3881 it->second->setCameraParameters(camera1);
3882
3883 ++it;
3884 it->second->setCameraParameters(camera2);
3885
3887 if (it != m_mapOfTrackers.end()) {
3888 it->second->getCameraParameters(m_cam);
3889 }
3890 else {
3891 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
3892 }
3893 }
3894 else {
3895 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
3896 m_mapOfTrackers.size());
3897 }
3898}
3899
3908void vpMbGenericTracker::setCameraParameters(const std::map<std::string, vpCameraParameters> &mapOfCameraParameters)
3909{
3910 for (std::map<std::string, vpCameraParameters>::const_iterator it = mapOfCameraParameters.begin();
3911 it != mapOfCameraParameters.end(); ++it) {
3912 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
3913
3914 if (it_tracker != m_mapOfTrackers.end()) {
3915 TrackerWrapper *tracker = it_tracker->second;
3916 tracker->setCameraParameters(it->second);
3917
3918 if (it->first == m_referenceCameraName) {
3919 m_cam = it->second;
3920 }
3921 }
3922 }
3923}
3924
3933void vpMbGenericTracker::setCameraTransformationMatrix(const std::string &cameraName,
3934 const vpHomogeneousMatrix &cameraTransformationMatrix)
3935{
3936 std::map<std::string, vpHomogeneousMatrix>::iterator it = m_mapOfCameraTransformationMatrix.find(cameraName);
3937
3938 if (it != m_mapOfCameraTransformationMatrix.end()) {
3939 it->second = cameraTransformationMatrix;
3940 }
3941 else {
3942 throw vpException(vpTrackingException::fatalError, "Cannot find camera: %s!", cameraName.c_str());
3943 }
3944}
3945
3954 const std::map<std::string, vpHomogeneousMatrix> &mapOfTransformationMatrix)
3955{
3956 for (std::map<std::string, vpHomogeneousMatrix>::const_iterator it = mapOfTransformationMatrix.begin();
3957 it != mapOfTransformationMatrix.end(); ++it) {
3958 std::map<std::string, vpHomogeneousMatrix>::iterator it_camTrans =
3959 m_mapOfCameraTransformationMatrix.find(it->first);
3960
3961 if (it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
3962 it_camTrans->second = it->second;
3963 }
3964 }
3965}
3966
3976void vpMbGenericTracker::setClipping(const unsigned int &flags)
3977{
3979
3980 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
3981 it != m_mapOfTrackers.end(); ++it) {
3982 TrackerWrapper *tracker = it->second;
3983 tracker->setClipping(flags);
3984 }
3985}
3986
3997void vpMbGenericTracker::setClipping(const unsigned int &flags1, const unsigned int &flags2)
3998{
3999 if (m_mapOfTrackers.size() == 2) {
4000 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4001 it->second->setClipping(flags1);
4002
4003 ++it;
4004 it->second->setClipping(flags2);
4005
4007 if (it != m_mapOfTrackers.end()) {
4008 clippingFlag = it->second->getClipping();
4009 }
4010 else {
4011 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4012 }
4013 }
4014 else {
4015 std::stringstream ss;
4016 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
4018 }
4019}
4020
4028void vpMbGenericTracker::setClipping(const std::map<std::string, unsigned int> &mapOfClippingFlags)
4029{
4030 for (std::map<std::string, unsigned int>::const_iterator it = mapOfClippingFlags.begin();
4031 it != mapOfClippingFlags.end(); ++it) {
4032 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4033
4034 if (it_tracker != m_mapOfTrackers.end()) {
4035 TrackerWrapper *tracker = it_tracker->second;
4036 tracker->setClipping(it->second);
4037
4038 if (it->first == m_referenceCameraName) {
4039 clippingFlag = it->second;
4040 }
4041 }
4042 }
4043}
4044
4055{
4056 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4057 it != m_mapOfTrackers.end(); ++it) {
4058 TrackerWrapper *tracker = it->second;
4059 tracker->setDepthDenseFilteringMaxDistance(maxDistance);
4060 }
4061}
4062
4072{
4073 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4074 it != m_mapOfTrackers.end(); ++it) {
4075 TrackerWrapper *tracker = it->second;
4076 tracker->setDepthDenseFilteringMethod(method);
4077 }
4078}
4079
4090{
4091 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4092 it != m_mapOfTrackers.end(); ++it) {
4093 TrackerWrapper *tracker = it->second;
4094 tracker->setDepthDenseFilteringMinDistance(minDistance);
4095 }
4096}
4097
4108{
4109 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4110 it != m_mapOfTrackers.end(); ++it) {
4111 TrackerWrapper *tracker = it->second;
4112 tracker->setDepthDenseFilteringOccupancyRatio(occupancyRatio);
4113 }
4114}
4115
4124void vpMbGenericTracker::setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
4125{
4126 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4127 it != m_mapOfTrackers.end(); ++it) {
4128 TrackerWrapper *tracker = it->second;
4129 tracker->setDepthDenseSamplingStep(stepX, stepY);
4130 }
4131}
4132
4141{
4142 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4143 it != m_mapOfTrackers.end(); ++it) {
4144 TrackerWrapper *tracker = it->second;
4145 tracker->setDepthNormalFaceCentroidMethod(method);
4146 }
4147}
4148
4158{
4159 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4160 it != m_mapOfTrackers.end(); ++it) {
4161 TrackerWrapper *tracker = it->second;
4162 tracker->setDepthNormalFeatureEstimationMethod(method);
4163 }
4164}
4165
4174{
4175 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4176 it != m_mapOfTrackers.end(); ++it) {
4177 TrackerWrapper *tracker = it->second;
4178 tracker->setDepthNormalPclPlaneEstimationMethod(method);
4179 }
4180}
4181
4190{
4191 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4192 it != m_mapOfTrackers.end(); ++it) {
4193 TrackerWrapper *tracker = it->second;
4194 tracker->setDepthNormalPclPlaneEstimationRansacMaxIter(maxIter);
4195 }
4196}
4197
4206{
4207 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4208 it != m_mapOfTrackers.end(); ++it) {
4209 TrackerWrapper *tracker = it->second;
4210 tracker->setDepthNormalPclPlaneEstimationRansacThreshold(thresold);
4211 }
4212}
4213
4222void vpMbGenericTracker::setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
4223{
4224 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4225 it != m_mapOfTrackers.end(); ++it) {
4226 TrackerWrapper *tracker = it->second;
4227 tracker->setDepthNormalSamplingStep(stepX, stepY);
4228 }
4229}
4230
4250{
4252
4253 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4254 it != m_mapOfTrackers.end(); ++it) {
4255 TrackerWrapper *tracker = it->second;
4256 tracker->setDisplayFeatures(displayF);
4257 }
4258}
4259
4268{
4270
4271 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4272 it != m_mapOfTrackers.end(); ++it) {
4273 TrackerWrapper *tracker = it->second;
4274 tracker->setFarClippingDistance(dist);
4275 }
4276}
4277
4286void vpMbGenericTracker::setFarClippingDistance(const double &dist1, const double &dist2)
4287{
4288 if (m_mapOfTrackers.size() == 2) {
4289 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4290 it->second->setFarClippingDistance(dist1);
4291
4292 ++it;
4293 it->second->setFarClippingDistance(dist2);
4294
4296 if (it != m_mapOfTrackers.end()) {
4297 distFarClip = it->second->getFarClippingDistance();
4298 }
4299 else {
4300 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4301 }
4302 }
4303 else {
4304 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4305 m_mapOfTrackers.size());
4306 }
4307}
4308
4314void vpMbGenericTracker::setFarClippingDistance(const std::map<std::string, double> &mapOfClippingDists)
4315{
4316 for (std::map<std::string, double>::const_iterator it = mapOfClippingDists.begin(); it != mapOfClippingDists.end();
4317 ++it) {
4318 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4319
4320 if (it_tracker != m_mapOfTrackers.end()) {
4321 TrackerWrapper *tracker = it_tracker->second;
4322 tracker->setFarClippingDistance(it->second);
4323
4324 if (it->first == m_referenceCameraName) {
4325 distFarClip = it->second;
4326 }
4327 }
4328 }
4329}
4330
4337void vpMbGenericTracker::setFeatureFactors(const std::map<vpTrackerType, double> &mapOfFeatureFactors)
4338{
4339 for (std::map<vpTrackerType, double>::iterator it = m_mapOfFeatureFactors.begin(); it != m_mapOfFeatureFactors.end();
4340 ++it) {
4341 std::map<vpTrackerType, double>::const_iterator it_factor = mapOfFeatureFactors.find(it->first);
4342 if (it_factor != mapOfFeatureFactors.end()) {
4343 it->second = it_factor->second;
4344 }
4345 }
4346}
4347
4364{
4365 m_percentageGdPt = threshold;
4366
4367 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4368 it != m_mapOfTrackers.end(); ++it) {
4369 TrackerWrapper *tracker = it->second;
4370 tracker->setGoodMovingEdgesRatioThreshold(threshold);
4371 }
4372}
4373
4374#ifdef VISP_HAVE_OGRE
4387{
4388 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4389 it != m_mapOfTrackers.end(); ++it) {
4390 TrackerWrapper *tracker = it->second;
4391 tracker->setGoodNbRayCastingAttemptsRatio(ratio);
4392 }
4393}
4394
4407{
4408 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4409 it != m_mapOfTrackers.end(); ++it) {
4410 TrackerWrapper *tracker = it->second;
4411 tracker->setNbRayCastingAttemptsForVisibility(attempts);
4412 }
4413}
4414#endif
4415
4416#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4425{
4426 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4427 it != m_mapOfTrackers.end(); ++it) {
4428 TrackerWrapper *tracker = it->second;
4429 tracker->setKltOpencv(t);
4430 }
4431}
4432
4442{
4443 if (m_mapOfTrackers.size() == 2) {
4444 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4445 it->second->setKltOpencv(t1);
4446
4447 ++it;
4448 it->second->setKltOpencv(t2);
4449 }
4450 else {
4451 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4452 m_mapOfTrackers.size());
4453 }
4454}
4455
4461void vpMbGenericTracker::setKltOpencv(const std::map<std::string, vpKltOpencv> &mapOfKlts)
4462{
4463 for (std::map<std::string, vpKltOpencv>::const_iterator it = mapOfKlts.begin(); it != mapOfKlts.end(); ++it) {
4464 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4465
4466 if (it_tracker != m_mapOfTrackers.end()) {
4467 TrackerWrapper *tracker = it_tracker->second;
4468 tracker->setKltOpencv(it->second);
4469 }
4470 }
4471}
4472
4481{
4482 m_thresholdOutlier = th;
4483
4484 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4485 it != m_mapOfTrackers.end(); ++it) {
4486 TrackerWrapper *tracker = it->second;
4487 tracker->setKltThresholdAcceptation(th);
4488 }
4489}
4490#endif
4491
4504void vpMbGenericTracker::setLod(bool useLod, const std::string &name)
4505{
4506 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4507 it != m_mapOfTrackers.end(); ++it) {
4508 TrackerWrapper *tracker = it->second;
4509 tracker->setLod(useLod, name);
4510 }
4511}
4512
4513#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
4521void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e)
4522{
4523 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4524 it != m_mapOfTrackers.end(); ++it) {
4525 TrackerWrapper *tracker = it->second;
4526 tracker->setKltMaskBorder(e);
4527 }
4528}
4529
4538void vpMbGenericTracker::setKltMaskBorder(const unsigned int &e1, const unsigned int &e2)
4539{
4540 if (m_mapOfTrackers.size() == 2) {
4541 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4542 it->second->setKltMaskBorder(e1);
4543
4544 ++it;
4545
4546 it->second->setKltMaskBorder(e2);
4547 }
4548 else {
4549 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4550 m_mapOfTrackers.size());
4551 }
4552}
4553
4559void vpMbGenericTracker::setKltMaskBorder(const std::map<std::string, unsigned int> &mapOfErosions)
4560{
4561 for (std::map<std::string, unsigned int>::const_iterator it = mapOfErosions.begin(); it != mapOfErosions.end();
4562 ++it) {
4563 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4564
4565 if (it_tracker != m_mapOfTrackers.end()) {
4566 TrackerWrapper *tracker = it_tracker->second;
4567 tracker->setKltMaskBorder(it->second);
4568 }
4569 }
4570}
4571#endif
4572
4579{
4581
4582 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4583 it != m_mapOfTrackers.end(); ++it) {
4584 TrackerWrapper *tracker = it->second;
4585 tracker->setMask(mask);
4586 }
4587}
4588
4600void vpMbGenericTracker::setMinLineLengthThresh(double minLineLengthThresh, const std::string &name)
4601{
4602 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4603 it != m_mapOfTrackers.end(); ++it) {
4604 TrackerWrapper *tracker = it->second;
4605 tracker->setMinLineLengthThresh(minLineLengthThresh, name);
4606 }
4607}
4608
4619void vpMbGenericTracker::setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name)
4620{
4621 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4622 it != m_mapOfTrackers.end(); ++it) {
4623 TrackerWrapper *tracker = it->second;
4624 tracker->setMinPolygonAreaThresh(minPolygonAreaThresh, name);
4625 }
4626}
4627
4636{
4637 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4638 it != m_mapOfTrackers.end(); ++it) {
4639 TrackerWrapper *tracker = it->second;
4640 tracker->setMovingEdge(me);
4641 }
4642}
4643
4653void vpMbGenericTracker::setMovingEdge(const vpMe &me1, const vpMe &me2)
4654{
4655 if (m_mapOfTrackers.size() == 2) {
4656 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4657 it->second->setMovingEdge(me1);
4658
4659 ++it;
4660
4661 it->second->setMovingEdge(me2);
4662 }
4663 else {
4664 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4665 m_mapOfTrackers.size());
4666 }
4667}
4668
4674void vpMbGenericTracker::setMovingEdge(const std::map<std::string, vpMe> &mapOfMe)
4675{
4676 for (std::map<std::string, vpMe>::const_iterator it = mapOfMe.begin(); it != mapOfMe.end(); ++it) {
4677 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4678
4679 if (it_tracker != m_mapOfTrackers.end()) {
4680 TrackerWrapper *tracker = it_tracker->second;
4681 tracker->setMovingEdge(it->second);
4682 }
4683 }
4684}
4685
4694{
4696
4697 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4698 it != m_mapOfTrackers.end(); ++it) {
4699 TrackerWrapper *tracker = it->second;
4700 tracker->setNearClippingDistance(dist);
4701 }
4702}
4703
4712void vpMbGenericTracker::setNearClippingDistance(const double &dist1, const double &dist2)
4713{
4714 if (m_mapOfTrackers.size() == 2) {
4715 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4716 it->second->setNearClippingDistance(dist1);
4717
4718 ++it;
4719
4720 it->second->setNearClippingDistance(dist2);
4721
4723 if (it != m_mapOfTrackers.end()) {
4724 distNearClip = it->second->getNearClippingDistance();
4725 }
4726 else {
4727 std::cerr << "Cannot find the reference camera: " << m_referenceCameraName << "!" << std::endl;
4728 }
4729 }
4730 else {
4731 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4732 m_mapOfTrackers.size());
4733 }
4734}
4735
4741void vpMbGenericTracker::setNearClippingDistance(const std::map<std::string, double> &mapOfDists)
4742{
4743 for (std::map<std::string, double>::const_iterator it = mapOfDists.begin(); it != mapOfDists.end(); ++it) {
4744 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
4745
4746 if (it_tracker != m_mapOfTrackers.end()) {
4747 TrackerWrapper *tracker = it_tracker->second;
4748 tracker->setNearClippingDistance(it->second);
4749
4750 if (it->first == m_referenceCameraName) {
4751 distNearClip = it->second;
4752 }
4753 }
4754 }
4755}
4756
4770{
4771 vpMbTracker::setOgreShowConfigDialog(showConfigDialog);
4772
4773 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4774 it != m_mapOfTrackers.end(); ++it) {
4775 TrackerWrapper *tracker = it->second;
4776 tracker->setOgreShowConfigDialog(showConfigDialog);
4777 }
4778}
4779
4791{
4793
4794 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4795 it != m_mapOfTrackers.end(); ++it) {
4796 TrackerWrapper *tracker = it->second;
4797 tracker->setOgreVisibilityTest(v);
4798 }
4799
4800#ifdef VISP_HAVE_OGRE
4801 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4802 it != m_mapOfTrackers.end(); ++it) {
4803 TrackerWrapper *tracker = it->second;
4804 tracker->faces.getOgreContext()->setWindowName("Multi Generic MBT (" + it->first + ")");
4805 }
4806#endif
4807}
4808
4817{
4819
4820 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4821 it != m_mapOfTrackers.end(); ++it) {
4822 TrackerWrapper *tracker = it->second;
4823 tracker->setOptimizationMethod(opt);
4824 }
4825}
4826
4840{
4841 if (m_mapOfTrackers.size() > 1) {
4842 throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4843 "to be configured with only one camera!");
4844 }
4845
4846 m_cMo = cdMo;
4847
4848 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4849 if (it != m_mapOfTrackers.end()) {
4850 TrackerWrapper *tracker = it->second;
4851 tracker->setPose(I, cdMo);
4852 }
4853 else {
4854 throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4855 m_referenceCameraName.c_str());
4856 }
4857}
4858
4872{
4873 if (m_mapOfTrackers.size() > 1) {
4874 throw vpException(vpTrackingException::initializationError, "The function setPose() requires the generic tracker "
4875 "to be configured with only one camera!");
4876 }
4877
4878 m_cMo = cdMo;
4879
4880 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(m_referenceCameraName);
4881 if (it != m_mapOfTrackers.end()) {
4882 TrackerWrapper *tracker = it->second;
4883 vpImageConvert::convert(I_color, m_I);
4884 tracker->setPose(m_I, cdMo);
4885 }
4886 else {
4887 throw vpException(vpTrackingException::initializationError, "The reference camera: %s does not exist!",
4888 m_referenceCameraName.c_str());
4889 }
4890}
4891
4904 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4905{
4906 if (m_mapOfTrackers.size() == 2) {
4907 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4908 it->second->setPose(I1, c1Mo);
4909
4910 ++it;
4911
4912 it->second->setPose(I2, c2Mo);
4913
4915 if (it != m_mapOfTrackers.end()) {
4916 // Set reference pose
4917 it->second->getPose(m_cMo);
4918 }
4919 else {
4920 throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4921 m_referenceCameraName.c_str());
4922 }
4923 }
4924 else {
4925 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4926 m_mapOfTrackers.size());
4927 }
4928}
4929
4942 const vpHomogeneousMatrix &c1Mo, const vpHomogeneousMatrix &c2Mo)
4943{
4944 if (m_mapOfTrackers.size() == 2) {
4945 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
4946 it->second->setPose(I_color1, c1Mo);
4947
4948 ++it;
4949
4950 it->second->setPose(I_color2, c2Mo);
4951
4953 if (it != m_mapOfTrackers.end()) {
4954 // Set reference pose
4955 it->second->getPose(m_cMo);
4956 }
4957 else {
4958 throw vpException(vpTrackingException::fatalError, "The reference camera: %s does not exist!",
4959 m_referenceCameraName.c_str());
4960 }
4961 }
4962 else {
4963 throw vpException(vpTrackingException::fatalError, "Require two cameras! There are %d cameras!",
4964 m_mapOfTrackers.size());
4965 }
4966}
4967
4983void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
4984 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
4985{
4986 // Set the reference cMo
4987 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
4988 std::map<std::string, const vpImage<unsigned char> *>::const_iterator it_img =
4989 mapOfImages.find(m_referenceCameraName);
4990 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
4991
4992 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
4993 TrackerWrapper *tracker = it_tracker->second;
4994 tracker->setPose(*it_img->second, it_camPose->second);
4995 tracker->getPose(m_cMo);
4996 }
4997 else {
4998 throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
4999 }
5000
5001 // Vector of missing pose matrices for cameras
5002 std::vector<std::string> vectorOfMissingCameraPoses;
5003
5004 // Set pose for the specified cameras
5005 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5006 if (it_tracker->first != m_referenceCameraName) {
5007 it_img = mapOfImages.find(it_tracker->first);
5008 it_camPose = mapOfCameraPoses.find(it_tracker->first);
5009
5010 if (it_img != mapOfImages.end() && it_camPose != mapOfCameraPoses.end()) {
5011 // Set pose
5012 TrackerWrapper *tracker = it_tracker->second;
5013 tracker->setPose(*it_img->second, it_camPose->second);
5014 }
5015 else {
5016 vectorOfMissingCameraPoses.push_back(it_tracker->first);
5017 }
5018 }
5019 }
5020
5021 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5022 it != vectorOfMissingCameraPoses.end(); ++it) {
5023 it_img = mapOfImages.find(*it);
5024 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5026
5027 if (it_img != mapOfImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5028 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5029 m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5030 }
5031 else {
5033 "Missing image or missing camera transformation "
5034 "matrix! Cannot set pose for camera: %s!",
5035 it->c_str());
5036 }
5037 }
5038}
5039
5055void vpMbGenericTracker::setPose(const std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5056 const std::map<std::string, vpHomogeneousMatrix> &mapOfCameraPoses)
5057{
5058 // Set the reference cMo
5059 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(m_referenceCameraName);
5060 std::map<std::string, const vpImage<vpRGBa> *>::const_iterator it_img = mapOfColorImages.find(m_referenceCameraName);
5061 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camPose = mapOfCameraPoses.find(m_referenceCameraName);
5062
5063 if (it_tracker != m_mapOfTrackers.end() && it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5064 TrackerWrapper *tracker = it_tracker->second;
5065 tracker->setPose(*it_img->second, it_camPose->second);
5066 tracker->getPose(m_cMo);
5067 }
5068 else {
5069 throw vpException(vpTrackingException::fatalError, "Cannot set pose for the reference camera!");
5070 }
5071
5072 // Vector of missing pose matrices for cameras
5073 std::vector<std::string> vectorOfMissingCameraPoses;
5074
5075 // Set pose for the specified cameras
5076 for (it_tracker = m_mapOfTrackers.begin(); it_tracker != m_mapOfTrackers.end(); ++it_tracker) {
5077 if (it_tracker->first != m_referenceCameraName) {
5078 it_img = mapOfColorImages.find(it_tracker->first);
5079 it_camPose = mapOfCameraPoses.find(it_tracker->first);
5080
5081 if (it_img != mapOfColorImages.end() && it_camPose != mapOfCameraPoses.end()) {
5082 // Set pose
5083 TrackerWrapper *tracker = it_tracker->second;
5084 tracker->setPose(*it_img->second, it_camPose->second);
5085 }
5086 else {
5087 vectorOfMissingCameraPoses.push_back(it_tracker->first);
5088 }
5089 }
5090 }
5091
5092 for (std::vector<std::string>::const_iterator it = vectorOfMissingCameraPoses.begin();
5093 it != vectorOfMissingCameraPoses.end(); ++it) {
5094 it_img = mapOfColorImages.find(*it);
5095 std::map<std::string, vpHomogeneousMatrix>::const_iterator it_camTrans =
5097
5098 if (it_img != mapOfColorImages.end() && it_camTrans != m_mapOfCameraTransformationMatrix.end()) {
5099 vpHomogeneousMatrix cCurrentMo = it_camTrans->second * m_cMo;
5100 m_mapOfTrackers[*it]->setPose(*it_img->second, cCurrentMo);
5101 }
5102 else {
5104 "Missing image or missing camera transformation "
5105 "matrix! Cannot set pose for camera: %s!",
5106 it->c_str());
5107 }
5108 }
5109}
5110
5126{
5128
5129 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5130 it != m_mapOfTrackers.end(); ++it) {
5131 TrackerWrapper *tracker = it->second;
5132 tracker->setProjectionErrorComputation(flag);
5133 }
5134}
5135
5140{
5142
5143 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5144 it != m_mapOfTrackers.end(); ++it) {
5145 TrackerWrapper *tracker = it->second;
5146 tracker->setProjectionErrorDisplay(display);
5147 }
5148}
5149
5154{
5156
5157 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5158 it != m_mapOfTrackers.end(); ++it) {
5159 TrackerWrapper *tracker = it->second;
5160 tracker->setProjectionErrorDisplayArrowLength(length);
5161 }
5162}
5163
5165{
5167
5168 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5169 it != m_mapOfTrackers.end(); ++it) {
5170 TrackerWrapper *tracker = it->second;
5171 tracker->setProjectionErrorDisplayArrowThickness(thickness);
5172 }
5173}
5174
5180void vpMbGenericTracker::setReferenceCameraName(const std::string &referenceCameraName)
5181{
5182 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.find(referenceCameraName);
5183 if (it != m_mapOfTrackers.end()) {
5184 m_referenceCameraName = referenceCameraName;
5185 }
5186 else {
5187 std::cerr << "The reference camera: " << referenceCameraName << " does not exist!";
5188 }
5189}
5190
5192{
5194
5195 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5196 it != m_mapOfTrackers.end(); ++it) {
5197 TrackerWrapper *tracker = it->second;
5198 tracker->setScanLineVisibilityTest(v);
5199 }
5200}
5201
5214{
5215 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5216 it != m_mapOfTrackers.end(); ++it) {
5217 TrackerWrapper *tracker = it->second;
5218 tracker->setTrackerType(type);
5219 }
5220}
5221
5231void vpMbGenericTracker::setTrackerType(const std::map<std::string, int> &mapOfTrackerTypes)
5232{
5233 for (std::map<std::string, int>::const_iterator it = mapOfTrackerTypes.begin(); it != mapOfTrackerTypes.end(); ++it) {
5234 std::map<std::string, TrackerWrapper *>::const_iterator it_tracker = m_mapOfTrackers.find(it->first);
5235 if (it_tracker != m_mapOfTrackers.end()) {
5236 TrackerWrapper *tracker = it_tracker->second;
5237 tracker->setTrackerType(it->second);
5238 }
5239 }
5240}
5241
5251void vpMbGenericTracker::setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
5252{
5253 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5254 it != m_mapOfTrackers.end(); ++it) {
5255 TrackerWrapper *tracker = it->second;
5256 tracker->setUseDepthDenseTracking(name, useDepthDenseTracking);
5257 }
5258}
5259
5269void vpMbGenericTracker::setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
5270{
5271 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5272 it != m_mapOfTrackers.end(); ++it) {
5273 TrackerWrapper *tracker = it->second;
5274 tracker->setUseDepthNormalTracking(name, useDepthNormalTracking);
5275 }
5276}
5277
5287void vpMbGenericTracker::setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
5288{
5289 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5290 it != m_mapOfTrackers.end(); ++it) {
5291 TrackerWrapper *tracker = it->second;
5292 tracker->setUseEdgeTracking(name, useEdgeTracking);
5293 }
5294}
5295
5296#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5306void vpMbGenericTracker::setUseKltTracking(const std::string &name, const bool &useKltTracking)
5307{
5308 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5309 it != m_mapOfTrackers.end(); ++it) {
5310 TrackerWrapper *tracker = it->second;
5311 tracker->setUseKltTracking(name, useKltTracking);
5312 }
5313}
5314#endif
5315
5317{
// NOTE(review): signature line lost in extraction; the per-camera calls below
// identify this as vpMbGenericTracker::testTracking().
5318 // Test tracking fails only if all testTracking have failed
5319 bool isOneTestTrackingOk = false;
5320 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5321 it != m_mapOfTrackers.end(); ++it) {
5322 TrackerWrapper *tracker = it->second;
5323 try {
5324 tracker->testTracking();
5325 isOneTestTrackingOk = true;
5326 }
// Per-camera failures are only logged; the whole test fails only when no
// camera succeeds.
5327 catch (const vpException &e) {
5328 std::cerr << "[" << it->first << "] " << e.what() << std::endl;
5329 }
5330 }
5331
5332 if (!isOneTestTrackingOk) {
5333 std::ostringstream oss;
5334 oss << "Not enough moving edges to track the object. Try to reduce the "
5335 "threshold="
5336 << m_percentageGdPt << " using setGoodMovingEdgesRatioThreshold()";
// NOTE(review): line 5337 is missing from this extraction — presumably the
// throw of a vpTrackingException built from `oss`; verify against upstream.
5338 }
5339}
5340
5351{
// NOTE(review): signature line lost in extraction; from the map type and `&I`
// below this is the monocular overload track(const vpImage<unsigned char> &I).
// Wrap the single image under the reference camera name and delegate to the
// generic overload with empty point-cloud maps.
5352 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5353 mapOfImages[m_referenceCameraName] = &I;
5354
5355 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5356 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5357
5358 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5359}
5360
5371{
// NOTE(review): signature line lost in extraction; from the map type and
// `&I_color` below this is the monocular color overload
// track(const vpImage<vpRGBa> &I_color). Delegates to the generic overload
// with empty point-cloud maps.
5372 std::map<std::string, const vpImage<vpRGBa> *> mapOfColorImages;
5373 mapOfColorImages[m_referenceCameraName] = &I_color;
5374
5375 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5376 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5377
5378 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5379}
5380
5392{
// NOTE(review): signature line lost in extraction; from `&I1`/`&I2` below this
// is the stereo grayscale overload track(I1, I2). Requires exactly two
// registered cameras; images are associated with the cameras in the map's
// (alphabetical) iteration order.
5393 if (m_mapOfTrackers.size() == 2) {
5394 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5395 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5396 mapOfImages[it->first] = &I1;
5397 ++it;
5398
5399 mapOfImages[it->first] = &I2;
5400
5401 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5402 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5403
5404 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5405 }
5406 else {
5407 std::stringstream ss;
5408 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5409 throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5410 }
5411}
5412
5423void vpMbGenericTracker::track(const vpImage<vpRGBa> &I_color1, const vpImage<vpRGBa> &_colorI2)
5424{
5425 if (m_mapOfTrackers.size() == 2) {
5426 std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5427 std::map<std::string, const vpImage<vpRGBa> *> mapOfImages;
5428 mapOfImages[it->first] = &I_color1;
5429 ++it;
5430
5431 mapOfImages[it->first] = &_colorI2;
5432
5433 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5434 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5435
5436 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5437 }
5438 else {
5439 std::stringstream ss;
5440 ss << "Require two cameras! There are " << m_mapOfTrackers.size() << " cameras!";
5441 throw vpException(vpTrackingException::fatalError, ss.str().c_str());
5442 }
5443}
5444
5452void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages)
5453{
5454 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5455 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5456
5457 track(mapOfImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5458}
5459
5467void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages)
5468{
5469 std::map<std::string, const std::vector<vpColVector> *> mapOfPointClouds;
5470 std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
5471
5472 track(mapOfColorImages, mapOfPointClouds, mapOfWidths, mapOfHeights);
5473}
5474
5475#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_SEGMENTATION) && defined(VISP_HAVE_PCL_FILTERS) && defined(VISP_HAVE_PCL_COMMON)
// Track using grayscale images plus PCL point clouds (one of each per camera):
// validate inputs, run pre-tracking feature extraction, minimize via VVS,
// check the result, then run per-camera post-tracking bookkeeping.
// NOTE(review): doxygen-extracted listing — numeric gaps (5495, 5548-5549)
// mark statement lines dropped by the extraction: presumably the
// DEPTH_* tail of the tracker-type validity check and a trailing call before
// the closing brace; verify against upstream.
5484void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5485 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5486{
5487 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5488 it != m_mapOfTrackers.end(); ++it) {
5489 TrackerWrapper *tracker = it->second;
5490
5491 if ((tracker->m_trackerType & (EDGE_TRACKER |
5492#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5493 KLT_TRACKER |
5494#endif
5496 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5497 }
5498
// Image-based features (edge/KLT) require a valid image pointer per camera.
5499 if (tracker->m_trackerType & (EDGE_TRACKER
5500#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5501 | KLT_TRACKER
5502#endif
5503 ) &&
5504 mapOfImages[it->first] == nullptr) {
5505 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5506 }
5507
// Depth-based features require a valid point-cloud smart pointer per camera.
5508 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5509 !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5510 throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
5511 }
5512 }
5513
5514 preTracking(mapOfImages, mapOfPointClouds);
5515
5516 try {
5517 computeVVS(mapOfImages);
5518 }
5519 catch (...) {
// Mark the covariance as invalid before re-raising the original exception.
5520 covarianceMatrix = -1;
5521 throw; // throw the original exception
5522 }
5523
5524 testTracking();
5525
5526 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5527 it != m_mapOfTrackers.end(); ++it) {
5528 TrackerWrapper *tracker = it->second;
5529
5530 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5531 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5532 }
5533
5534 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5535
// Cache per-feature display data only when feature display is requested.
5536 if (displayFeatures) {
5537#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5538 if (tracker->m_trackerType & KLT_TRACKER) {
5539 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5540 }
5541#endif
5542
5543 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5544 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5545 }
5546 }
5547 }
5550}
5551
// Track using color images plus PCL point clouds: same pipeline as the
// grayscale+PCL overload, but each color image is first converted to the
// wrapper's internal grayscale buffer.
// NOTE(review): doxygen-extracted listing — numeric gaps (5572, 5635) mark
// statement lines dropped by the extraction; verify against upstream.
5560void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5561 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> &mapOfPointClouds)
5562{
5563 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5564 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5565 it != m_mapOfTrackers.end(); ++it) {
5566 TrackerWrapper *tracker = it->second;
5567
5568 if ((tracker->m_trackerType & (EDGE_TRACKER |
5569#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5570 KLT_TRACKER |
5571#endif
5573 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5574 }
5575
5576 if (tracker->m_trackerType & (EDGE_TRACKER
5577#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5578 | KLT_TRACKER
5579#endif
5580 ) &&
5581 mapOfImages[it->first] == nullptr) {
5582 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5583 }
// Convert the color input to the wrapper's grayscale buffer, which the
// image-based features work on.
5584 else if (tracker->m_trackerType & (EDGE_TRACKER
5585#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5586 | KLT_TRACKER
5587#endif
5588 ) &&
5589 mapOfImages[it->first] != nullptr) {
5590 vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5591 mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5592 }
5593
5594 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5595 !mapOfPointClouds[it->first]) { // mapOfPointClouds[it->first] == nullptr
5596 throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
5597 }
5598 }
5599
5600 preTracking(mapOfImages, mapOfPointClouds);
5601
5602 try {
5603 computeVVS(mapOfImages);
5604 }
5605 catch (...) {
// Mark the covariance as invalid before re-raising the original exception.
5606 covarianceMatrix = -1;
5607 throw; // throw the original exception
5608 }
5609
5610 testTracking();
5611
5612 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5613 it != m_mapOfTrackers.end(); ++it) {
5614 TrackerWrapper *tracker = it->second;
5615
5616 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5617 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5618 }
5619
5620 tracker->postTracking(mapOfImages[it->first], mapOfPointClouds[it->first]);
5621
5622 if (displayFeatures) {
5623#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5624 if (tracker->m_trackerType & KLT_TRACKER) {
5625 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5626 }
5627#endif
5628
5629 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5630 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5631 }
5632 }
5633 }
5636}
5637#endif
5638
// Track using grayscale images plus point clouds given as vectors of
// vpColVector with explicit width/height per camera: validate inputs, run
// pre-tracking, minimize via VVS, check the result, then per-camera
// post-tracking.
// NOTE(review): doxygen-extracted listing — numeric gaps (5662, 5716) mark
// statement lines dropped by the extraction; verify against upstream.
5649void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5650 std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5651 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5652 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5653{
5654 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5655 it != m_mapOfTrackers.end(); ++it) {
5656 TrackerWrapper *tracker = it->second;
5657
5658 if ((tracker->m_trackerType & (EDGE_TRACKER |
5659#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5660 KLT_TRACKER |
5661#endif
5663 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5664 }
5665
// Image-based features (edge/KLT) require a valid image pointer per camera.
5666 if (tracker->m_trackerType & (EDGE_TRACKER
5667#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5668 | KLT_TRACKER
5669#endif
5670 ) &&
5671 mapOfImages[it->first] == nullptr) {
5672 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5673 }
5674
// Depth-based features require a non-null point cloud per camera.
5675 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5676 (mapOfPointClouds[it->first] == nullptr)) {
5677 throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5678 }
5679 }
5680
5681 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5682
5683 try {
5684 computeVVS(mapOfImages);
5685 }
5686 catch (...) {
// Mark the covariance as invalid before re-raising the original exception.
5687 covarianceMatrix = -1;
5688 throw; // throw the original exception
5689 }
5690
5691 testTracking();
5692
5693 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5694 it != m_mapOfTrackers.end(); ++it) {
5695 TrackerWrapper *tracker = it->second;
5696
5697 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5698 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5699 }
5700
5701 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5702
5703 if (displayFeatures) {
5704#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5705 if (tracker->m_trackerType & KLT_TRACKER) {
5706 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5707 }
5708#endif
5709
5710 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5711 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5712 }
5713 }
5714 }
5717}
5718
// Track using color images plus point clouds given as vectors of vpColVector:
// same pipeline as the grayscale variant, with each color image converted to
// the wrapper's internal grayscale buffer first.
// NOTE(review): doxygen-extracted listing — numeric gaps (5743, 5806) mark
// statement lines dropped by the extraction; verify against upstream.
5729void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5730 std::map<std::string, const std::vector<vpColVector> *> &mapOfPointClouds,
5731 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5732 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5733{
5734 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5735 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5736 it != m_mapOfTrackers.end(); ++it) {
5737 TrackerWrapper *tracker = it->second;
5738
5739 if ((tracker->m_trackerType & (EDGE_TRACKER |
5740#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5741 KLT_TRACKER |
5742#endif
5744 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5745 }
5746
5747 if (tracker->m_trackerType & (EDGE_TRACKER
5748#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5749 | KLT_TRACKER
5750#endif
5751 ) &&
5752 mapOfColorImages[it->first] == nullptr) {
5753 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5754 }
// Convert the color input to the wrapper's grayscale buffer, which the
// image-based features work on.
5755 else if (tracker->m_trackerType & (EDGE_TRACKER
5756#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5757 | KLT_TRACKER
5758#endif
5759 ) &&
5760 mapOfColorImages[it->first] != nullptr) {
5761 vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5762 mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5763 }
5764
5765 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5766 (mapOfPointClouds[it->first] == nullptr)) {
5767 throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5768 }
5769 }
5770
5771 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5772
5773 try {
5774 computeVVS(mapOfImages);
5775 }
5776 catch (...) {
// Mark the covariance as invalid before re-raising the original exception.
5777 covarianceMatrix = -1;
5778 throw; // throw the original exception
5779 }
5780
5781 testTracking();
5782
5783 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5784 it != m_mapOfTrackers.end(); ++it) {
5785 TrackerWrapper *tracker = it->second;
5786
5787 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5788 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5789 }
5790
5791 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5792
5793 if (displayFeatures) {
5794#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5795 if (tracker->m_trackerType & KLT_TRACKER) {
5796 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5797 }
5798#endif
5799
5800 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5801 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5802 }
5803 }
5804 }
5807}
5808
// Track using grayscale images plus point clouds given as vpMatrix with
// explicit width/height per camera.
// NOTE(review): doxygen-extracted listing — numeric gaps (5822, 5876) mark
// statement lines dropped by the extraction; verify against upstream.
5809void vpMbGenericTracker::track(std::map<std::string, const vpImage<unsigned char> *> &mapOfImages,
5810 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5811 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5812 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5813{
5814 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5815 it != m_mapOfTrackers.end(); ++it) {
5816 TrackerWrapper *tracker = it->second;
5817
5818 if ((tracker->m_trackerType & (EDGE_TRACKER |
5819#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5820 KLT_TRACKER |
5821#endif
5823 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5824 }
5825
5826 if (tracker->m_trackerType & (EDGE_TRACKER
5827#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5828 | KLT_TRACKER
5829#endif
5830 ) &&
5831 mapOfImages[it->first] == nullptr) {
5832 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5833 }
5834
5835 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5836 (mapOfPointClouds[it->first] == nullptr)) {
5837 throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5838 }
5839 }
5840
5841 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5842
5843 try {
5844 computeVVS(mapOfImages);
5845 }
5846 catch (...) {
// Mark the covariance as invalid before re-raising the original exception.
5847 covarianceMatrix = -1;
5848 throw; // throw the original exception
5849 }
5850
5851 testTracking();
5852
5853 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5854 it != m_mapOfTrackers.end(); ++it) {
5855 TrackerWrapper *tracker = it->second;
5856
5857 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5858 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5859 }
5860
5861 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5862
5863 if (displayFeatures) {
5864#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5865 if (tracker->m_trackerType & KLT_TRACKER) {
5866 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5867 }
5868#endif
5869
5870 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5871 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5872 }
5873 }
5874 }
5877}
5878
// Track using color images plus point clouds given as vpMatrix: each color
// image is first converted to the wrapper's internal grayscale buffer.
// NOTE(review): doxygen-extracted listing — numeric gaps (5903, 5966) mark
// statement lines dropped by the extraction; verify against upstream.
5889void vpMbGenericTracker::track(std::map<std::string, const vpImage<vpRGBa> *> &mapOfColorImages,
5890 std::map<std::string, const vpMatrix *> &mapOfPointClouds,
5891 std::map<std::string, unsigned int> &mapOfPointCloudWidths,
5892 std::map<std::string, unsigned int> &mapOfPointCloudHeights)
5893{
5894 std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
5895 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5896 it != m_mapOfTrackers.end(); ++it) {
5897 TrackerWrapper *tracker = it->second;
5898
5899 if ((tracker->m_trackerType & (EDGE_TRACKER |
5900#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5901 KLT_TRACKER |
5902#endif
5904 throw vpException(vpException::fatalError, "Bad tracker type: %d", tracker->m_trackerType);
5905 }
5906
5907 if (tracker->m_trackerType & (EDGE_TRACKER
5908#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5909 | KLT_TRACKER
5910#endif
5911 ) &&
5912 mapOfColorImages[it->first] == nullptr) {
5913 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
5914 }
// Convert the color input to the wrapper's grayscale buffer, which the
// image-based features work on.
5915 else if (tracker->m_trackerType & (EDGE_TRACKER
5916#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5917 | KLT_TRACKER
5918#endif
5919 ) &&
5920 mapOfColorImages[it->first] != nullptr) {
5921 vpImageConvert::convert(*mapOfColorImages[it->first], tracker->m_I);
5922 mapOfImages[it->first] = &tracker->m_I; // update grayscale image buffer
5923 }
5924
5925 if (tracker->m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) &&
5926 (mapOfPointClouds[it->first] == nullptr)) {
5927 throw vpException(vpException::fatalError, "Pointcloud is nullptr!");
5928 }
5929 }
5930
5931 preTracking(mapOfImages, mapOfPointClouds, mapOfPointCloudWidths, mapOfPointCloudHeights);
5932
5933 try {
5934 computeVVS(mapOfImages);
5935 }
5936 catch (...) {
// Mark the covariance as invalid before re-raising the original exception.
5937 covarianceMatrix = -1;
5938 throw; // throw the original exception
5939 }
5940
5941 testTracking();
5942
5943 for (std::map<std::string, TrackerWrapper *>::const_iterator it = m_mapOfTrackers.begin();
5944 it != m_mapOfTrackers.end(); ++it) {
5945 TrackerWrapper *tracker = it->second;
5946
5947 if (tracker->m_trackerType & EDGE_TRACKER && displayFeatures) {
5948 tracker->m_featuresToBeDisplayedEdge = tracker->getFeaturesForDisplayEdge();
5949 }
5950
5951 tracker->postTracking(mapOfImages[it->first], mapOfPointCloudWidths[it->first], mapOfPointCloudHeights[it->first]);
5952
5953 if (displayFeatures) {
5954#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5955 if (tracker->m_trackerType & KLT_TRACKER) {
5956 tracker->m_featuresToBeDisplayedKlt = tracker->getFeaturesForDisplayKlt();
5957 }
5958#endif
5959
5960 if (tracker->m_trackerType & DEPTH_NORMAL_TRACKER) {
5961 tracker->m_featuresToBeDisplayedDepthNormal = tracker->getFeaturesForDisplayDepthNormal();
5962 }
5963 }
5964 }
5967}
5968
// Default wrapper: edge-only tracker with default VVS gain (lambda) and
// iteration budget.
5970vpMbGenericTracker::TrackerWrapper::TrackerWrapper()
5971 : m_error(), m_L(), m_trackerType(EDGE_TRACKER), m_w(), m_weightedError()
5972{
5973 m_lambda = 1.0;
5974 m_maxIter = 30;
5975
5976#ifdef VISP_HAVE_OGRE
// Name the Ogre rendering contexts so this wrapper's windows are identifiable.
5977 faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5978
5979 m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
5980#endif
5981}
5982
// Wrapper with an explicit tracker type (bitmask of EDGE/KLT/DEPTH_* flags).
// Rejects masks with no supported feature bit set.
5983vpMbGenericTracker::TrackerWrapper::TrackerWrapper(int trackerType)
5984 : m_error(), m_L(), m_trackerType(trackerType), m_w(), m_weightedError()
5985{
5986 if ((m_trackerType & (EDGE_TRACKER |
5987#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
5988 KLT_TRACKER |
5989#endif
// NOTE(review): line 5990 is missing from this extraction — presumably the
// DEPTH_* tail of the mask and the `)) == 0) {` close; verify upstream.
5991 throw vpException(vpTrackingException::badValue, "Bad value for tracker type: %d!", m_trackerType);
5992 }
5993
5994 m_lambda = 1.0;
5995 m_maxIter = 30;
5996
5997#ifdef VISP_HAVE_OGRE
5998 faces.getOgreContext()->setWindowName("MBT TrackerWrapper");
5999
6000 m_projectionErrorFaces.getOgreContext()->setWindowName("MBT TrackerWrapper (projection error)");
6001#endif
6002}
6003
5969 6004// Implemented only for debugging purposes: use TrackerWrapper as a standalone tracker
// NOTE(review): this is TrackerWrapper::computeVVS — the signature line (6005)
// was dropped by the extraction; the `ptr_I` parameter below matches the
// computeVVSInit(const vpImage<unsigned char> *const) overload. Several other
// statement lines (numeric gaps: 6049, 6063, 6183, 6187, 6201…) were dropped
// too — presumably the per-iteration interaction-matrix/residual computation,
// the pose/ctTc0 exponential-map updates and a final edge display call; verify
// against upstream. Code left byte-identical.
6006{
// Build the stacked error/interaction-matrix containers for the enabled features.
6007 computeVVSInit(ptr_I);
6008
6009 if (m_error.getRows() < 4) {
6010 throw vpTrackingException(vpTrackingException::notEnoughPointError, "Error: not enough features");
6011 }
6012
6013 double normRes = 0;
6014 double normRes_1 = -1;
6015 unsigned int iter = 0;
6016
// Per-feature-type weighting factors (all neutral here).
6017 double factorEdge = 1.0;
6018#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6019 double factorKlt = 1.0;
6020#endif
6021 double factorDepth = 1.0;
6022 double factorDepthDense = 1.0;
6023
6024 vpMatrix LTL;
6025 vpColVector LTR, v;
6026 vpColVector error_prev;
6027
6028 double mu = m_initialMu;
6029 vpHomogeneousMatrix cMo_prev;
6030#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6031 vpHomogeneousMatrix ctTc0_Prev; // Only for KLT
6032#endif
6033 bool isoJoIdentity = m_isoJoIdentity; // Backup since it can be modified if L is not full rank
6034 if (isoJoIdentity)
6035 oJo.eye();
6036
6037 // Covariance
6038 vpColVector W_true(m_error.getRows());
6039 vpMatrix L_true, LVJ_true;
6040
// Row counts used to slice the stacked error vector per feature type.
6041 unsigned int nb_edge_features = m_error_edge.getRows();
6042#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6043 unsigned int nb_klt_features = m_error_klt.getRows();
6044#endif
6045 unsigned int nb_depth_features = m_error_depthNormal.getRows();
6046 unsigned int nb_depth_dense_features = m_error_depthDense.getRows();
6047
// Iterate until the residual stops improving or the iteration budget is spent.
6048 while (std::fabs(normRes_1 - normRes) > m_stopCriteriaEpsilon && (iter < m_maxIter)) {
6050
6051 bool reStartFromLastIncrement = false;
6052 computeVVSCheckLevenbergMarquardt(iter, m_error, error_prev, cMo_prev, mu, reStartFromLastIncrement);
6053
6054#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
// When LM rolls back the pose, also roll back the KLT reference transform.
6055 if (reStartFromLastIncrement) {
6056 if (m_trackerType & KLT_TRACKER) {
6057 ctTc0 = ctTc0_Prev;
6058 }
6059 }
6060#endif
6061
6062 if (!reStartFromLastIncrement) {
6064
// Keep unweighted copies of L for the covariance computation after the loop.
6065 if (computeCovariance) {
6066 L_true = m_L;
6067 if (!isoJoIdentity) {
6068 vpVelocityTwistMatrix cVo;
6069 cVo.buildFrom(m_cMo);
6070 LVJ_true = (m_L * cVo * oJo);
6071 }
6072 }
6073
6074 vpVelocityTwistMatrix cVo;
6075 if (iter == 0) {
6076 // If all the 6 dof should be estimated, we check if the interaction
6077 // matrix is full rank. If not we remove automatically the dof that
6078 // cannot be estimated. This is particularly useful when considering
6079 // circles (rank 5) and cylinders (rank 4)
6080 if (isoJoIdentity) {
6081 cVo.buildFrom(m_cMo);
6082
6083 vpMatrix K; // kernel
6084 unsigned int rank = (m_L * cVo).kernel(K);
6085 if (rank == 0) {
6086 throw vpException(vpException::fatalError, "Rank=0, cannot estimate the pose !");
6087 }
6088
6089 if (rank != 6) {
6090 vpMatrix I; // Identity
6091 I.eye(6);
6092 oJo = I - K.AtA();
6093
6094 isoJoIdentity = false;
6095 }
6096 }
6097 }
6098
6099 // Weighting
6100 double num = 0;
6101 double den = 0;
6102
// Apply robust weights block by block; start_index walks the stacked rows.
6103 unsigned int start_index = 0;
6104 if (m_trackerType & EDGE_TRACKER) {
6105 for (unsigned int i = 0; i < nb_edge_features; i++) {
6106 double wi = m_w_edge[i] * m_factor[i] * factorEdge;
6107 W_true[i] = wi;
6108 m_weightedError[i] = wi * m_error[i];
6109
6110 num += wi * vpMath::sqr(m_error[i]);
6111 den += wi;
6112
6113 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6114 m_L[i][j] *= wi;
6115 }
6116 }
6117
6118 start_index += nb_edge_features;
6119 }
6120
6121#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6122 if (m_trackerType & KLT_TRACKER) {
6123 for (unsigned int i = 0; i < nb_klt_features; i++) {
6124 double wi = m_w_klt[i] * factorKlt;
6125 W_true[start_index + i] = wi;
6126 m_weightedError[start_index + i] = wi * m_error_klt[i];
6127
6128 num += wi * vpMath::sqr(m_error[start_index + i]);
6129 den += wi;
6130
6131 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6132 m_L[start_index + i][j] *= wi;
6133 }
6134 }
6135
6136 start_index += nb_klt_features;
6137 }
6138#endif
6139
6140 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6141 for (unsigned int i = 0; i < nb_depth_features; i++) {
6142 double wi = m_w_depthNormal[i] * factorDepth;
6143 m_w[start_index + i] = m_w_depthNormal[i];
6144 m_weightedError[start_index + i] = wi * m_error[start_index + i];
6145
6146 num += wi * vpMath::sqr(m_error[start_index + i]);
6147 den += wi;
6148
6149 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6150 m_L[start_index + i][j] *= wi;
6151 }
6152 }
6153
6154 start_index += nb_depth_features;
6155 }
6156
6157 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6158 for (unsigned int i = 0; i < nb_depth_dense_features; i++) {
6159 double wi = m_w_depthDense[i] * factorDepthDense;
6160 m_w[start_index + i] = m_w_depthDense[i];
6161 m_weightedError[start_index + i] = wi * m_error[start_index + i];
6162
6163 num += wi * vpMath::sqr(m_error[start_index + i]);
6164 den += wi;
6165
6166 for (unsigned int j = 0; j < m_L.getCols(); j++) {
6167 m_L[start_index + i][j] *= wi;
6168 }
6169 }
6170
6171 // start_index += nb_depth_dense_features;
6172 }
6173
// Solve the (possibly LM-damped) normal equations for the velocity twist v.
6174 computeVVSPoseEstimation(isoJoIdentity, iter, m_L, LTL, m_weightedError, m_error, error_prev, LTR, mu, v);
6175
6176 cMo_prev = m_cMo;
6177#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6178 if (m_trackerType & KLT_TRACKER) {
6179 ctTc0_Prev = ctTc0;
6180 }
6181#endif
6182
6184
6185#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6186 if (m_trackerType & KLT_TRACKER) {
6188 }
6189#endif
// Track the residual norm to drive the stop criterion.
6190 normRes_1 = normRes;
6191
6192 normRes = sqrt(num / den);
6193 }
6194
6195 iter++;
6196 }
6197
6198 computeCovarianceMatrixVVS(isoJoIdentity, W_true, cMo_prev, L_true, LVJ_true, m_error);
6199
6200 if (m_trackerType & EDGE_TRACKER) {
6202 }
6203}
6204
6206{
// NOTE(review): signature line lost in extraction; from the message below this
// is the no-argument computeVVSInit() overload — a guard that must never be
// reached (callers must use the image overload).
6207 throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6208 "TrackerWrapper::computeVVSInit("
6209 ") should not be called!");
6210}
6211
// Size the stacked VVS containers (L, error, weights) from the enabled
// feature types; disabled types have their per-feature buffers cleared.
// NOTE(review): doxygen-extracted listing — numeric gaps (6230, 6242, 6253)
// mark dropped lines, presumably the per-feature init calls for KLT,
// depth-normal and depth-dense; verify against upstream.
6212void vpMbGenericTracker::TrackerWrapper::computeVVSInit(const vpImage<unsigned char> *const ptr_I)
6213{
6214 initMbtTracking(ptr_I);
6215
6216 unsigned int nbFeatures = 0;
6217
6218 if (m_trackerType & EDGE_TRACKER) {
6219 nbFeatures += m_error_edge.getRows();
6220 }
6221 else {
6222 m_error_edge.clear();
6223 m_weightedError_edge.clear();
6224 m_L_edge.clear();
6225 m_w_edge.clear();
6226 }
6227
6228#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6229 if (m_trackerType & KLT_TRACKER) {
6231 nbFeatures += m_error_klt.getRows();
6232 }
6233 else {
6234 m_error_klt.clear();
6235 m_weightedError_klt.clear();
6236 m_L_klt.clear();
6237 m_w_klt.clear();
6238 }
6239#endif
6240
6241 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6243 nbFeatures += m_error_depthNormal.getRows();
6244 }
6245 else {
6246 m_error_depthNormal.clear();
6247 m_weightedError_depthNormal.clear();
6248 m_L_depthNormal.clear();
6249 m_w_depthNormal.clear();
6250 }
6251
6252 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6254 nbFeatures += m_error_depthDense.getRows();
6255 }
6256 else {
6257 m_error_depthDense.clear();
6258 m_weightedError_depthDense.clear();
6259 m_L_depthDense.clear();
6260 m_w_depthDense.clear();
6261 }
6262
// Stacked containers: one row per feature, 6 columns (pose dof); weights
// start neutral at 1.
6263 m_L.resize(nbFeatures, 6, false, false);
6264 m_error.resize(nbFeatures, false);
6265
6266 m_weightedError.resize(nbFeatures, false);
6267 m_w.resize(nbFeatures, false);
6268 m_w = 1;
6269}
6270
6272{
// NOTE(review): signature line lost in extraction; from the message below this
// is the no-argument computeVVSInteractionMatrixAndResidu() overload — a
// guard that must never be reached (callers must use the image overload).
6273 throw vpException(vpException::fatalError, "vpMbGenericTracker::"
6274 "TrackerWrapper::"
6275 "computeVVSInteractionMatrixAndR"
6276 "esidu() should not be called!");
6277}
6278
// Compute each enabled feature type's interaction matrix and residual, then
// stack them into the global m_L / m_error containers in the fixed order
// edge, KLT, depth-normal, depth-dense.
// NOTE(review): doxygen-extracted listing — numeric gaps (6282, 6287, 6292,
// 6296) mark dropped lines, presumably the per-feature-type computation calls
// inside the first four if-blocks; verify against upstream.
6279void vpMbGenericTracker::TrackerWrapper::computeVVSInteractionMatrixAndResidu(const vpImage<unsigned char> *const ptr_I)
6280{
6281 if (m_trackerType & EDGE_TRACKER) {
6283 }
6284
6285#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6286 if (m_trackerType & KLT_TRACKER) {
6288 }
6289#endif
6290
6291 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6293 }
6294
6295 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6297 }
6298
// Stack per-type blocks; start_index tracks the next free row.
6299 unsigned int start_index = 0;
6300 if (m_trackerType & EDGE_TRACKER) {
6301 m_L.insert(m_L_edge, start_index, 0);
6302 m_error.insert(start_index, m_error_edge);
6303
6304 start_index += m_error_edge.getRows();
6305 }
6306
6307#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6308 if (m_trackerType & KLT_TRACKER) {
6309 m_L.insert(m_L_klt, start_index, 0);
6310 m_error.insert(start_index, m_error_klt);
6311
6312 start_index += m_error_klt.getRows();
6313 }
6314#endif
6315
6316 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6317 m_L.insert(m_L_depthNormal, start_index, 0);
6318 m_error.insert(start_index, m_error_depthNormal);
6319
6320 start_index += m_error_depthNormal.getRows();
6321 }
6322
6323 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6324 m_L.insert(m_L_depthDense, start_index, 0);
6325 m_error.insert(start_index, m_error_depthDense);
6326
6327 // start_index += m_error_depthDense.getRows();
6328 }
6329}
6330
// Stack the per-feature robust weights into the global weight vector m_w, using
// the same row order as computeVVSInteractionMatrixAndResidu().
// NOTE(review): the signature line (6331) and the edge / depth-dense weight
// computations (6336, 6361) were elided by the documentation extraction.
6332{
6333 unsigned int start_index = 0;
6334
6335 if (m_trackerType & EDGE_TRACKER) {
6337 m_w.insert(start_index, m_w_edge);
6338
6339 start_index += m_w_edge.getRows();
6340 }
6341
6342#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6343 if (m_trackerType & KLT_TRACKER) {
6344 vpMbTracker::computeVVSWeights(m_robust_klt, m_error_klt, m_w_klt);
6345 m_w.insert(start_index, m_w_klt);
6346
6347 start_index += m_w_klt.getRows();
6348 }
6349#endif
6350
6351 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6352 if (m_depthNormalUseRobust) {
6353 vpMbTracker::computeVVSWeights(m_robust_depthNormal, m_error_depthNormal, m_w_depthNormal);
6354 m_w.insert(start_index, m_w_depthNormal);
6355 }
6356
 // NOTE(review): the offset is advanced even when m_depthNormalUseRobust is
 // false; presumably m_w keeps its initial value of 1 for those rows — confirm.
6357 start_index += m_w_depthNormal.getRows();
6358 }
6359
6360 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6362 m_w.insert(start_index, m_w_depthDense);
6363
6364 // start_index += m_w_depthDense.getRows();
6365 }
6366}
6367
// Draw the tracked features (if displayFeatures is set) and the model
// primitives on a grayscale image, then optionally the Ogre overlay.
// Feature rows are tagged by features[i][0]: 0 = moving-edge site,
// 1 = KLT point (with id), 2 = depth-normal arrow.
6368void vpMbGenericTracker::TrackerWrapper::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
6369 const vpCameraParameters &cam, const vpColor &col,
6370 unsigned int thickness, bool displayFullModel)
6371{
6372 if (displayFeatures) {
6373 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6374 for (size_t i = 0; i < features.size(); i++) {
6375 if (vpMath::equal(features[i][0], 0)) {
6376 vpImagePoint ip(features[i][1], features[i][2]);
6377 int state = static_cast<int>(features[i][3]);
6378
 // Color-code the site by its vpMeSite state.
 // NOTE(review): the per-state vpDisplay::displayCross calls and several
 // case labels (original lines 6380-6381, 6385, 6388-6389, 6392-6393,
 // 6397, 6401) were elided by the documentation extraction.
6379 switch (state) {
6382 break;
6383
6384 case vpMeSite::CONTRAST:
6386 break;
6387
6390 break;
6391
6394 break;
6395
6396 case vpMeSite::TOO_NEAR:
6398 break;
6399
6400 default:
6402 }
6403 }
6404 else if (vpMath::equal(features[i][0], 1)) {
6405 vpImagePoint ip1(features[i][1], features[i][2]);
6407
 // Display the KLT point id next to the feature.
6408 vpImagePoint ip2(features[i][3], features[i][4]);
6409 double id = features[i][5];
6410 std::stringstream ss;
6411 ss << id;
6412 vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
6413 }
6414 else if (vpMath::equal(features[i][0], 2)) {
6415 vpImagePoint im_centroid(features[i][1], features[i][2]);
6416 vpImagePoint im_extremity(features[i][3], features[i][4]);
 // NOTE(review): this re-tests the branch condition, so `desired` is
 // always true here (arrow always blue); possibly features[i][5] was
 // intended — confirm against upstream source.
6417 bool desired = vpMath::equal(features[i][0], 2);
6418 vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
6419 }
6420 }
6421 }
6422
 // Model primitives: rows tagged 0 = line segment, 1 = ellipse (normalized
 // centered moments n20, n11, n02).
6423 std::vector<std::vector<double> > models =
6424 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6425 for (size_t i = 0; i < models.size(); i++) {
6426 if (vpMath::equal(models[i][0], 0)) {
6427 vpImagePoint ip1(models[i][1], models[i][2]);
6428 vpImagePoint ip2(models[i][3], models[i][4]);
6429 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6430 }
6431 else if (vpMath::equal(models[i][0], 1)) {
6432 vpImagePoint center(models[i][1], models[i][2]);
6433 double n20 = models[i][3];
6434 double n11 = models[i][4];
6435 double n02 = models[i][5];
6436 vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6437 }
6438 }
6439
6440#ifdef VISP_HAVE_OGRE
6441 if ((m_trackerType & EDGE_TRACKER)
6442#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6443 || (m_trackerType & KLT_TRACKER)
6444#endif
6445 ) {
6446 if (useOgre)
6447 faces.displayOgre(cMo);
6448 }
6449#endif
6450}
6451
// Color-image overload of display(); identical logic to the grayscale version
// above (features, model primitives, optional Ogre overlay).
6452void vpMbGenericTracker::TrackerWrapper::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
6453 const vpCameraParameters &cam, const vpColor &col,
6454 unsigned int thickness, bool displayFullModel)
6455{
6456 if (displayFeatures) {
6457 std::vector<std::vector<double> > features = getFeaturesForDisplay();
6458 for (size_t i = 0; i < features.size(); i++) {
6459 if (vpMath::equal(features[i][0], 0)) {
6460 vpImagePoint ip(features[i][1], features[i][2]);
6461 int state = static_cast<int>(features[i][3]);
6462
 // NOTE(review): per-state vpDisplay::displayCross calls and several case
 // labels (original lines 6464-6465, 6469, 6472-6473, 6476-6477, 6481,
 // 6485) were elided by the documentation extraction.
6463 switch (state) {
6466 break;
6467
6468 case vpMeSite::CONTRAST:
6470 break;
6471
6474 break;
6475
6478 break;
6479
6480 case vpMeSite::TOO_NEAR:
6482 break;
6483
6484 default:
6486 }
6487 }
6488 else if (vpMath::equal(features[i][0], 1)) {
6489 vpImagePoint ip1(features[i][1], features[i][2]);
6491
6492 vpImagePoint ip2(features[i][3], features[i][4]);
6493 double id = features[i][5];
6494 std::stringstream ss;
6495 ss << id;
6496 vpDisplay::displayText(I, ip2, ss.str(), vpColor::red);
6497 }
6498 else if (vpMath::equal(features[i][0], 2)) {
6499 vpImagePoint im_centroid(features[i][1], features[i][2]);
6500 vpImagePoint im_extremity(features[i][3], features[i][4]);
 // NOTE(review): always true here (re-tests the branch condition) — same
 // suspicious pattern as the grayscale overload; confirm upstream.
6501 bool desired = vpMath::equal(features[i][0], 2);
6502 vpDisplay::displayArrow(I, im_centroid, im_extremity, desired ? vpColor::blue : vpColor::red, 4, 2, thickness);
6503 }
6504 }
6505 }
6506
6507 std::vector<std::vector<double> > models =
6508 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
6509 for (size_t i = 0; i < models.size(); i++) {
6510 if (vpMath::equal(models[i][0], 0)) {
6511 vpImagePoint ip1(models[i][1], models[i][2]);
6512 vpImagePoint ip2(models[i][3], models[i][4]);
6513 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
6514 }
6515 else if (vpMath::equal(models[i][0], 1)) {
6516 vpImagePoint center(models[i][1], models[i][2]);
6517 double n20 = models[i][3];
6518 double n11 = models[i][4];
6519 double n02 = models[i][5];
6520 vpDisplay::displayEllipse(I, center, n20, n11, n02, true, col, thickness);
6521 }
6522 }
6523
6524#ifdef VISP_HAVE_OGRE
6525 if ((m_trackerType & EDGE_TRACKER)
6526#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6527 || (m_trackerType & KLT_TRACKER)
6528#endif
6529 ) {
6530 if (useOgre)
6531 faces.displayOgre(cMo);
6532 }
6533#endif
6534}
6535
6536std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getFeaturesForDisplay()
6537{
6538 std::vector<std::vector<double> > features;
6539
6540 if (m_trackerType & EDGE_TRACKER) {
6541 // m_featuresToBeDisplayedEdge updated after computeVVS()
6542 features.insert(features.end(), m_featuresToBeDisplayedEdge.begin(), m_featuresToBeDisplayedEdge.end());
6543 }
6544
6545#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6546 if (m_trackerType & KLT_TRACKER) {
6547 // m_featuresToBeDisplayedKlt updated after postTracking()
6548 features.insert(features.end(), m_featuresToBeDisplayedKlt.begin(), m_featuresToBeDisplayedKlt.end());
6549 }
6550#endif
6551
6552 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6553 // m_featuresToBeDisplayedDepthNormal updated after postTracking()
6554 features.insert(features.end(), m_featuresToBeDisplayedDepthNormal.begin(),
6555 m_featuresToBeDisplayedDepthNormal.end());
6556 }
6557
6558 return features;
6559}
6560
6561std::vector<std::vector<double> > vpMbGenericTracker::TrackerWrapper::getModelForDisplay(unsigned int width,
6562 unsigned int height,
6563 const vpHomogeneousMatrix &cMo,
6564 const vpCameraParameters &cam,
6565 bool displayFullModel)
6566{
6567 std::vector<std::vector<double> > models;
6568
6569 // Do not add multiple times the same models
6570 if (m_trackerType == EDGE_TRACKER) {
6571 models = vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6572 }
6573#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6574 else if (m_trackerType == KLT_TRACKER) {
6575 models = vpMbKltTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6576 }
6577#endif
6578 else if (m_trackerType == DEPTH_NORMAL_TRACKER) {
6579 models = vpMbDepthNormalTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6580 }
6581 else if (m_trackerType == DEPTH_DENSE_TRACKER) {
6582 models = vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6583 }
6584 else {
6585 // Edge and KLT trackers use the same primitives
6586 if (m_trackerType & EDGE_TRACKER) {
6587 std::vector<std::vector<double> > edgeModels =
6588 vpMbEdgeTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6589 models.insert(models.end(), edgeModels.begin(), edgeModels.end());
6590 }
6591
6592 // Depth dense and depth normal trackers use the same primitives
6593 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6594 std::vector<std::vector<double> > depthDenseModels =
6595 vpMbDepthDenseTracker::getModelForDisplay(width, height, cMo, cam, displayFullModel);
6596 models.insert(models.end(), depthDenseModels.begin(), depthDenseModels.end());
6597 }
6598 }
6599
6600 return models;
6601}
6602
// (Re)initialize the wrapper from the current pose m_cMo: recompute face
// visibility (classic or Ogre), scan-line rendering, and each enabled feature
// type's internal state. Requires loadModel() to have been called first.
6603void vpMbGenericTracker::TrackerWrapper::init(const vpImage<unsigned char> &I)
6604{
6605 if (!modelInitialised) {
6606 throw vpException(vpException::fatalError, "model not initialized");
6607 }
6608
 // FOV clipping needs the camera field of view for the current image size.
6609 if (useScanLine || clippingFlag > 3)
6610 m_cam.computeFov(I.getWidth(), I.getHeight());
6611
6612 bool reInitialisation = false;
6613 if (!useOgre) {
6614 faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6615 }
6616 else {
6617#ifdef VISP_HAVE_OGRE
6618 if (!faces.isOgreInitialised()) {
6619 faces.setBackgroundSizeOgre(I.getHeight(), I.getWidth());
6620
6621 faces.setOgreShowConfigDialog(ogreShowConfigDialog);
6622 faces.initOgre(m_cam);
6623 // Turn off Ogre config dialog display for the next call to this
6624 // function since settings are saved in the ogre.cfg file and used
6625 // during the next call
6626 ogreShowConfigDialog = false;
6627 }
6628
6629 faces.setVisibleOgre(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6630#else
 // Ogre requested but not built in: fall back to the classic visibility test.
6631 faces.setVisible(I.getWidth(), I.getHeight(), m_cam, m_cMo, angleAppears, angleDisappears, reInitialisation);
6632#endif
6633 }
6634
6635 if (useScanLine) {
6636 faces.computeClippedPolygons(m_cMo, m_cam);
6637 faces.computeScanLineRender(m_cam, I.getWidth(), I.getHeight());
6638 }
6639
 // NOTE(review): the KLT re-initialization call (original line 6642) and the
 // edge-tracker statements at 6646 and 6651 were elided by the documentation
 // extraction; confirm against the upstream source.
6640#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6641 if (m_trackerType & KLT_TRACKER)
6643#endif
6644
6645 if (m_trackerType & EDGE_TRACKER) {
6647
6648 bool a = false;
6649 vpMbEdgeTracker::visibleFace(I, m_cMo, a); // should be useless, but keep it for nbvisiblepolygone
6650
6652 }
6653
6654 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6655 vpMbDepthNormalTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthNormalTracker::init(I);
6656
6657 if (m_trackerType & DEPTH_DENSE_TRACKER)
6658 vpMbDepthDenseTracker::computeVisibility(I.getWidth(), I.getHeight()); // vpMbDepthDenseTracker::init(I);
6659}
6660
6661void vpMbGenericTracker::TrackerWrapper::initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3,
6662 double radius, int idFace, const std::string &name)
6663{
6664 if (m_trackerType & EDGE_TRACKER)
6665 vpMbEdgeTracker::initCircle(p1, p2, p3, radius, idFace, name);
6666
6667#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6668 if (m_trackerType & KLT_TRACKER)
6669 vpMbKltTracker::initCircle(p1, p2, p3, radius, idFace, name);
6670#endif
6671}
6672
6673void vpMbGenericTracker::TrackerWrapper::initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace,
6674 const std::string &name)
6675{
6676 if (m_trackerType & EDGE_TRACKER)
6677 vpMbEdgeTracker::initCylinder(p1, p2, radius, idFace, name);
6678
6679#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6680 if (m_trackerType & KLT_TRACKER)
6681 vpMbKltTracker::initCylinder(p1, p2, radius, idFace, name);
6682#endif
6683}
6684
// Forward a polygonal face (defined by its corners) to every enabled feature
// tracker so each one builds its own representation of the face.
// NOTE(review): the four base-class initFaceFromCorners() calls (original
// lines 6688, 6692, 6696, 6699) were elided by the documentation extraction.
6685void vpMbGenericTracker::TrackerWrapper::initFaceFromCorners(vpMbtPolygon &polygon)
6686{
6687 if (m_trackerType & EDGE_TRACKER)
6689
6690#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6691 if (m_trackerType & KLT_TRACKER)
6693#endif
6694
6695 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6697
6698 if (m_trackerType & DEPTH_DENSE_TRACKER)
6700}
6701
// Forward a polygonal face (defined by its lines) to every enabled feature
// tracker, mirroring initFaceFromCorners() above.
// NOTE(review): the four base-class initFaceFromLines() calls (original lines
// 6705, 6709, 6713, 6716) were elided by the documentation extraction.
6702void vpMbGenericTracker::TrackerWrapper::initFaceFromLines(vpMbtPolygon &polygon)
6703{
6704 if (m_trackerType & EDGE_TRACKER)
6706
6707#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6708 if (m_trackerType & KLT_TRACKER)
6710#endif
6711
6712 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6714
6715 if (m_trackerType & DEPTH_DENSE_TRACKER)
6717}
6718
// Initialize the moving-edge part of the tracking for the current image, when
// the edge feature is enabled.
// NOTE(review): the edge-tracker statements inside the if-block (original
// lines 6722-6723) were elided by the documentation extraction.
6719void vpMbGenericTracker::TrackerWrapper::initMbtTracking(const vpImage<unsigned char> *const ptr_I)
6720{
6721 if (m_trackerType & EDGE_TRACKER) {
6724 }
6725}
6726
// Parse an XML configuration file (pugixml backend) and apply the settings to
// every feature type handled by this wrapper: camera, visibility angles,
// clipping, LOD, moving-edge, KLT, depth-normal and depth-dense parameters.
// Throws vpException::ioError when the file cannot be parsed or when ViSP was
// built without pugixml.
6727void vpMbGenericTracker::TrackerWrapper::loadConfigFile(const std::string &configFile, bool verbose)
6728{
6729#if defined(VISP_HAVE_PUGIXML)
6730 // Load projection error config
6731 vpMbTracker::loadConfigFile(configFile, verbose)
6732
6733 vpMbtXmlGenericParser xmlp((vpMbtXmlGenericParser::vpParserType)m_trackerType);
6734 xmlp.setVerbose(verbose);
6735 xmlp.setCameraParameters(m_cam);
6736 xmlp.setAngleAppear(vpMath::deg(angleAppears));
6737 xmlp.setAngleDisappear(vpMath::deg(angleDisappears));
6738
6739 // Edge
6740 xmlp.setEdgeMe(me);
6741
 // Default KLT values used when the XML file does not override them.
6742 // KLT
6743 xmlp.setKltMaxFeatures(10000);
6744 xmlp.setKltWindowSize(5);
6745 xmlp.setKltQuality(0.01);
6746 xmlp.setKltMinDistance(5);
6747 xmlp.setKltHarrisParam(0.01);
6748 xmlp.setKltBlockSize(3);
6749 xmlp.setKltPyramidLevels(3);
6750#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6751 xmlp.setKltMaskBorder(maskBorder);
6752#endif
6753
6754 // Depth normal
6755 xmlp.setDepthNormalFeatureEstimationMethod(m_depthNormalFeatureEstimationMethod);
6756 xmlp.setDepthNormalPclPlaneEstimationMethod(m_depthNormalPclPlaneEstimationMethod);
6757 xmlp.setDepthNormalPclPlaneEstimationRansacMaxIter(m_depthNormalPclPlaneEstimationRansacMaxIter);
6758 xmlp.setDepthNormalPclPlaneEstimationRansacThreshold(m_depthNormalPclPlaneEstimationRansacThreshold);
6759 xmlp.setDepthNormalSamplingStepX(m_depthNormalSamplingStepX);
6760 xmlp.setDepthNormalSamplingStepY(m_depthNormalSamplingStepY);
6761
6762 // Depth dense
6763 xmlp.setDepthDenseSamplingStepX(m_depthDenseSamplingStepX);
6764 xmlp.setDepthDenseSamplingStepY(m_depthDenseSamplingStepY);
6765
6766 try {
6767 if (verbose) {
6768 std::cout << " *********** Parsing XML for";
6769 }
6770
 // Build the list of enabled feature names only for the verbose banner.
6771 std::vector<std::string> tracker_names;
6772 if (m_trackerType & EDGE_TRACKER)
6773 tracker_names.push_back("Edge");
6774#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6775 if (m_trackerType & KLT_TRACKER)
6776 tracker_names.push_back("Klt");
6777#endif
6778 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6779 tracker_names.push_back("Depth Normal");
6780 if (m_trackerType & DEPTH_DENSE_TRACKER)
6781 tracker_names.push_back("Depth Dense");
6782
6783 if (verbose) {
6784 for (size_t i = 0; i < tracker_names.size(); i++) {
6785 std::cout << " " << tracker_names[i];
6786 if (i == tracker_names.size() - 1) {
6787 std::cout << " ";
6788 }
6789 }
6790
6791 std::cout << "Model-Based Tracker ************ " << std::endl;
6792 }
6793
6794 xmlp.parse(configFile);
6795 }
6796 catch (...) {
6797 throw vpException(vpException::ioError, "Can't open XML file \"%s\"\n ", configFile.c_str());
6798 }
6799
 // Apply the parsed values back to the wrapper.
6800 vpCameraParameters camera;
6801 xmlp.getCameraParameters(camera);
6802 setCameraParameters(camera);
6803
6804 angleAppears = vpMath::rad(xmlp.getAngleAppear());
6805 angleDisappears = vpMath::rad(xmlp.getAngleDisappear());
6806
6807 if (xmlp.hasNearClippingDistance())
6808 setNearClippingDistance(xmlp.getNearClippingDistance());
6809
6810 if (xmlp.hasFarClippingDistance())
6811 setFarClippingDistance(xmlp.getFarClippingDistance());
6812
 // NOTE(review): the FOV-clipping activation statement (original line 6814)
 // was elided by the documentation extraction.
6813 if (xmlp.getFovClipping()) {
6815 }
6816
6817 useLodGeneral = xmlp.getLodState();
6818 minLineLengthThresholdGeneral = xmlp.getLodMinLineLengthThreshold();
6819 minPolygonAreaThresholdGeneral = xmlp.getLodMinPolygonAreaThreshold();
6820
 // NOTE(review): the LOD re-application statements (original lines 6821,
 // 6823-6826) were elided by the documentation extraction.
6822 if (this->getNbPolygon() > 0) {
6827 }
6828
 // NOTE(review): the call applying meParser to the edge tracker (original
 // line 6832) was elided by the documentation extraction.
6829 // Edge
6830 vpMe meParser;
6831 xmlp.getEdgeMe(meParser);
6833
6834 // KLT
6835#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6836 tracker.setMaxFeatures(static_cast<int>(xmlp.getKltMaxFeatures()));
6837 tracker.setWindowSize(static_cast<int>(xmlp.getKltWindowSize()));
6838 tracker.setQuality(xmlp.getKltQuality());
6839 tracker.setMinDistance(xmlp.getKltMinDistance());
6840 tracker.setHarrisFreeParameter(xmlp.getKltHarrisParam());
6841 tracker.setBlockSize(static_cast<int>(xmlp.getKltBlockSize()));
6842 tracker.setPyramidLevels(static_cast<int>(xmlp.getKltPyramidLevels()));
6843 maskBorder = xmlp.getKltMaskBorder();
6844
6845 // if(useScanLine)
6846 faces.getMbScanLineRenderer().setMaskBorder(maskBorder);
6847#endif
6848
6849 // Depth normal
6850 setDepthNormalFeatureEstimationMethod(xmlp.getDepthNormalFeatureEstimationMethod());
6851 setDepthNormalPclPlaneEstimationMethod(xmlp.getDepthNormalPclPlaneEstimationMethod());
6852 setDepthNormalPclPlaneEstimationRansacMaxIter(xmlp.getDepthNormalPclPlaneEstimationRansacMaxIter());
6853 setDepthNormalPclPlaneEstimationRansacThreshold(xmlp.getDepthNormalPclPlaneEstimationRansacThreshold());
6854 setDepthNormalSamplingStep(xmlp.getDepthNormalSamplingStepX(), xmlp.getDepthNormalSamplingStepY());
6855
6856 // Depth dense
6857 setDepthDenseSamplingStep(xmlp.getDepthDenseSamplingStepX(), xmlp.getDepthDenseSamplingStepY());
6858#else
6859 (void)configFile;
6860 (void)verbose;
6861 throw(vpException(vpException::ioError, "vpMbGenericTracker::TrackerWrapper::loadConfigFile() needs pugixml built-in 3rdparty"));
6862#endif
6863}
6864
6865#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_SEGMENTATION) && defined(VISP_HAVE_PCL_FILTERS) && defined(VISP_HAVE_PCL_COMMON)
// Post-tracking step (PCL point-cloud variant): re-detect visible faces after
// the pose update, refresh scan-line rendering, recompute depth visibility
// from the cloud dimensions, and re-initialize the moving edges.
6866void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6867 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6868{
6869#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6870 // KLT
 // postTracking() returns true when too many KLT features were lost and the
 // KLT part must be re-initialized from the current image.
6871 if (m_trackerType & KLT_TRACKER) {
6872 if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6873 vpMbKltTracker::reinit(*ptr_I);
6874 }
6875 }
6876#endif
6877
6878 // Looking for new visible face
6879 if (m_trackerType & EDGE_TRACKER) {
6880 bool newvisibleface = false;
6881 vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6882
6883 if (useScanLine) {
6884 faces.computeClippedPolygons(m_cMo, m_cam);
6885 faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6886 }
6887 }
6888
6889 // Depth normal
6890 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6891 vpMbDepthNormalTracker::computeVisibility(point_cloud->width, point_cloud->height);
6892
6893 // Depth dense
6894 if (m_trackerType & DEPTH_DENSE_TRACKER)
6895 vpMbDepthDenseTracker::computeVisibility(point_cloud->width, point_cloud->height);
6896
 // NOTE(review): the edge statements at original lines 6899, 6903 and 6906
 // (likely moving-edge update/reinit and projection-error computation) were
 // elided by the documentation extraction.
6897 // Edge
6898 if (m_trackerType & EDGE_TRACKER) {
6900
6901 vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo, false);
6902 // Reinit the moving edge for the lines which need it.
6904
6905 if (computeProjError) {
6907 }
6908 }
6909}
6910
// Pre-tracking step (PCL point-cloud variant): run the low-level tracking /
// point-cloud segmentation of each enabled feature type, rethrowing any
// failure after logging which feature failed.
// NOTE(review): the actual per-feature tracking calls inside the try blocks
// (original lines 6916, 6927, 6938, 6948) were elided by the documentation
// extraction.
6911void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
6912 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
6913{
6914 if (m_trackerType & EDGE_TRACKER) {
6915 try {
6917 }
6918 catch (...) {
6919 std::cerr << "Error in moving edge tracking" << std::endl;
6920 throw;
6921 }
6922 }
6923
6924#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6925 if (m_trackerType & KLT_TRACKER) {
6926 try {
6928 }
6929 catch (const vpException &e) {
6930 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
6931 throw;
6932 }
6933 }
6934#endif
6935
6936 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
6937 try {
6939 }
6940 catch (...) {
6941 std::cerr << "Error in Depth normal tracking" << std::endl;
6942 throw;
6943 }
6944 }
6945
6946 if (m_trackerType & DEPTH_DENSE_TRACKER) {
6947 try {
6949 }
6950 catch (...) {
6951 std::cerr << "Error in Depth dense tracking" << std::endl;
6952 throw;
6953 }
6954 }
6955}
6956#endif
6957
// Post-tracking step (raw point-cloud variant): same logic as the PCL overload
// above, but depth visibility is recomputed from explicit cloud dimensions.
6958void vpMbGenericTracker::TrackerWrapper::postTracking(const vpImage<unsigned char> *const ptr_I,
6959 const unsigned int pointcloud_width,
6960 const unsigned int pointcloud_height)
6961{
6962#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
6963 // KLT
6964 if (m_trackerType & KLT_TRACKER) {
6965 if (vpMbKltTracker::postTracking(*ptr_I, m_w_klt)) {
6966 vpMbKltTracker::reinit(*ptr_I);
6967 }
6968 }
6969#endif
6970
6971 // Looking for new visible face
6972 if (m_trackerType & EDGE_TRACKER) {
6973 bool newvisibleface = false;
6974 vpMbEdgeTracker::visibleFace(*ptr_I, m_cMo, newvisibleface);
6975
6976 if (useScanLine) {
6977 faces.computeClippedPolygons(m_cMo, m_cam);
6978 faces.computeScanLineRender(m_cam, ptr_I->getWidth(), ptr_I->getHeight());
6979 }
6980 }
6981
6982 // Depth normal
6983 if (m_trackerType & DEPTH_NORMAL_TRACKER)
6984 vpMbDepthNormalTracker::computeVisibility(pointcloud_width, pointcloud_height);
6985
6986 // Depth dense
6987 if (m_trackerType & DEPTH_DENSE_TRACKER)
6988 vpMbDepthDenseTracker::computeVisibility(pointcloud_width, pointcloud_height);
6989
 // NOTE(review): the edge statements at original lines 6992, 6996 and 6999
 // were elided by the documentation extraction (see the PCL overload above).
6990 // Edge
6991 if (m_trackerType & EDGE_TRACKER) {
6993
6994 vpMbEdgeTracker::initMovingEdge(*ptr_I, m_cMo, false);
6995 // Reinit the moving edge for the lines which need it.
6997
6998 if (computeProjError) {
7000 }
7001 }
7002}
7003
// Pre-tracking step for a point cloud given as a vector of vpColVector points
// with explicit width/height; mirrors the PCL overload above.
// NOTE(review): the edge and KLT tracking calls inside the first two try
// blocks (original lines 7011, 7022) were elided by the documentation
// extraction.
7004void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
7005 const std::vector<vpColVector> *const point_cloud,
7006 const unsigned int pointcloud_width,
7007 const unsigned int pointcloud_height)
7008{
7009 if (m_trackerType & EDGE_TRACKER) {
7010 try {
7012 }
7013 catch (...) {
7014 std::cerr << "Error in moving edge tracking" << std::endl;
7015 throw;
7016 }
7017 }
7018
7019#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7020 if (m_trackerType & KLT_TRACKER) {
7021 try {
7023 }
7024 catch (const vpException &e) {
7025 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
7026 throw;
7027 }
7028 }
7029#endif
7030
7031 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7032 try {
7033 vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7034 }
7035 catch (...) {
7036 std::cerr << "Error in Depth tracking" << std::endl;
7037 throw;
7038 }
7039 }
7040
7041 if (m_trackerType & DEPTH_DENSE_TRACKER) {
7042 try {
7043 vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7044 }
7045 catch (...) {
7046 std::cerr << "Error in Depth dense tracking" << std::endl;
7047 throw;
7048 }
7049 }
7050}
7051
// Pre-tracking step for a point cloud given as a vpMatrix with explicit
// width/height; mirrors the std::vector<vpColVector> overload above.
// NOTE(review): the edge and KLT tracking calls inside the first two try
// blocks (original lines 7059, 7070) were elided by the documentation
// extraction.
7052void vpMbGenericTracker::TrackerWrapper::preTracking(const vpImage<unsigned char> *const ptr_I,
7053 const vpMatrix *const point_cloud,
7054 const unsigned int pointcloud_width,
7055 const unsigned int pointcloud_height)
7056{
7057 if (m_trackerType & EDGE_TRACKER) {
7058 try {
7060 }
7061 catch (...) {
7062 std::cerr << "Error in moving edge tracking" << std::endl;
7063 throw;
7064 }
7065 }
7066
7067#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7068 if (m_trackerType & KLT_TRACKER) {
7069 try {
7071 }
7072 catch (const vpException &e) {
7073 std::cerr << "Error in KLT tracking: " << e.what() << std::endl;
7074 throw;
7075 }
7076 }
7077#endif
7078
7079 if (m_trackerType & DEPTH_NORMAL_TRACKER) {
7080 try {
7081 vpMbDepthNormalTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7082 }
7083 catch (...) {
7084 std::cerr << "Error in Depth tracking" << std::endl;
7085 throw;
7086 }
7087 }
7088
7089 if (m_trackerType & DEPTH_DENSE_TRACKER) {
7090 try {
7091 vpMbDepthDenseTracker::segmentPointCloud(*point_cloud, pointcloud_width, pointcloud_height);
7092 }
7093 catch (...) {
7094 std::cerr << "Error in Depth dense tracking" << std::endl;
7095 throw;
7096 }
7097 }
7098}
7099
7100void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> *const I,
7101 const vpImage<vpRGBa> *const I_color, const std::string &cad_name,
7102 const vpHomogeneousMatrix &cMo, bool verbose,
7103 const vpHomogeneousMatrix &T)
7104{
7105 m_cMo.eye();
7106
7107 // Edge
7108 vpMbtDistanceLine *l;
7109 vpMbtDistanceCylinder *cy;
7110 vpMbtDistanceCircle *ci;
7111
7112 for (unsigned int i = 0; i < scales.size(); i++) {
7113 if (scales[i]) {
7114 for (std::list<vpMbtDistanceLine *>::const_iterator it = lines[i].begin(); it != lines[i].end(); ++it) {
7115 l = *it;
7116 if (l != nullptr)
7117 delete l;
7118 l = nullptr;
7119 }
7120
7121 for (std::list<vpMbtDistanceCylinder *>::const_iterator it = cylinders[i].begin(); it != cylinders[i].end();
7122 ++it) {
7123 cy = *it;
7124 if (cy != nullptr)
7125 delete cy;
7126 cy = nullptr;
7127 }
7128
7129 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles[i].begin(); it != circles[i].end(); ++it) {
7130 ci = *it;
7131 if (ci != nullptr)
7132 delete ci;
7133 ci = nullptr;
7134 }
7135
7136 lines[i].clear();
7137 cylinders[i].clear();
7138 circles[i].clear();
7139 }
7140 }
7141
7142 nline = 0;
7143 ncylinder = 0;
7144 ncircle = 0;
7145 nbvisiblepolygone = 0;
7146
7147 // KLT
7148#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7149 // delete the Klt Polygon features
7150 for (std::list<vpMbtDistanceKltPoints *>::const_iterator it = kltPolygons.begin(); it != kltPolygons.end(); ++it) {
7151 vpMbtDistanceKltPoints *kltpoly = *it;
7152 if (kltpoly != nullptr) {
7153 delete kltpoly;
7154 }
7155 kltpoly = nullptr;
7156 }
7157 kltPolygons.clear();
7158
7159 for (std::list<vpMbtDistanceKltCylinder *>::const_iterator it = kltCylinders.begin(); it != kltCylinders.end();
7160 ++it) {
7161 vpMbtDistanceKltCylinder *kltPolyCylinder = *it;
7162 if (kltPolyCylinder != nullptr) {
7163 delete kltPolyCylinder;
7164 }
7165 kltPolyCylinder = nullptr;
7166 }
7167 kltCylinders.clear();
7168
7169 // delete the structures used to display circles
7170 for (std::list<vpMbtDistanceCircle *>::const_iterator it = circles_disp.begin(); it != circles_disp.end(); ++it) {
7171 ci = *it;
7172 if (ci != nullptr) {
7173 delete ci;
7174 }
7175 ci = nullptr;
7176 }
7177 circles_disp.clear();
7178
7179 firstInitialisation = true;
7180
7181#endif
7182
7183 // Depth normal
7184 for (size_t i = 0; i < m_depthNormalFaces.size(); i++) {
7185 delete m_depthNormalFaces[i];
7186 m_depthNormalFaces[i] = nullptr;
7187 }
7188 m_depthNormalFaces.clear();
7189
7190 // Depth dense
7191 for (size_t i = 0; i < m_depthDenseFaces.size(); i++) {
7192 delete m_depthDenseFaces[i];
7193 m_depthDenseFaces[i] = nullptr;
7194 }
7195 m_depthDenseFaces.clear();
7196
7197 faces.reset();
7198
7199 loadModel(cad_name, verbose, T);
7200 if (I) {
7201 initFromPose(*I, cMo);
7202 }
7203 else {
7204 initFromPose(*I_color, cMo);
7205 }
7206}
7207
// Grayscale-image convenience overload: delegates to the pointer-based
// reInitModel() with no color image.
7208void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<unsigned char> &I, const std::string &cad_name,
7209 const vpHomogeneousMatrix &cMo, bool verbose,
7210 const vpHomogeneousMatrix &T)
7211{
7212 reInitModel(&I, nullptr, cad_name, cMo, verbose, T);
7213}
7214
// Color-image convenience overload: delegates to the pointer-based
// reInitModel() with no grayscale image.
7215void vpMbGenericTracker::TrackerWrapper::reInitModel(const vpImage<vpRGBa> &I_color, const std::string &cad_name,
7216 const vpHomogeneousMatrix &cMo, bool verbose,
7217 const vpHomogeneousMatrix &T)
7218{
7219 reInitModel(nullptr, &I_color, cad_name, cMo, verbose, T);
7220}
7221
// NOTE(review): the signature line (7222) and the body statements (7224, 7226,
// 7228-7229) were elided by the documentation extraction. From its position and
// the KLT compile guard, this presumably resets each enabled base tracker
// (likely resetTracker) — confirm against the upstream source.
7223{
7225#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7227#endif
7230}
7231
// Store the camera parameters and propagate them to every base tracker.
// NOTE(review): the per-base setCameraParameters() calls (original lines 7236,
// 7238, 7240-7241) were elided by the documentation extraction.
7232void vpMbGenericTracker::TrackerWrapper::setCameraParameters(const vpCameraParameters &cam)
7233{
7234 m_cam = cam;
7235
7237#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7239#endif
7242}
7243
// Set the clipping flags; the edge tracker's implementation updates the shared polygon/face state.
7244void vpMbGenericTracker::TrackerWrapper::setClipping(const unsigned int &flags) { vpMbEdgeTracker::setClipping(flags); }
7245
// Set the far clipping distance.
// NOTE(review): the body statement (original line 7248, presumably the
// vpMbEdgeTracker::setFarClippingDistance(dist) call) was elided by the
// documentation extraction.
7246void vpMbGenericTracker::TrackerWrapper::setFarClippingDistance(const double &dist)
7247{
7249}
7250
// Set the near clipping distance.
// NOTE(review): the body statement (original line 7253, presumably the
// vpMbEdgeTracker::setNearClippingDistance(dist) call) was elided by the
// documentation extraction.
7251void vpMbGenericTracker::TrackerWrapper::setNearClippingDistance(const double &dist)
7252{
7254}
7255
// Enable/disable the Ogre-based visibility test and name the Ogre window.
// NOTE(review): the base-class forwarding call (original line 7258) was elided
// by the documentation extraction.
7256void vpMbGenericTracker::TrackerWrapper::setOgreVisibilityTest(const bool &v)
7257{
7259#ifdef VISP_HAVE_OGRE
7260 faces.getOgreContext()->setWindowName("TrackerWrapper");
7261#endif
7262}
7263
// Force the pose to cdMo and resynchronize every feature type. Exactly one of
// I / I_color is expected to be non-null; a color image is first converted to
// the internal grayscale buffer m_I. When the KLT feature is enabled, the
// KLT-specific setPose() path is used; otherwise a full init() is performed.
7264void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> *const I,
7265 const vpImage<vpRGBa> *const I_color, const vpHomogeneousMatrix &cdMo)
7266{
7267 bool performKltSetPose = false;
7268 if (I_color) {
7269 vpImageConvert::convert(*I_color, m_I);
7270 }
7271
7272#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7273 if (m_trackerType & KLT_TRACKER) {
7274 performKltSetPose = true;
7275
7276 if (useScanLine || clippingFlag > 3) {
7277 m_cam.computeFov(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7278 }
7279
7280 vpMbKltTracker::setPose(I ? *I : m_I, cdMo);
7281 }
7282#endif
7283
 // No KLT: a plain init() from the new pose is enough.
7284 if (!performKltSetPose) {
7285 m_cMo = cdMo;
7286 init(I ? *I : m_I);
7287 return;
7288 }
7289
 // NOTE(review): the edge statement at original line 7291 (likely
 // resetMovingEdge()) was elided by the documentation extraction.
7290 if (m_trackerType & EDGE_TRACKER) {
7292 }
7293
7294 if (useScanLine) {
7295 faces.computeClippedPolygons(m_cMo, m_cam);
7296 faces.computeScanLineRender(m_cam, I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7297 }
7298
 // Disabled pyramidal variant kept for reference; the #else branch is active.
7299#if 0
7300 if (m_trackerType & EDGE_TRACKER) {
7301 initPyramid(I, Ipyramid);
7302
7303 unsigned int i = static_cast<unsigned int>(scales.size());
7304 do {
7305 i--;
7306 if (scales[i]) {
7307 downScale(i);
7308 vpMbEdgeTracker::initMovingEdge(*Ipyramid[i], cMo);
7309 upScale(i);
7310 }
7311 } while (i != 0);
7312
7313 cleanPyramid(Ipyramid);
7314 }
7315#else
 // NOTE(review): the edge statement at original line 7317 (likely
 // initMovingEdge on the current image) was elided by the documentation
 // extraction.
7316 if (m_trackerType & EDGE_TRACKER) {
7318 }
7319#endif
7320
7321 // Depth normal
7322 vpMbDepthNormalTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7323
7324 // Depth dense
7325 vpMbDepthDenseTracker::computeVisibility(I ? I->getWidth() : m_I.getWidth(), I ? I->getHeight() : m_I.getHeight());
7326}
7327
// Grayscale-image convenience overload: delegates to the pointer-based setPose().
7328void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cdMo)
7329{
7330 setPose(&I, nullptr, cdMo);
7331}
7332
// Color-image convenience overload: delegates to the pointer-based setPose().
7333void vpMbGenericTracker::TrackerWrapper::setPose(const vpImage<vpRGBa> &I_color, const vpHomogeneousMatrix &cdMo)
7334{
7335 setPose(nullptr, &I_color, cdMo);
7336}
7337
// Enable/disable the projection error computation.
// NOTE(review): the body statement (original line 7340, presumably a
// base-class forwarding call) was elided by the documentation extraction.
7338void vpMbGenericTracker::TrackerWrapper::setProjectionErrorComputation(const bool &flag)
7339{
7341}
7342
// Enable/disable the scan-line visibility test for every base tracker.
// NOTE(review): the per-base forwarding calls (original lines 7345, 7347,
// 7349-7350) were elided by the documentation extraction.
7343void vpMbGenericTracker::TrackerWrapper::setScanLineVisibilityTest(const bool &v)
7344{
7345#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7348#endif
7351}
7352
// Validate and store the feature-type bitmask for this wrapper.
// NOTE(review): the tail of the validity mask (original line 7359, presumably
// "DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER)) == 0) {") was elided by the
// documentation extraction, so the full accepted set is not visible here.
7353void vpMbGenericTracker::TrackerWrapper::setTrackerType(int type)
7354{
7355 if ((type & (EDGE_TRACKER |
7356#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7357 KLT_TRACKER |
7358#endif
7360 throw vpException(vpTrackingException::badValue, "bad value for tracker type: !", type);
7361 }
7362
7363 m_trackerType = type;
7364}
7365
// NOTE(review): the signature line (7366) and the edge statement (7369) were
// elided by the documentation extraction. Given the call in track() below,
// this is presumably testTracking(), delegating to the edge tracker's check —
// confirm against the upstream source.
7367{
7368 if (m_trackerType & EDGE_TRACKER) {
7370 }
7371}
7372
// Image-only tracking entry point: only valid when an image-based feature
// (edge or KLT) is enabled; delegates to the image + point-cloud overload with
// a null cloud. The parameter name is compiled out when PCL is unavailable,
// since the delegation below then disappears as well.
7373void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> &
7374#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_SEGMENTATION) && defined(VISP_HAVE_PCL_FILTERS) && defined(VISP_HAVE_PCL_COMMON)
7375 I
7376#endif
7377)
7378{
7379 if ((m_trackerType & (EDGE_TRACKER
7380#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7381 | KLT_TRACKER
7382#endif
7383 )) == 0) {
7384 std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7385 return;
7386 }
7387
7388#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_SEGMENTATION) && defined(VISP_HAVE_PCL_FILTERS) && defined(VISP_HAVE_PCL_COMMON)
7389 track(&I, nullptr);
7390#endif
7391}
7392
// Intentionally empty color-image overload; kept for debugging only, as noted.
7393void vpMbGenericTracker::TrackerWrapper::track(const vpImage<vpRGBa> &)
7394{
7395 // not exposed to the public API, only for debug
7396}
7397
7398#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_SEGMENTATION) && defined(VISP_HAVE_PCL_FILTERS) && defined(VISP_HAVE_PCL_COMMON)
7399void vpMbGenericTracker::TrackerWrapper::track(const vpImage<unsigned char> *const ptr_I,
7400 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud)
7401{
7402 if ((m_trackerType & (EDGE_TRACKER |
7403#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7404 KLT_TRACKER |
7405#endif
7407 std::cerr << "Bad tracker type: " << m_trackerType << std::endl;
7408 return;
7409 }
7410
7411 if (m_trackerType & (EDGE_TRACKER
7412#if defined(VISP_HAVE_MODULE_KLT) && defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
7413 | KLT_TRACKER
7414#endif
7415 ) &&
7416 ptr_I == nullptr) {
7417 throw vpException(vpException::fatalError, "Image pointer is nullptr!");
7418 }
7419
7420 if (m_trackerType & (DEPTH_NORMAL_TRACKER | DEPTH_DENSE_TRACKER) && !point_cloud) { // point_cloud == nullptr
7421 throw vpException(vpException::fatalError, "Pointcloud smart pointer is nullptr!");
7422 }
7423
7424 // Back-up cMo in case of exception
7425 vpHomogeneousMatrix cMo_1 = m_cMo;
7426 try {
7427 preTracking(ptr_I, point_cloud);
7428
7429 try {
7430 computeVVS(ptr_I);
7431 }
7432 catch (...) {
7433 covarianceMatrix = -1;
7434 throw; // throw the original exception
7435 }
7436
7437 if (m_trackerType == EDGE_TRACKER)
7438 testTracking();
7439
7440 postTracking(ptr_I, point_cloud);
7441
7442 }
7443 catch (const vpException &e) {
7444 std::cerr << "Exception: " << e.what() << std::endl;
7445 m_cMo = cMo_1;
7446 throw; // rethrowing the original exception
7447 }
7448}
7449#endif
7450END_VISP_NAMESPACE
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
Class to define RGB colors available for display functionalities.
Definition vpColor.h:157
static const vpColor red
Definition vpColor.h:198
static const vpColor cyan
Definition vpColor.h:207
static const vpColor blue
Definition vpColor.h:204
static const vpColor purple
Definition vpColor.h:209
static const vpColor yellow
Definition vpColor.h:206
static const vpColor green
Definition vpColor.h:201
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayEllipse(const vpImage< unsigned char > &I, const vpImagePoint &center, const double &coef1, const double &coef2, const double &coef3, bool use_normalized_centered_moments, const vpColor &color, unsigned int thickness=1, bool display_center=false, bool display_arc=false)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
@ ioError
I/O error.
Definition vpException.h:67
@ badValue
Used to indicate that a value is not in the allowed range.
Definition vpException.h:73
@ notInitialized
Used to indicate that a parameter is not initialized.
Definition vpException.h:74
@ fatalError
Fatal error.
Definition vpException.h:72
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:131
unsigned int getWidth() const
Definition vpImage.h:242
unsigned int getHeight() const
Definition vpImage.h:181
static std::string getFileExtension(const std::string &pathname, bool checkFile=false)
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:83
static double rad(double deg)
Definition vpMath.h:129
static double sqr(double x)
Definition vpMath.h:203
static bool equal(double x, double y, double threshold=0.001)
Definition vpMath.h:470
static double deg(double rad)
Definition vpMath.h:119
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void testTracking() VP_OVERRIDE
vpColVector m_error_depthDense
(s - s*)
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
vpColVector m_w_depthDense
Robust weights.
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
void segmentPointCloud(const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
vpColVector m_w_depthNormal
Robust weights.
vpColVector m_error_depthNormal
(s - s*)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
void computeVisibility(unsigned int width, unsigned int height)
vpColVector m_w_edge
Robust weights.
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
void computeProjectionError(const vpImage< unsigned char > &_I)
virtual void testTracking() VP_OVERRIDE
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual void computeVVSWeights()
vpColVector m_error_edge
(s - s*)
virtual void computeVVSInit() VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
void trackMovingEdge(const vpImage< unsigned char > &I)
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
void computeVVSFirstPhaseFactor(const vpImage< unsigned char > &I, unsigned int lvl=0)
void updateMovingEdge(const vpImage< unsigned char > &I)
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
void reinitMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
void resetTracker() VP_OVERRIDE
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
void setMovingEdge(const vpMe &me)
virtual void setCameraParameters(const vpCameraParameters &cam) VP_OVERRIDE
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
vpColVector m_factor
Edge VVS variables.
void visibleFace(const vpImage< unsigned char > &_I, const vpHomogeneousMatrix &_cMo, bool &newvisibleline)
void initMovingEdge(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const bool &useInitRange=true)
virtual void setUseKltTracking(const std::string &name, const bool &useKltTracking)
virtual std::vector< std::vector< double > > getFeaturesForDisplay()
virtual void setCameraParameters(const vpCameraParameters &camera) VP_OVERRIDE
virtual double getGoodMovingEdgesRatioThreshold() const
virtual int getTrackerType() const
virtual void setOgreVisibilityTest(const bool &v) VP_OVERRIDE
std::map< std::string, TrackerWrapper * > m_mapOfTrackers
virtual std::pair< std::vector< vpPolygon >, std::vector< std::vector< vpPoint > > > getPolygonFaces(bool orderPolygons=true, bool useVisibility=true, bool clipPolygon=false) VP_OVERRIDE
virtual void setKltMaskBorder(const unsigned int &e)
virtual void setProjectionErrorComputation(const bool &flag) VP_OVERRIDE
vpMbGenericTracker()
json namespace shortcut
virtual void setDisplayFeatures(bool displayF) VP_OVERRIDE
unsigned int m_nb_feat_edge
Number of moving-edges features.
virtual void setKltThresholdAcceptation(double th)
std::map< std::string, vpHomogeneousMatrix > m_mapOfCameraTransformationMatrix
virtual double getKltThresholdAcceptation() const
virtual double computeCurrentProjectionError(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &_cMo, const vpCameraParameters &_cam) VP_OVERRIDE
virtual void getCameraParameters(vpCameraParameters &camera) const VP_OVERRIDE
virtual void getLcircle(std::list< vpMbtDistanceCircle * > &circlesList, unsigned int level=0) const
virtual void setDepthDenseFilteringMaxDistance(double maxDistance)
virtual std::list< vpMbtDistanceKltCylinder * > & getFeaturesKltCylinder()
virtual void setProjectionErrorDisplayArrowLength(unsigned int length) VP_OVERRIDE
virtual vpHomogeneousMatrix getPose() const
virtual std::vector< cv::Point2f > getKltPoints() const
virtual void initCircle(const vpPoint &p1, const vpPoint &p2, const vpPoint &p3, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
virtual void initFromPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo) VP_OVERRIDE
virtual void setGoodMovingEdgesRatioThreshold(double threshold)
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual vpMbtPolygon * getPolygon(unsigned int index) VP_OVERRIDE
friend void from_json(const nlohmann::json &j, TrackerWrapper &t)
Load configuration settings from a JSON object for a tracker wrapper.
virtual void setDepthDenseFilteringMinDistance(double minDistance)
virtual void setUseEdgeTracking(const std::string &name, const bool &useEdgeTracking)
virtual void setMinPolygonAreaThresh(double minPolygonAreaThresh, const std::string &name="") VP_OVERRIDE
virtual void setLod(bool useLod, const std::string &name="") VP_OVERRIDE
virtual void setDepthNormalSamplingStep(unsigned int stepX, unsigned int stepY)
vpColVector m_w
Robust weights.
virtual void setNbRayCastingAttemptsForVisibility(const unsigned int &attempts) VP_OVERRIDE
virtual void saveConfigFile(const std::string &settingsFile) const
virtual std::string getReferenceCameraName() const
virtual std::map< std::string, int > getCameraTrackerTypes() const
virtual void loadConfigFileJSON(const std::string &configFile, bool verbose=true)
virtual void testTracking() VP_OVERRIDE
unsigned int m_nb_feat_depthDense
Number of depth dense features.
virtual void setDepthNormalPclPlaneEstimationMethod(int method)
virtual void computeProjectionError()
virtual void reInitModel(const vpImage< unsigned char > &I, const std::string &cad_name, const vpHomogeneousMatrix &cMo, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix())
virtual void setUseDepthDenseTracking(const std::string &name, const bool &useDepthDenseTracking)
virtual void setMovingEdge(const vpMe &me)
virtual void setDepthNormalPclPlaneEstimationRansacMaxIter(int maxIter)
virtual void setDepthNormalPclPlaneEstimationRansacThreshold(double threshold)
virtual void setAngleDisappear(const double &a) VP_OVERRIDE
virtual void setFeatureFactors(const std::map< vpTrackerType, double > &mapOfFeatureFactors)
virtual void computeVVSInit() VP_OVERRIDE
virtual void init(const vpImage< unsigned char > &I) VP_OVERRIDE
virtual void getLcylinder(std::list< vpMbtDistanceCylinder * > &cylindersList, unsigned int level=0) const
virtual std::vector< vpImagePoint > getKltImagePoints() const
virtual void setDepthDenseSamplingStep(unsigned int stepX, unsigned int stepY)
virtual std::vector< std::string > getCameraNames() const
virtual void setDepthDenseFilteringMethod(int method)
virtual void track(const vpImage< unsigned char > &I) VP_OVERRIDE
vpColVector m_weightedError
Weighted error.
virtual std::list< vpMbtDistanceKltPoints * > & getFeaturesKlt()
virtual void loadModel(const std::string &modelFile, bool verbose=false, const vpHomogeneousMatrix &T=vpHomogeneousMatrix()) VP_OVERRIDE
vpMatrix m_L
Interaction matrix.
virtual void setKltOpencv(const vpKltOpencv &t)
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt) VP_OVERRIDE
virtual void setDepthNormalFaceCentroidMethod(const vpMbtFaceDepthNormal::vpFaceCentroidType &method)
virtual void computeVVSWeights()
virtual void loadConfigFileXML(const std::string &configFile, bool verbose=true)
virtual void setMask(const vpImage< bool > &mask) VP_OVERRIDE
virtual void setDepthDenseFilteringOccupancyRatio(double occupancyRatio)
virtual unsigned int getKltMaskBorder() const
vpColVector m_error
(s - s*)
virtual void setReferenceCameraName(const std::string &referenceCameraName)
virtual void setMinLineLengthThresh(double minLineLengthThresh, const std::string &name="") VP_OVERRIDE
virtual void setClipping(const unsigned int &flags) VP_OVERRIDE
std::map< vpTrackerType, double > m_mapOfFeatureFactors
Ponderation between each feature type in the VVS stage.
virtual void computeVVS(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages)
virtual std::list< vpMbtDistanceCircle * > & getFeaturesCircle()
virtual void getLline(std::list< vpMbtDistanceLine * > &linesList, unsigned int level=0) const
virtual void setTrackerType(int type)
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
unsigned int m_nb_feat_klt
Number of klt features.
virtual void initFromPoints(const vpImage< unsigned char > &I1, const vpImage< unsigned char > &I2, const std::string &initFile1, const std::string &initFile2)
virtual unsigned int getNbPoints(unsigned int level=0) const
virtual void initCylinder(const vpPoint &p1, const vpPoint &p2, double radius, int idFace=0, const std::string &name="") VP_OVERRIDE
virtual vpKltOpencv getKltOpencv() const
virtual void setGoodNbRayCastingAttemptsRatio(const double &ratio) VP_OVERRIDE
virtual int getKltNbPoints() const
unsigned int m_nb_feat_depthNormal
Number of depth normal features.
virtual void setOgreShowConfigDialog(bool showConfigDialog) VP_OVERRIDE
virtual void initClick(const vpImage< unsigned char > &I1, const vpImage< unsigned char > &I2, const std::string &initFile1, const std::string &initFile2, bool displayHelp=false, const vpHomogeneousMatrix &T1=vpHomogeneousMatrix(), const vpHomogeneousMatrix &T2=vpHomogeneousMatrix())
virtual vpMe getMovingEdge() const
virtual unsigned int getNbPolygon() const VP_OVERRIDE
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void resetTracker() VP_OVERRIDE
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) VP_OVERRIDE
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void preTracking(std::map< std::string, const vpImage< unsigned char > * > &mapOfImages, std::map< std::string, pcl::PointCloud< pcl::PointXYZ >::ConstPtr > &mapOfPointClouds)
virtual void setNearClippingDistance(const double &dist) VP_OVERRIDE
virtual ~vpMbGenericTracker() VP_OVERRIDE
virtual void setCameraTransformationMatrix(const std::string &cameraName, const vpHomogeneousMatrix &cameraTransformationMatrix)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness) VP_OVERRIDE
virtual void setUseDepthNormalTracking(const std::string &name, const bool &useDepthNormalTracking)
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
std::string m_referenceCameraName
Name of the reference camera.
virtual vpMbHiddenFaces< vpMbtPolygon > & getFaces() VP_OVERRIDE
virtual void setDepthNormalFeatureEstimationMethod(const vpMbtFaceDepthNormal::vpFeatureEstimationType &method)
virtual void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false) VP_OVERRIDE
virtual void loadConfigFile(const std::string &configFile, bool verbose=true) VP_OVERRIDE
virtual void setProjectionErrorDisplay(bool display) VP_OVERRIDE
virtual unsigned int getClipping() const
virtual std::map< int, vpImagePoint > getKltImagePointsWithId() const
virtual void setAngleAppear(const double &a) VP_OVERRIDE
virtual void setFarClippingDistance(const double &dist) VP_OVERRIDE
Implementation of the polygons management for the model-based trackers.
vpAROgre * getOgreContext()
vpColVector m_error_klt
(s - s*)
vpHomogeneousMatrix ctTc0
virtual void setScanLineVisibilityTest(const bool &v) VP_OVERRIDE
virtual void initCircle(const vpPoint &, const vpPoint &, const vpPoint &, double, int, const std::string &name="") VP_OVERRIDE
void resetTracker() VP_OVERRIDE
virtual void computeVVSInteractionMatrixAndResidu() VP_OVERRIDE
void setCameraParameters(const vpCameraParameters &cam) VP_OVERRIDE
virtual std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false) VP_OVERRIDE
void preTracking(const vpImage< unsigned char > &I)
virtual void initCylinder(const vpPoint &, const vpPoint &, double, int, const std::string &name="") VP_OVERRIDE
virtual void initFaceFromCorners(vpMbtPolygon &polygon) VP_OVERRIDE
virtual void computeVVSInit() VP_OVERRIDE
bool postTracking(const vpImage< unsigned char > &I, vpColVector &w)
virtual void reinit(const vpImage< unsigned char > &I)
virtual void setPose(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cdMo) VP_OVERRIDE
vpColVector m_w_klt
Robust weights.
virtual void initFaceFromLines(vpMbtPolygon &polygon) VP_OVERRIDE
virtual double getNearClippingDistance() const
virtual void setProjectionErrorDisplayArrowLength(unsigned int length)
double m_lambda
Gain of the virtual visual servoing stage.
virtual void setMaxIter(unsigned int max)
bool modelInitialised
double minLineLengthThresholdGeneral
Minimum line length threshold for LOD mode (general setting).
bool m_projectionErrorDisplay
Display gradient and model orientation for projection error computation.
virtual void setOgreShowConfigDialog(bool showConfigDialog)
virtual double getAngleAppear() const
virtual void setMask(const vpImage< bool > &mask)
virtual void getCameraParameters(vpCameraParameters &cam) const
vpImage< unsigned char > m_I
Grayscale image buffer, used when passing color images.
virtual void setDisplayFeatures(bool displayF)
virtual vpHomogeneousMatrix getPose() const
bool useLodGeneral
True if LOD mode is enabled.
double minPolygonAreaThresholdGeneral
Minimum polygon area threshold for LOD mode (general setting).
bool m_computeInteraction
vpMatrix oJo
The Degrees of Freedom to estimate.
vpMatrix covarianceMatrix
Covariance matrix.
double m_initialMu
Initial Mu for Levenberg Marquardt optimization loop.
bool computeProjError
vpHomogeneousMatrix m_cMo
The current pose.
virtual void computeVVSCheckLevenbergMarquardt(unsigned int iter, vpColVector &error, const vpColVector &m_error_prev, const vpHomogeneousMatrix &cMoPrev, double &mu, bool &reStartFromLastIncrement, vpColVector *const w=nullptr, const vpColVector *const m_w_prev=nullptr)
vpCameraParameters m_cam
The camera parameters.
double projectionError
double m_stopCriteriaEpsilon
Epsilon threshold to stop the VVS optimization loop.
bool useOgre
Use Ogre3d for global visibility tests.
virtual double getAngleDisappear() const
virtual void computeVVSWeights(vpRobust &robust, const vpColVector &error, vpColVector &w)
vpMbHiddenFaces< vpMbtPolygon > faces
Set of faces describing the object.
virtual void setCameraParameters(const vpCameraParameters &cam)
virtual void setAngleDisappear(const double &a)
virtual void setInitialMu(double mu)
virtual void setScanLineVisibilityTest(const bool &v)
virtual void setOgreVisibilityTest(const bool &v)
virtual void computeCovarianceMatrixVVS(const bool isoJoIdentity, const vpColVector &w_true, const vpHomogeneousMatrix &cMoPrev, const vpMatrix &L_true, const vpMatrix &LVJ_true, const vpColVector &error)
vpMbtOptimizationMethod m_optimizationMethod
Optimization method used.
virtual void computeVVSPoseEstimation(const bool isoJoIdentity, unsigned int iter, vpMatrix &L, vpMatrix &LTL, vpColVector &R, const vpColVector &error, vpColVector &error_prev, vpColVector &LTR, double &mu, vpColVector &v, const vpColVector *const w=nullptr, vpColVector *const m_w_prev=nullptr)
bool displayFeatures
If true, the features are displayed.
virtual void setProjectionErrorDisplay(bool display)
double angleDisappears
Angle used to detect a face disappearance.
virtual void setLambda(double gain)
virtual void setNearClippingDistance(const double &dist)
bool applyLodSettingInConfig
virtual void setFarClippingDistance(const double &dist)
double distFarClip
Distance for far clipping.
bool m_isoJoIdentity
Boolean to know if oJo is identity (for fast computation).
bool useScanLine
Use Scanline for global visibility tests.
virtual void setProjectionErrorComputation(const bool &flag)
virtual void setClipping(const unsigned int &flags)
double angleAppears
Angle used to detect a face appearance.
virtual void setOptimizationMethod(const vpMbtOptimizationMethod &opt)
virtual void initFromPose(const vpImage< unsigned char > &I, const std::string &initFile)
virtual void setProjectionErrorDisplayArrowThickness(unsigned int thickness)
virtual void setAngleAppear(const double &a)
bool computeCovariance
Flag used to specify if the covariance matrix has to be computed or not.
double distNearClip
Distance for near clipping.
unsigned int m_maxIter
Maximum number of iterations of the virtual visual servoing stage.
bool ogreShowConfigDialog
unsigned int clippingFlag
Flags specifying which clipping to used.
virtual double getFarClippingDistance() const
vpMbHiddenFaces< vpMbtPolygon > m_projectionErrorFaces
Set of faces describing the object, used for projection error.
virtual unsigned int getClipping() const
virtual void loadConfigFile(const std::string &configFile, bool verbose=true)
Implementation of a polygon of the model used by the model-based tracker.
@ TOO_NEAR
Point not tracked anymore, since too near from its neighbor.
Definition vpMeSite.h:100
@ THRESHOLD
Point not tracked due to the likelihood that is below the threshold, but retained in the ME list.
Definition vpMeSite.h:98
@ CONTRAST
Point not tracked due to a contrast problem, but retained in the ME list.
Definition vpMeSite.h:94
@ M_ESTIMATOR
Point detected as an outlier during virtual visual-servoing.
Definition vpMeSite.h:99
@ NO_SUPPRESSION
Point successfully tracked.
Definition vpMeSite.h:93
Definition vpMe.h:143
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79
Error that can be emitted by the vpTracker class and its derivatives.
@ notEnoughPointError
Not enough point to track.
@ initializationError
Tracker initialization error.
@ fatalError
Tracker fatal error.
vpVelocityTwistMatrix & buildFrom(const vpTranslationVector &t, const vpRotationMatrix &R)