Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
vpMbtFaceDepthDense.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * Manage depth dense features for a particular face.
32 */
33
34#include <visp3/core/vpCPUFeatures.h>
35#include <visp3/mbt/vpMbtFaceDepthDense.h>
36
37#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
38#include <pcl/common/point_tests.h>
39#endif
40
41#if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2)
42#include <emmintrin.h>
43#define VISP_HAVE_SSE2 1
44#endif
45
46// https://stackoverflow.com/a/40765925
47#if !defined(__FMA__) && defined(__AVX2__)
48#define __FMA__ 1
49#endif
50
51#if defined _WIN32 && defined(_M_ARM64)
52#define _ARM64_DISTINCT_NEON_TYPES
53#include <Intrin.h>
54#include <arm_neon.h>
55#define VISP_HAVE_NEON 1
56#elif (defined(__ARM_NEON__) || defined (__ARM_NEON)) && defined(__aarch64__)
57#include <arm_neon.h>
58#define VISP_HAVE_NEON 1
59#else
60#define VISP_HAVE_NEON 0
61#endif
62
63#define USE_SIMD_CODE 1
64
65#if VISP_HAVE_SSE2 && USE_SIMD_CODE
66#define USE_SSE 1
67#else
68#define USE_SSE 0
69#endif
70
71#if VISP_HAVE_NEON && USE_SIMD_CODE
72#define USE_NEON 1
73#else
74#define USE_NEON 0
75#endif
76
77#if defined(VISP_HAVE_OPENCV) && \
78 (VISP_HAVE_OPENCV_VERSION >= 0x040101 || (VISP_HAVE_OPENCV_VERSION < 0x040000 && VISP_HAVE_OPENCV_VERSION >= 0x030407)) && USE_SIMD_CODE
79
80// See: https://github.com/lagadic/visp/issues/1606
81// 0x040B00 --> (4<<16 | 11<<8 | 0)
82// Only starting from OpenCV 4.11 cv::v_mul is available for all the platforms
83// So if OpenCV >= 4.11 || OpenCV < 4.9 --> use OpenCV HAL API
84// Otherwise, only if between >= 4.9 && < 4.11 and on regular platform (X86 && ARM64) --> use OpenCV HAL API
85#if (VISP_HAVE_OPENCV_VERSION >= 0x040B00) || (VISP_HAVE_OPENCV_VERSION < 0x040900) || \
86 ( (VISP_HAVE_OPENCV_VERSION >= 0x040900) && (VISP_HAVE_OPENCV_VERSION < 0x040B00) && (USE_SSE || USE_NEON) )
87# define USE_OPENCV_HAL 1
88# include <opencv2/core/simd_intrinsics.hpp>
89# include <opencv2/core/hal/intrin.hpp>
90# else
91# define USE_OPENCV_HAL 0
92# endif
93#else
94# define USE_OPENCV_HAL 0
95#endif
96
97#if !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
98#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
99#include <cstdint>
100#endif
101
// File-local SIMD helpers emulating a small subset of OpenCV's universal
// intrinsics. Only compiled when the OpenCV HAL is unavailable (see the
// enclosing #if !USE_OPENCV_HAL && (USE_SSE || USE_NEON)).
102namespace
103{
104#if USE_SSE
// De-interleave three 64-bit channels: ptr = {a0,b0,c0,a1,b1,c1} ->
// a = {a0,a1}, b = {b0,b1}, c = {c0,c1}. SSE2 has no ld3-style structured
// load, hence the shuffle/unpack sequence over three 128-bit loads.
105inline void v_load_deinterleave(const uint64_t *ptr, __m128i &a, __m128i &b, __m128i &c)
106{
107 __m128i t0 = _mm_loadu_si128((const __m128i *)ptr); // a0, b0
108 __m128i t1 = _mm_loadu_si128((const __m128i *)(ptr + 2)); // c0, a1
109 __m128i t2 = _mm_loadu_si128((const __m128i *)(ptr + 4)); // b1, c1
110
111 t1 = _mm_shuffle_epi32(t1, 0x4e); // a1, c0
112
113 a = _mm_unpacklo_epi64(t0, t1);
114 b = _mm_unpacklo_epi64(_mm_unpackhi_epi64(t0, t0), t2);
115 c = _mm_unpackhi_epi64(t1, t2);
116}
117
// Double-precision flavor (x/y/z triplets): reuses the integer version
// through bit-preserving casts — no value conversion happens.
118inline void v_load_deinterleave(const double *ptr, __m128d &a0, __m128d &b0, __m128d &c0)
119{
120 __m128i a1, b1, c1;
121 v_load_deinterleave((const uint64_t *)ptr, a1, b1, c1);
122 a0 = _mm_castsi128_pd(a1);
123 b0 = _mm_castsi128_pd(b1);
124 c0 = _mm_castsi128_pd(c1);
125}
126
// Returns {a.low, b.low}.
127inline __m128d v_combine_low(const __m128d &a, const __m128d &b)
128{
129 __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
130 return _mm_castsi128_pd(_mm_unpacklo_epi64(a1, b1));
131}
132
// Returns {a.high, b.high}.
133inline __m128d v_combine_high(const __m128d &a, const __m128d &b)
134{
135 __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
136 return _mm_castsi128_pd(_mm_unpackhi_epi64(a1, b1));
137}
138
// a * b + c. Uses a true FMA instruction only when __FMA__ is defined;
// the mul+add fallback rounds twice, so results may differ in the last ulp.
139inline __m128d v_fma(const __m128d &a, const __m128d &b, const __m128d &c)
140{
141#if __FMA__
142 return _mm_fmadd_pd(a, b, c);
143#else
144 return _mm_add_pd(_mm_mul_pd(a, b), c);
145#endif
146}
147#else
// NEON path: vld3q_f64 is a native structured 3-element load, so the
// de-interleave is direct.
148inline void v_load_deinterleave(const double *ptr, float64x2_t &a0, float64x2_t &b0, float64x2_t &c0)
149{
150 float64x2x3_t v = vld3q_f64(ptr);
151 a0 = v.val[0];
152 b0 = v.val[1];
153 c0 = v.val[2];
154}
155
// Returns {a.low, b.low}.
156inline float64x2_t v_combine_low(const float64x2_t &a, const float64x2_t &b)
157{
158 return vcombine_f64(vget_low_f64(a), vget_low_f64(b));
159}
160
// Returns {a.high, b.high}.
161inline float64x2_t v_combine_high(const float64x2_t &a, const float64x2_t &b)
162{
163 return vcombine_f64(vget_high_f64(a), vget_high_f64(b));
164}
165
// a * b + c with a single rounding (vfmaq_f64 computes c + a*b fused).
166inline float64x2_t v_fma(const float64x2_t &a, const float64x2_t &b, const float64x2_t &c)
167{
168 return vfmaq_f64(c, a, b);
169}
170#endif
171}
172#endif // !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
173
175
186
// NOTE(review): the signature lines were lost in the page extraction.
// From the parameter name `mbt_face` and the delegation below, this is
// presumably the copy constructor forwarding to operator= — TODO confirm
// against the header.
192{
193 *this = mbt_face;
194}
195
222
// NOTE(review): signature line lost in extraction; from the body this is
// the destructor — it releases the vpMbtDistanceLine objects owned through
// m_listOfFaceLines.
227{
228 for (size_t i = 0; i < m_listOfFaceLines.size(); i++) {
229 delete m_listOfFaceLines[i];
230 }
231}
232
248 vpUniRand &rand_gen, int polygon, std::string name)
249{
250 // Build a PolygonLine to be able to easily display the lines model
251 PolygonLine polygon_line;
252
253 // Add polygon
254 polygon_line.m_poly.setNbPoint(2);
255 polygon_line.m_poly.addPoint(0, P1);
256 polygon_line.m_poly.addPoint(1, P2);
257
258 polygon_line.m_poly.setClipping(m_clippingFlag);
259 polygon_line.m_poly.setNearClippingDistance(m_distNearClip);
260 polygon_line.m_poly.setFarClippingDistance(m_distFarClip);
261
262 polygon_line.m_p1 = &polygon_line.m_poly.p[0];
263 polygon_line.m_p2 = &polygon_line.m_poly.p[1];
264
265 m_polygonLines.push_back(polygon_line);
266
267 // suppress line already in the model
268 bool already_here = false;
270
271 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
272 ++it) {
273 l = *it;
274 if ((samePoint(*(l->p1), P1) && samePoint(*(l->p2), P2)) || (samePoint(*(l->p1), P2) && samePoint(*(l->p2), P1))) {
275 already_here = true;
276 l->addPolygon(polygon);
277 l->hiddenface = faces;
279 }
280 }
281
282 if (!already_here) {
283 l = new vpMbtDistanceLine;
284
286 l->buildFrom(P1, P2, rand_gen);
287 l->addPolygon(polygon);
288 l->hiddenface = faces;
290
291 l->setIndex(static_cast<unsigned int>(m_listOfFaceLines.size()));
292 l->setName(name);
293
296
299
302
303 m_listOfFaceLines.push_back(l);
304 }
305}
306
307#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
309 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
310 unsigned int stepX, unsigned int stepY
311#if DEBUG_DISPLAY_DEPTH_DENSE
312 ,
313 vpImage<unsigned char> &debugImage,
314 std::vector<std::vector<vpImagePoint> > &roiPts_vec
315#endif
316 ,
317 const vpImage<bool> *mask)
318{
319 unsigned int width = point_cloud->width, height = point_cloud->height;
320 m_pointCloudFace.clear();
321
322 if (point_cloud->width == 0 || point_cloud->height == 0)
323 return false;
324
325 std::vector<vpImagePoint> roiPts;
326 double distanceToFace;
327 computeROI(cMo, width, height, roiPts
328#if DEBUG_DISPLAY_DEPTH_DENSE
329 ,
330 roiPts_vec
331#endif
332 ,
333 distanceToFace);
334
335 if (roiPts.size() <= 2) {
336#ifndef NDEBUG
337 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
338#endif
339 return false;
340 }
341
344 return false;
345 }
346
347 vpPolygon polygon_2d(roiPts);
348 vpRect bb = polygon_2d.getBoundingBox();
349
350 unsigned int top = static_cast<unsigned int>(std::max<double>(0.0, bb.getTop()));
351 unsigned int bottom = static_cast<unsigned int>(std::min<double>(static_cast<double>(height), std::max<double>(0.0, bb.getBottom())));
352 unsigned int left = static_cast<unsigned int>(std::max<double>(0.0, bb.getLeft()));
353 unsigned int right = static_cast<unsigned int>(std::min<double>(static_cast<double>(width), std::max<double>(0.0, bb.getRight())));
354
355 bb.setTop(top);
356 bb.setBottom(bottom);
357 bb.setLeft(left);
358 bb.setRight(right);
359
360 if (bb.getHeight() < 0 || bb.getWidth() < 0) {
361 return false;
362 }
363
364 m_pointCloudFace.reserve(static_cast<size_t>(bb.getWidth() * bb.getHeight()));
365
366 int totalTheoreticalPoints = 0, totalPoints = 0;
367 for (unsigned int i = top; i < bottom; i += stepY) {
368 for (unsigned int j = left; j < right; j += stepX) {
369 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
370 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
371 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
372 : polygon_2d.isInside(vpImagePoint(i, j)))) {
373 totalTheoreticalPoints++;
374
375 if (vpMeTracker::inRoiMask(mask, i, j) && pcl::isFinite((*point_cloud)(j, i)) && (*point_cloud)(j, i).z > 0) {
376 totalPoints++;
377
378 m_pointCloudFace.push_back((*point_cloud)(j, i).x);
379 m_pointCloudFace.push_back((*point_cloud)(j, i).y);
380 m_pointCloudFace.push_back((*point_cloud)(j, i).z);
381
382#if DEBUG_DISPLAY_DEPTH_DENSE
383 debugImage[i][j] = 255;
384#endif
385 }
386 }
387 }
388 }
389
391 totalPoints / static_cast<double>(totalTheoreticalPoints) < m_depthDenseFilteringOccupancyRatio)) {
392 return false;
393 }
394
395 return true;
396}
397#endif
398
400 unsigned int height, const std::vector<vpColVector> &point_cloud,
401 unsigned int stepX, unsigned int stepY
402#if DEBUG_DISPLAY_DEPTH_DENSE
403 ,
404 vpImage<unsigned char> &debugImage,
405 std::vector<std::vector<vpImagePoint> > &roiPts_vec
406#endif
407 ,
408 const vpImage<bool> *mask)
409{
410 m_pointCloudFace.clear();
411
412 if (width == 0 || height == 0)
413 return 0;
414
415 std::vector<vpImagePoint> roiPts;
416 double distanceToFace;
417 computeROI(cMo, width, height, roiPts
418#if DEBUG_DISPLAY_DEPTH_DENSE
419 ,
420 roiPts_vec
421#endif
422 ,
423 distanceToFace);
424
425 if (roiPts.size() <= 2) {
426#ifndef NDEBUG
427 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
428#endif
429 return false;
430 }
431
434 return false;
435 }
436
437 vpPolygon polygon_2d(roiPts);
438 vpRect bb = polygon_2d.getBoundingBox();
439
440 unsigned int top = static_cast<unsigned int>(std::max<double>(0.0, bb.getTop()));
441 unsigned int bottom = static_cast<unsigned int>(std::min<double>(static_cast<double>(height), std::max<double>(0.0, bb.getBottom())));
442 unsigned int left = static_cast<unsigned int>(std::max<double>(0.0, bb.getLeft()));
443 unsigned int right = static_cast<unsigned int>(std::min<double>(static_cast<double>(width), std::max<double>(0.0, bb.getRight())));
444
445 bb.setTop(top);
446 bb.setBottom(bottom);
447 bb.setLeft(left);
448 bb.setRight(right);
449
450 m_pointCloudFace.reserve(static_cast<size_t>(bb.getWidth() * bb.getHeight()));
451
452 int totalTheoreticalPoints = 0, totalPoints = 0;
453 for (unsigned int i = top; i < bottom; i += stepY) {
454 for (unsigned int j = left; j < right; j += stepX) {
455 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
456 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
457 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
458 : polygon_2d.isInside(vpImagePoint(i, j)))) {
459 totalTheoreticalPoints++;
460
461 if (vpMeTracker::inRoiMask(mask, i, j) && point_cloud[i * width + j][2] > 0) {
462 totalPoints++;
463
464 m_pointCloudFace.push_back(point_cloud[i * width + j][0]);
465 m_pointCloudFace.push_back(point_cloud[i * width + j][1]);
466 m_pointCloudFace.push_back(point_cloud[i * width + j][2]);
467
468#if DEBUG_DISPLAY_DEPTH_DENSE
469 debugImage[i][j] = 255;
470#endif
471 }
472 }
473 }
474 }
475
477 totalPoints / static_cast<double>(totalTheoreticalPoints) < m_depthDenseFilteringOccupancyRatio)) {
478 return false;
479 }
480
481 return true;
482}
483
485 unsigned int height, const vpMatrix &point_cloud,
486 unsigned int stepX, unsigned int stepY
487#if DEBUG_DISPLAY_DEPTH_DENSE
488 ,
489 vpImage<unsigned char> &debugImage,
490 std::vector<std::vector<vpImagePoint> > &roiPts_vec
491#endif
492 ,
493 const vpImage<bool> *mask)
494{
495 m_pointCloudFace.clear();
496
497 if (width == 0 || height == 0)
498 return 0;
499
500 std::vector<vpImagePoint> roiPts;
501 double distanceToFace;
502 computeROI(cMo, width, height, roiPts
503#if DEBUG_DISPLAY_DEPTH_DENSE
504 ,
505 roiPts_vec
506#endif
507 ,
508 distanceToFace);
509
510 if (roiPts.size() <= 2) {
511#ifndef NDEBUG
512 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
513#endif
514 return false;
515 }
516
519 return false;
520 }
521
522 vpPolygon polygon_2d(roiPts);
523 vpRect bb = polygon_2d.getBoundingBox();
524
525 unsigned int top = static_cast<unsigned int>(std::max<double>(0.0, bb.getTop()));
526 unsigned int bottom = static_cast<unsigned int>(std::min<double>(static_cast<double>(height), std::max<double>(0.0, bb.getBottom())));
527 unsigned int left = static_cast<unsigned int>(std::max<double>(0.0, bb.getLeft()));
528 unsigned int right = static_cast<unsigned int>(std::min<double>(static_cast<double>(width), std::max<double>(0.0, bb.getRight())));
529
530 bb.setTop(top);
531 bb.setBottom(bottom);
532 bb.setLeft(left);
533 bb.setRight(right);
534
535 m_pointCloudFace.reserve(static_cast<size_t>(bb.getWidth() * bb.getHeight()));
536
537 int totalTheoreticalPoints = 0, totalPoints = 0;
538 for (unsigned int i = top; i < bottom; i += stepY) {
539 for (unsigned int j = left; j < right; j += stepX) {
540 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
541 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
542 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
543 : polygon_2d.isInside(vpImagePoint(i, j)))) {
544 totalTheoreticalPoints++;
545
546 if (vpMeTracker::inRoiMask(mask, i, j) && point_cloud[i * width + j][2] > 0) {
547 totalPoints++;
548
549 m_pointCloudFace.push_back(point_cloud[i * width + j][0]);
550 m_pointCloudFace.push_back(point_cloud[i * width + j][1]);
551 m_pointCloudFace.push_back(point_cloud[i * width + j][2]);
552
553#if DEBUG_DISPLAY_DEPTH_DENSE
554 debugImage[i][j] = 255;
555#endif
556 }
557 }
558 }
559 }
560
562 totalPoints / static_cast<double>(totalTheoreticalPoints) < m_depthDenseFilteringOccupancyRatio)) {
563 return false;
564 }
565
566 return true;
567}
568
570
// NOTE(review): the signature line for this member was lost in the page
// extraction. From the body, it recomputes the display-only visibility flag
// of every line bounding this face — TODO confirm name against the header.
572{
573 // Compute lines visibility, only for display
574 vpMbtDistanceLine *line;
575 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
576 ++it) {
577 line = *it;
578 bool isvisible = false;
579
 // A line is visible as soon as one of the polygons it belongs to is
 // visible; a stored index of -1 marks the line as always visible.
580 for (std::list<int>::const_iterator itindex = line->Lindex_polygon.begin(); itindex != line->Lindex_polygon.end();
581 ++itindex) {
582 int index = *itindex;
583 if (index == -1) {
584 isvisible = true;
585 }
586 else {
587 if (line->hiddenface->isVisible(static_cast<unsigned int>(index))) {
588 isvisible = true;
589 }
590 }
591 }
592
593 // If the line does not belong to any face, it is always visible
594 if (line->Lindex_polygon.empty())
595 isvisible = true; // Not sure that this can occur
596
597 if (isvisible) {
598 line->setVisible(true);
599 }
600 else {
601 line->setVisible(false);
602 }
603 }
604}
605
607 vpColVector &error)
608{
609 if (m_pointCloudFace.empty()) {
610 L.resize(0, 0);
611 error.resize(0);
612 return;
613 }
614
615 L.resize(getNbFeatures(), 6, false, false);
616 error.resize(getNbFeatures(), false);
617
618 // Transform the plane equation for the current pose
620 m_planeCamera.changeFrame(cMo);
621
622 double nx = m_planeCamera.getA();
623 double ny = m_planeCamera.getB();
624 double nz = m_planeCamera.getC();
625 double D = m_planeCamera.getD();
626
627#if defined(VISP_HAVE_SIMDLIB)
629#else
630 bool useSIMD = vpCPUFeatures::checkSSE2();
631#endif
632#if USE_OPENCV_HAL
633 useSIMD = true;
634#endif
635#if !USE_SSE && !USE_NEON && !USE_OPENCV_HAL
636 useSIMD = false;
637#endif
638
639 if (useSIMD) {
640#if USE_SSE || USE_NEON || USE_OPENCV_HAL
641 size_t cpt = 0;
642 if (getNbFeatures() >= 2) {
643 double *ptr_point_cloud = &m_pointCloudFace[0];
644 double *ptr_L = L.data;
645 double *ptr_error = error.data;
646
647#if USE_OPENCV_HAL
648 const cv::v_float64x2 vnx = cv::v_setall_f64(nx);
649 const cv::v_float64x2 vny = cv::v_setall_f64(ny);
650 const cv::v_float64x2 vnz = cv::v_setall_f64(nz);
651 const cv::v_float64x2 vd = cv::v_setall_f64(D);
652#elif USE_SSE
653 const __m128d vnx = _mm_set1_pd(nx);
654 const __m128d vny = _mm_set1_pd(ny);
655 const __m128d vnz = _mm_set1_pd(nz);
656 const __m128d vd = _mm_set1_pd(D);
657#else
658 const float64x2_t vnx = vdupq_n_f64(nx);
659 const float64x2_t vny = vdupq_n_f64(ny);
660 const float64x2_t vnz = vdupq_n_f64(nz);
661 const float64x2_t vd = vdupq_n_f64(D);
662#endif
663
664 for (; cpt <= m_pointCloudFace.size() - 6; cpt += 6, ptr_point_cloud += 6) {
665#if USE_OPENCV_HAL
666 cv::v_float64x2 vx, vy, vz;
667 cv::v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
668
669#if defined(VISP_HAVE_OPENCV) && (VISP_HAVE_OPENCV_VERSION >= 0x040900)
670 cv::v_float64x2 va1 = cv::v_sub(cv::v_mul(vnz, vy), cv::v_mul(vny, vz)); // vnz*vy - vny*vz
671 cv::v_float64x2 va2 = cv::v_sub(cv::v_mul(vnx, vz), cv::v_mul(vnz, vx)); // vnx*vz - vnz*vx
672 cv::v_float64x2 va3 = cv::v_sub(cv::v_mul(vny, vx), cv::v_mul(vnx, vy)); // vny*vx - vnx*vy
673#elif defined(VISP_HAVE_OPENCV)
674 cv::v_float64x2 va1 = vnz*vy - vny*vz;
675 cv::v_float64x2 va2 = vnx*vz - vnz*vx;
676 cv::v_float64x2 va3 = vny*vx - vnx*vy;
677#endif
678
679 cv::v_float64x2 vnxy = cv::v_combine_low(vnx, vny);
680 cv::v_store(ptr_L, vnxy);
681 ptr_L += 2;
682 vnxy = cv::v_combine_low(vnz, va1);
683 cv::v_store(ptr_L, vnxy);
684 ptr_L += 2;
685 vnxy = cv::v_combine_low(va2, va3);
686 cv::v_store(ptr_L, vnxy);
687 ptr_L += 2;
688
689 vnxy = cv::v_combine_high(vnx, vny);
690 cv::v_store(ptr_L, vnxy);
691 ptr_L += 2;
692 vnxy = cv::v_combine_high(vnz, va1);
693 cv::v_store(ptr_L, vnxy);
694 ptr_L += 2;
695 vnxy = cv::v_combine_high(va2, va3);
696 cv::v_store(ptr_L, vnxy);
697 ptr_L += 2;
698
699#if (VISP_HAVE_OPENCV_VERSION >= 0x040900)
700 cv::v_float64x2 verr = cv::v_add(vd, cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, cv::v_mul(vnz, vz))));
701#else
702 cv::v_float64x2 verr = vd + cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, vnz*vz));
703#endif
704
705 cv::v_store(ptr_error, verr);
706 ptr_error += 2;
707#elif USE_SSE
708 __m128d vx, vy, vz;
709 v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
710
711 __m128d va1 = _mm_sub_pd(_mm_mul_pd(vnz, vy), _mm_mul_pd(vny, vz));
712 __m128d va2 = _mm_sub_pd(_mm_mul_pd(vnx, vz), _mm_mul_pd(vnz, vx));
713 __m128d va3 = _mm_sub_pd(_mm_mul_pd(vny, vx), _mm_mul_pd(vnx, vy));
714
715 __m128d vnxy = v_combine_low(vnx, vny);
716 _mm_storeu_pd(ptr_L, vnxy);
717 ptr_L += 2;
718 vnxy = v_combine_low(vnz, va1);
719 _mm_storeu_pd(ptr_L, vnxy);
720 ptr_L += 2;
721 vnxy = v_combine_low(va2, va3);
722 _mm_storeu_pd(ptr_L, vnxy);
723 ptr_L += 2;
724
725 vnxy = v_combine_high(vnx, vny);
726 _mm_storeu_pd(ptr_L, vnxy);
727 ptr_L += 2;
728 vnxy = v_combine_high(vnz, va1);
729 _mm_storeu_pd(ptr_L, vnxy);
730 ptr_L += 2;
731 vnxy = v_combine_high(va2, va3);
732 _mm_storeu_pd(ptr_L, vnxy);
733 ptr_L += 2;
734
735 const __m128d verror = _mm_add_pd(vd, v_fma(vnx, vx, v_fma(vny, vy, _mm_mul_pd(vnz, vz))));
736 _mm_storeu_pd(ptr_error, verror);
737 ptr_error += 2;
738#else
739 float64x2_t vx, vy, vz;
740 v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
741
742 float64x2_t va1 = vsubq_f64(vmulq_f64(vnz, vy), vmulq_f64(vny, vz));
743 float64x2_t va2 = vsubq_f64(vmulq_f64(vnx, vz), vmulq_f64(vnz, vx));
744 float64x2_t va3 = vsubq_f64(vmulq_f64(vny, vx), vmulq_f64(vnx, vy));
745
746 float64x2_t vnxy = v_combine_low(vnx, vny);
747 vst1q_f64(ptr_L, vnxy);
748 ptr_L += 2;
749 vnxy = v_combine_low(vnz, va1);
750 vst1q_f64(ptr_L, vnxy);
751 ptr_L += 2;
752 vnxy = v_combine_low(va2, va3);
753 vst1q_f64(ptr_L, vnxy);
754 ptr_L += 2;
755
756 vnxy = v_combine_high(vnx, vny);
757 vst1q_f64(ptr_L, vnxy);
758 ptr_L += 2;
759 vnxy = v_combine_high(vnz, va1);
760 vst1q_f64(ptr_L, vnxy);
761 ptr_L += 2;
762 vnxy = v_combine_high(va2, va3);
763 vst1q_f64(ptr_L, vnxy);
764 ptr_L += 2;
765
766 const float64x2_t verror = vaddq_f64(vd, v_fma(vnx, vx, v_fma(vny, vy, vmulq_f64(vnz, vz))));
767 vst1q_f64(ptr_error, verror);
768 ptr_error += 2;
769#endif
770 }
771 }
772
773 for (; cpt < m_pointCloudFace.size(); cpt += 3) {
774 double x = m_pointCloudFace[cpt];
775 double y = m_pointCloudFace[cpt + 1];
776 double z = m_pointCloudFace[cpt + 2];
777
778 double _a1 = (nz * y) - (ny * z);
779 double _a2 = (nx * z) - (nz * x);
780 double _a3 = (ny * x) - (nx * y);
781
782 // L
783 L[static_cast<unsigned int>(cpt / 3)][0] = nx;
784 L[static_cast<unsigned int>(cpt / 3)][1] = ny;
785 L[static_cast<unsigned int>(cpt / 3)][2] = nz;
786 L[static_cast<unsigned int>(cpt / 3)][3] = _a1;
787 L[static_cast<unsigned int>(cpt / 3)][4] = _a2;
788 L[static_cast<unsigned int>(cpt / 3)][5] = _a3;
789
790 vpColVector normal(3);
791 normal[0] = nx;
792 normal[1] = ny;
793 normal[2] = nz;
794
795 vpColVector pt(3);
796 pt[0] = x;
797 pt[1] = y;
798 pt[2] = z;
799
800 // Error
801 error[static_cast<unsigned int>(cpt / 3)] = D + (normal.t() * pt);
802 }
803#endif
804 }
805 else {
806 vpColVector normal(3);
807 normal[0] = nx;
808 normal[1] = ny;
809 normal[2] = nz;
810 vpColVector pt(3);
811
812 unsigned int idx = 0;
813 for (size_t i = 0; i < m_pointCloudFace.size(); i += 3, idx++) {
814 double x = m_pointCloudFace[i];
815 double y = m_pointCloudFace[i + 1];
816 double z = m_pointCloudFace[i + 2];
817
818 double _a1 = (nz * y) - (ny * z);
819 double _a2 = (nx * z) - (nz * x);
820 double _a3 = (ny * x) - (nx * y);
821
822 // L
823 L[idx][0] = nx;
824 L[idx][1] = ny;
825 L[idx][2] = nz;
826 L[idx][3] = _a1;
827 L[idx][4] = _a2;
828 L[idx][5] = _a3;
829
830 pt[0] = x;
831 pt[1] = y;
832 pt[2] = z;
833 // Error
834 error[idx] = D + (normal.t() * pt);
835 }
836 }
837}
838
/*!
  Compute the 2D region of interest of the face for the pose \e cMo, together
  with the camera-to-face-centroid distance.

  \param cMo : Camera pose.
  \param width, height : Image size, used to compute the camera field of view.
  \param[out] roiPts : Image points delimiting the (clipped) face.
  \param[out] distanceToFace : Distance between the camera and the face
  centroid; set to std::numeric_limits<double>::max() when no clipped
  geometry remains.
*/
839void vpMbtFaceDepthDense::computeROI(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height,
840 std::vector<vpImagePoint> &roiPts
841#if DEBUG_DISPLAY_DEPTH_DENSE
842 ,
843 std::vector<std::vector<vpImagePoint> > &roiPts_vec
844#endif
845 ,
846 double &distanceToFace)
847{
 // FOV planes are only needed for scan-line mode or FOV clipping flags.
848 if (m_useScanLine || m_clippingFlag > 2)
849 m_cam.computeFov(width, height);
850
 // Scan-line mode: build the ROI from each polygon line, clipped in 3D.
851 if (m_useScanLine) {
852 for (std::vector<PolygonLine>::iterator it = m_polygonLines.begin(); it != m_polygonLines.end(); ++it) {
853 it->m_p1->changeFrame(cMo);
854 it->m_p2->changeFrame(cMo);
855
856 vpImagePoint ip1, ip2;
857
858 it->m_poly.changeFrame(cMo);
859 it->m_poly.computePolygonClipped(m_cam);
860
 // Keep the segment only if both clipped endpoints survive with no
 // common clipping flag set (i.e. the segment is not fully clipped on
 // any frustum side).
861 if (it->m_poly.polyClipped.size() == 2 &&
862 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
863 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
864 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
865 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::UP_CLIPPING) == 0) &&
866 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
867 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {
868
869 std::vector<std::pair<vpPoint, vpPoint> > linesLst;
870 m_hiddenFace->computeScanLineQuery(it->m_poly.polyClipped[0].first, it->m_poly.polyClipped[1].first, linesLst,
871 true);
872
873 vpPoint faceCentroid;
874
 // Project every visible sub-segment and accumulate its endpoints
 // into the centroid sum (two 3D points per segment).
875 for (unsigned int i = 0; i < linesLst.size(); i++) {
876 linesLst[i].first.project();
877 linesLst[i].second.project();
878
879 vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
880 vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);
881
882 it->m_imPt1 = ip1;
883 it->m_imPt2 = ip2;
884
885 roiPts.push_back(ip1);
886 roiPts.push_back(ip2);
887
888 faceCentroid.set_X(faceCentroid.get_X() + linesLst[i].first.get_X() + linesLst[i].second.get_X());
889 faceCentroid.set_Y(faceCentroid.get_Y() + linesLst[i].first.get_Y() + linesLst[i].second.get_Y());
890 faceCentroid.set_Z(faceCentroid.get_Z() + linesLst[i].first.get_Z() + linesLst[i].second.get_Z());
891
892#if DEBUG_DISPLAY_DEPTH_DENSE
893 std::vector<vpImagePoint> roiPts_;
894 roiPts_.push_back(ip1);
895 roiPts_.push_back(ip2);
896 roiPts_vec.push_back(roiPts_);
897#endif
898 }
899
900 if (linesLst.empty()) {
901 distanceToFace = std::numeric_limits<double>::max();
902 }
903 else {
 // Average over 2*N accumulated endpoints, then take the norm of
 // the centroid as the camera-to-face distance.
904 faceCentroid.set_X(faceCentroid.get_X() / (2 * linesLst.size()));
905 faceCentroid.set_Y(faceCentroid.get_Y() / (2 * linesLst.size()));
906 faceCentroid.set_Z(faceCentroid.get_Z() / (2 * linesLst.size()));
907
908 distanceToFace =
909 sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
910 faceCentroid.get_Z() * faceCentroid.get_Z());
911 }
912 }
913 }
914 }
915 else {
916 // Get polygon clipped
917 m_polygon->getRoiClipped(m_cam, roiPts, cMo);
918
919 // Get 3D polygon clipped
920 std::vector<vpPoint> polygonsClipped;
921 m_polygon->getPolygonClipped(polygonsClipped);
922
923 if (polygonsClipped.empty()) {
924 distanceToFace = std::numeric_limits<double>::max();
925 }
926 else {
927 vpPoint faceCentroid;
928
 // Centroid of the clipped 3D polygon; its norm is the distance.
929 for (size_t i = 0; i < polygonsClipped.size(); i++) {
930 faceCentroid.set_X(faceCentroid.get_X() + polygonsClipped[i].get_X());
931 faceCentroid.set_Y(faceCentroid.get_Y() + polygonsClipped[i].get_Y());
932 faceCentroid.set_Z(faceCentroid.get_Z() + polygonsClipped[i].get_Z());
933 }
934
935 faceCentroid.set_X(faceCentroid.get_X() / polygonsClipped.size());
936 faceCentroid.set_Y(faceCentroid.get_Y() / polygonsClipped.size());
937 faceCentroid.set_Z(faceCentroid.get_Z() / polygonsClipped.size());
938
939 distanceToFace = sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
940 faceCentroid.get_Z() * faceCentroid.get_Z());
941 }
942
943#if DEBUG_DISPLAY_DEPTH_DENSE
944 roiPts_vec.push_back(roiPts);
945#endif
946 }
947}
948
950 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
951 bool displayFullModel)
952{
953 std::vector<std::vector<double> > models =
954 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
955
956 for (size_t i = 0; i < models.size(); i++) {
957 vpImagePoint ip1(models[i][1], models[i][2]);
958 vpImagePoint ip2(models[i][3], models[i][4]);
959 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
960 }
961}
962
964 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
965 bool displayFullModel)
966{
967 std::vector<std::vector<double> > models =
968 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
969
970 for (size_t i = 0; i < models.size(); i++) {
971 vpImagePoint ip1(models[i][1], models[i][2]);
972 vpImagePoint ip2(models[i][3], models[i][4]);
973 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
974 }
975}
976
978 const vpCameraParameters & /*cam*/, const double /*scale*/,
979 const unsigned int /*thickness*/)
980{ }
981
983 const vpCameraParameters & /*cam*/, const double /*scale*/,
984 const unsigned int /*thickness*/)
985{ }
986
/*!
  Collect the displayable model of this face: the aggregated display
  primitives of every vpMbtDistanceLine bounding the face.

  \param width, height : Image size used by the lines for clipping.
  \param cMo : Pose used to project the model in the image.
  \param cam : Camera intrinsic parameters.
  \param displayFullModel : When true, return the lines even if the face is
  not currently visible/tracked.
  \return One entry per displayable segment, in the format produced by
  vpMbtDistanceLine::getModelForDisplay().
*/
998std::vector<std::vector<double> > vpMbtFaceDepthDense::getModelForDisplay(unsigned int width, unsigned int height,
999 const vpHomogeneousMatrix &cMo,
1000 const vpCameraParameters &cam,
1001 bool displayFullModel)
1002{
1003 std::vector<std::vector<double> > models;
1004
 // Skip hidden or untracked faces unless the caller asked for the full model.
1005 if ((m_polygon->isVisible() && m_isTrackedDepthDenseFace) || displayFullModel) {
1007
1008 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1009 ++it) {
1010 vpMbtDistanceLine *line = *it;
1011 std::vector<std::vector<double> > lineModels =
1012 line->getModelForDisplay(width, height, cMo, cam, displayFullModel);
1013 models.insert(models.end(), lineModels.begin(), lineModels.end());
1014 }
1015 }
1016
1017 return models;
1018}
1019
1029bool vpMbtFaceDepthDense::samePoint(const vpPoint &P1, const vpPoint &P2) const
1030{
1031 double dx = fabs(P1.get_oX() - P2.get_oX());
1032 double dy = fabs(P1.get_oY() - P2.get_oY());
1033 double dz = fabs(P1.get_oZ() - P2.get_oZ());
1034
1035 if (dx <= std::numeric_limits<double>::epsilon() && dy <= std::numeric_limits<double>::epsilon() &&
1036 dz <= std::numeric_limits<double>::epsilon())
1037 return true;
1038 else
1039 return false;
1040}
1041
// NOTE(review): signature line lost in extraction; per the member list this
// is setCameraParameters(const vpCameraParameters &camera). It stores the
// intrinsics for this face and propagates them to every bounding line.
1043{
1044 m_cam = camera;
1045
1046 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1047 ++it) {
1048 (*it)->setCameraParameters(camera);
1049 }
1050}
1051
// NOTE(review): signature line lost in extraction; from the body this
// presumably is setScanLineVisibilityTest(bool v) — it toggles scan-line
// visibility for the face and forwards the flag to every bounding line.
// TODO confirm against the header.
1053{
1054 m_useScanLine = v;
1055
1056 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
1057 ++it) {
1058 (*it)->useScanLine = v;
1059 }
1060}
1061END_VISP_NAMESPACE
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
vpRowVector t() const
Class to define RGB colors available for display functionalities.
Definition vpColor.h:157
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
Implementation of an homogeneous matrix and operations on such kind of matrices.
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
Definition vpImage.h:131
Implementation of a matrix and operations on matrices.
Definition vpMatrix.h:175
Implementation of the polygons management for the model-based trackers.
bool isVisible(unsigned int i)
Manage the line of a polygon used in the model-based tracker.
void setIndex(unsigned int i)
vpPoint * p2
The second extremity.
std::list< int > Lindex_polygon
Index of the faces which contain the line.
void buildFrom(vpPoint &_p1, vpPoint &_p2, vpUniRand &rand_gen)
vpMbHiddenFaces< vpMbtPolygon > * hiddenface
Pointer to the list of faces.
std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
vpMbtPolygon & getPolygon()
bool useScanLine
Use scanline rendering.
vpPoint * p1
The first extremity.
void setCameraParameters(const vpCameraParameters &camera)
void setName(const std::string &line_name)
void setVisible(bool _isvisible)
void addPolygon(const int &index)
double m_depthDenseFilteringMinDist
Minimum distance threshold.
vpMbHiddenFaces< vpMbtPolygon > * m_hiddenFace
Pointer to the list of faces.
bool m_isVisible
Visibility flag.
double m_distFarClip
Distance for far clipping.
std::vector< double > m_pointCloudFace
List of depth points inside the face.
vpPlane m_planeObject
Plane equation described in the object frame.
void display(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, const vpColor &col, unsigned int thickness=1, bool displayFullModel=false)
void setCameraParameters(const vpCameraParameters &camera)
std::vector< std::vector< double > > getModelForDisplay(unsigned int width, unsigned int height, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, bool displayFullModel=false)
void computeROI(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height, std::vector< vpImagePoint > &roiPts, double &distanceToFace)
unsigned int getNbFeatures() const
bool samePoint(const vpPoint &P1, const vpPoint &P2) const
void computeInteractionMatrixAndResidu(const vpHomogeneousMatrix &cMo, vpMatrix &L, vpColVector &error)
vpMbtPolygon * m_polygon
Polygon defining the face.
bool m_useScanLine
Scan line visibility.
bool m_isTrackedDepthDenseFace
Flag to define if the face should be tracked or not.
double m_depthDenseFilteringMaxDist
Maximum distance threshold.
vpMbtFaceDepthDense & operator=(const vpMbtFaceDepthDense &mbt_face)
bool computeDesiredFeatures(const vpHomogeneousMatrix &cMo, const pcl::PointCloud< pcl::PointXYZ >::ConstPtr &point_cloud, unsigned int stepX, unsigned int stepY, const vpImage< bool > *mask=nullptr)
std::vector< PolygonLine > m_polygonLines
Polygon lines used for scan-line visibility.
int m_depthDenseFilteringMethod
Method to use to consider or not the face.
unsigned int m_clippingFlag
Flags specifying which clipping to use.
std::vector< vpMbtDistanceLine * > m_listOfFaceLines
vpCameraParameters m_cam
Camera intrinsic parameters.
double m_depthDenseFilteringOccupancyRatio
Ratio between available depth points and theoretical number of points.
double m_distNearClip
Distance for near clipping.
void displayFeature(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double scale=0.05, unsigned int thickness=1)
void addLine(vpPoint &p1, vpPoint &p2, vpMbHiddenFaces< vpMbtPolygon > *const faces, vpUniRand &rand_gen, int polygon=-1, std::string name="")
static bool inRoiMask(const vpImage< bool > *mask, unsigned int i, unsigned int j)
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79
double get_oX() const
Get the point oX coordinate in the object frame.
Definition vpPoint.cpp:418
double get_Y() const
Get the point cY coordinate in the camera frame.
Definition vpPoint.cpp:411
double get_oZ() const
Get the point oZ coordinate in the object frame.
Definition vpPoint.cpp:422
void set_X(double cX)
Set the point cX coordinate in the camera frame.
Definition vpPoint.cpp:453
void set_Y(double cY)
Set the point cY coordinate in the camera frame.
Definition vpPoint.cpp:455
double get_Z() const
Get the point cZ coordinate in the camera frame.
Definition vpPoint.cpp:413
void set_Z(double cZ)
Set the point cZ coordinate in the camera frame.
Definition vpPoint.cpp:457
double get_oY() const
Get the point oY coordinate in the object frame.
Definition vpPoint.cpp:420
double get_X() const
Get the point cX coordinate in the camera frame.
Definition vpPoint.cpp:409
Implements a 3D polygon with render functionalities like clipping.
Definition vpPolygon3D.h:57
void setFarClippingDistance(const double &dist)
void setNearClippingDistance(const double &dist)
vpPoint * p
corners in the object frame
Definition vpPolygon3D.h:79
virtual void setNbPoint(unsigned int nb)
void setClipping(const unsigned int &flags)
void addPoint(unsigned int n, const vpPoint &P)
Defines a generic 2D polygon.
Definition vpPolygon.h:103
vpRect getBoundingBox() const
Definition vpPolygon.h:164
bool isInside(const vpImagePoint &iP, const PointInPolygonMethod &method=PnPolyRayCasting) const
Defines a rectangle in the plane.
Definition vpRect.h:79
Class for generating random numbers with uniform probability density.
Definition vpUniRand.h:127
VISP_EXPORT bool checkSSE2()
VISP_EXPORT bool checkNeon()