Visual Servoing Platform version 3.6.0
vpMbtFaceDepthDense.cpp
1/****************************************************************************
2 *
3 * ViSP, open source Visual Servoing Platform software.
4 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
5 *
6 * This software is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
10 * See the file LICENSE.txt at the root directory of this source
11 * distribution for additional information about the GNU GPL.
12 *
13 * For using ViSP with software that can not be combined with the GNU
14 * GPL, please contact Inria about acquiring a ViSP Professional
15 * Edition License.
16 *
17 * See https://visp.inria.fr for more information.
18 *
19 * This software was developed at:
20 * Inria Rennes - Bretagne Atlantique
21 * Campus Universitaire de Beaulieu
22 * 35042 Rennes Cedex
23 * France
24 *
25 * If you have questions regarding the use of this file, please contact
26 * Inria at visp@inria.fr
27 *
28 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
29 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
30 *
31 * Description:
32 * Manage depth dense features for a particular face.
33 *
34*****************************************************************************/
35
36#include <visp3/core/vpCPUFeatures.h>
37#include <visp3/mbt/vpMbtFaceDepthDense.h>
38
39#ifdef VISP_HAVE_PCL
40#include <pcl/common/point_tests.h>
41#endif
42
43#if defined __SSE2__ || defined _M_X64 || (defined _M_IX86_FP && _M_IX86_FP >= 2)
44#include <emmintrin.h>
45#define VISP_HAVE_SSE2 1
46#endif
47
48// https://stackoverflow.com/a/40765925
49#if !defined(__FMA__) && defined(__AVX2__)
50 #define __FMA__ 1
51#endif
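// Note on the workaround above: MSVC never predefines __FMA__, and in practice every
// AVX2-capable x86 CPU also supports FMA3, so __AVX2__ is used here as a proxy to enable
// the fused multiply-add path (see the Stack Overflow link above).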
52
53#if defined _WIN32 && defined(_M_ARM64)
54#define _ARM64_DISTINCT_NEON_TYPES
55#include <Intrin.h>
56#include <arm_neon.h>
57#define VISP_HAVE_NEON 1
58#elif (defined(__ARM_NEON__) || defined (__ARM_NEON)) && defined(__aarch64__)
59#include <arm_neon.h>
60#define VISP_HAVE_NEON 1
61#endif
62
63#define USE_SIMD_CODE 1
64
65#if VISP_HAVE_SSE2 && USE_SIMD_CODE
66#define USE_SSE 1
67#else
68#define USE_SSE 0
69#endif
70
71#if VISP_HAVE_NEON && USE_SIMD_CODE
72#define USE_NEON 1
73#else
74#define USE_NEON 0
75#endif
76
77#if (VISP_HAVE_OPENCV_VERSION >= 0x040101 || (VISP_HAVE_OPENCV_VERSION < 0x040000 && VISP_HAVE_OPENCV_VERSION >= 0x030407)) && USE_SIMD_CODE
78
79// See: https://github.com/lagadic/visp/issues/1606
80// 0x040B00 --> (4<<16 | 11<<8 | 0)
81 // cv::v_mul is only available on all platforms starting from OpenCV 4.11.
82 // So if OpenCV >= 4.11 or OpenCV < 4.9 --> use the OpenCV HAL API.
83 // Otherwise (OpenCV >= 4.9 and < 4.11), use the OpenCV HAL API only on regular platforms (x86 with SSE2 or ARM64 with NEON).
84#if (VISP_HAVE_OPENCV_VERSION >= 0x040B00) || (VISP_HAVE_OPENCV_VERSION < 0x040900) || \
85 ( (VISP_HAVE_OPENCV_VERSION >= 0x040900) && (VISP_HAVE_OPENCV_VERSION < 0x040B00) && (USE_SSE || USE_NEON) )
86#define USE_OPENCV_HAL 1
87#include <opencv2/core/simd_intrinsics.hpp>
88#include <opencv2/core/hal/intrin.hpp>
89#endif
90
91#endif
92
93#if !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
94#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
95#include <cstdint>
96#endif
97
98namespace
99{
100#if USE_SSE
101inline void v_load_deinterleave(const uint64_t *ptr, __m128i& a, __m128i& b, __m128i& c)
102{
103 __m128i t0 = _mm_loadu_si128((const __m128i*)ptr); // a0, b0
104 __m128i t1 = _mm_loadu_si128((const __m128i*)(ptr + 2)); // c0, a1
105 __m128i t2 = _mm_loadu_si128((const __m128i*)(ptr + 4)); // b1, c1
106
107 t1 = _mm_shuffle_epi32(t1, 0x4e); // a1, c0
108
109 a = _mm_unpacklo_epi64(t0, t1);
110 b = _mm_unpacklo_epi64(_mm_unpackhi_epi64(t0, t0), t2);
111 c = _mm_unpackhi_epi64(t1, t2);
112}
113
114inline void v_load_deinterleave(const double* ptr, __m128d& a0, __m128d& b0, __m128d& c0)
115{
116 __m128i a1, b1, c1;
117 v_load_deinterleave((const uint64_t*)ptr, a1, b1, c1);
118 a0 = _mm_castsi128_pd(a1);
119 b0 = _mm_castsi128_pd(b1);
120 c0 = _mm_castsi128_pd(c1);
121}
122
123inline __m128d v_combine_low(const __m128d& a, const __m128d& b)
124{
125 __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
126 return _mm_castsi128_pd(_mm_unpacklo_epi64(a1, b1));
127}
128
129inline __m128d v_combine_high(const __m128d& a, const __m128d& b)
130{
131 __m128i a1 = _mm_castpd_si128(a), b1 = _mm_castpd_si128(b);
132 return _mm_castsi128_pd(_mm_unpackhi_epi64(a1, b1));
133}
134
135inline __m128d v_fma(const __m128d& a, const __m128d& b, const __m128d& c)
136{
137#if __FMA__
138 return _mm_fmadd_pd(a, b, c);
139#else
140 return _mm_add_pd(_mm_mul_pd(a, b), c);
141#endif
142}
143#else
144inline void v_load_deinterleave(const double* ptr, float64x2_t& a0, float64x2_t& b0, float64x2_t& c0)
145{
146 float64x2x3_t v = vld3q_f64(ptr);
147 a0 = v.val[0];
148 b0 = v.val[1];
149 c0 = v.val[2];
150}
151
152inline float64x2_t v_combine_low(const float64x2_t& a, const float64x2_t& b)
153{
154 return vcombine_f64(vget_low_f64(a), vget_low_f64(b));
155}
156
157inline float64x2_t v_combine_high(const float64x2_t& a, const float64x2_t& b)
158{
159 return vcombine_f64(vget_high_f64(a), vget_high_f64(b));
160}
161
162inline float64x2_t v_fma(const float64x2_t& a, const float64x2_t& b, const float64x2_t& c)
163{
164 return vfmaq_f64(c, a, b);
165}
166#endif
167}
168#endif // !USE_OPENCV_HAL && (USE_SSE || USE_NEON)
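// The helpers above mirror the small subset of the OpenCV universal-intrinsics API used
// below when the OpenCV HAL is not available:
//  - v_load_deinterleave() reads 6 packed doubles {x0,y0,z0, x1,y1,z1} and splits them
//    into three 2-lane registers a = {x0,x1}, b = {y0,y1}, c = {z0,z1};
//  - v_combine_low() / v_combine_high() build one register from the low / high lanes of
//    two inputs;
//  - v_fma(a, b, c) computes a*b + c, with a fused multiply-add when supported.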
169
170vpMbtFaceDepthDense::vpMbtFaceDepthDense()
171 : m_cam(), m_clippingFlag(vpPolygon3D::NO_CLIPPING), m_distFarClip(100), m_distNearClip(0.001), m_hiddenFace(NULL),
172 m_planeObject(), m_polygon(NULL), m_useScanLine(false),
173 m_depthDenseFilteringMethod(DEPTH_OCCUPANCY_RATIO_FILTERING), m_depthDenseFilteringMaxDist(3.0),
174 m_depthDenseFilteringMinDist(0.8), m_depthDenseFilteringOccupancyRatio(0.3), m_isTrackedDepthDenseFace(true),
175 m_isVisible(false), m_listOfFaceLines(), m_planeCamera(), m_pointCloudFace(), m_polygonLines()
176{
177}
178
179vpMbtFaceDepthDense::~vpMbtFaceDepthDense()
180{
181 for (size_t i = 0; i < m_listOfFaceLines.size(); i++) {
182 delete m_listOfFaceLines[i];
183 }
184}
185
200void vpMbtFaceDepthDense::addLine(vpPoint &P1, vpPoint &P2, vpMbHiddenFaces<vpMbtPolygon> *const faces,
201 vpUniRand &rand_gen, int polygon, std::string name)
202{
203 // Build a PolygonLine to be able to easily display the lines model
204 PolygonLine polygon_line;
205
206 // Add polygon
207 polygon_line.m_poly.setNbPoint(2);
208 polygon_line.m_poly.addPoint(0, P1);
209 polygon_line.m_poly.addPoint(1, P2);
210
211 polygon_line.m_poly.setClipping(m_clippingFlag);
212 polygon_line.m_poly.setNearClippingDistance(m_distNearClip);
213 polygon_line.m_poly.setFarClippingDistance(m_distFarClip);
214
215 polygon_line.m_p1 = &polygon_line.m_poly.p[0];
216 polygon_line.m_p2 = &polygon_line.m_poly.p[1];
217
218 m_polygonLines.push_back(polygon_line);
219
220 // suppress line already in the model
221 bool already_here = false;
222 vpMbtDistanceLine *l;
223
224 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
225 ++it) {
226 l = *it;
227 if ((samePoint(*(l->p1), P1) && samePoint(*(l->p2), P2)) || (samePoint(*(l->p1), P2) && samePoint(*(l->p2), P1))) {
228 already_here = true;
229 l->addPolygon(polygon);
230 l->hiddenface = faces;
231 l->useScanLine = m_useScanLine;
232 }
233 }
234
235 if (!already_here) {
236 l = new vpMbtDistanceLine;
237
238 l->setCameraParameters(m_cam);
239 l->buildFrom(P1, P2, rand_gen);
240 l->addPolygon(polygon);
241 l->hiddenface = faces;
242 l->useScanLine = m_useScanLine;
243
244 l->setIndex((unsigned int)m_listOfFaceLines.size());
245 l->setName(name);
246
247 if (m_clippingFlag != vpPolygon3D::NO_CLIPPING)
248 l->getPolygon().setClipping(m_clippingFlag);
249
250 if ((m_clippingFlag & vpPolygon3D::NEAR_CLIPPING) == vpPolygon3D::NEAR_CLIPPING)
251 l->getPolygon().setNearClippingDistance(m_distNearClip);
252
253 if ((m_clippingFlag & vpPolygon3D::FAR_CLIPPING) == vpPolygon3D::FAR_CLIPPING)
254 l->getPolygon().setFarClippingDistance(m_distFarClip);
255
256 m_listOfFaceLines.push_back(l);
257 }
258}
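// addLine() registers each polygon edge twice: as a PolygonLine (used by computeROI()
// for the scan-line queries) and, unless an identical segment is already present
// (samePoint() on both extremities), as a vpMbtDistanceLine stored in m_listOfFaceLines
// for the wireframe display of the face.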
259
260#ifdef VISP_HAVE_PCL
261bool vpMbtFaceDepthDense::computeDesiredFeatures(const vpHomogeneousMatrix &cMo,
262 const pcl::PointCloud<pcl::PointXYZ>::ConstPtr &point_cloud,
263 unsigned int stepX, unsigned int stepY
264#if DEBUG_DISPLAY_DEPTH_DENSE
265 ,
266 vpImage<unsigned char> &debugImage,
267 std::vector<std::vector<vpImagePoint> > &roiPts_vec
268#endif
269 ,
270 const vpImage<bool> *mask)
271{
272 unsigned int width = point_cloud->width, height = point_cloud->height;
273 m_pointCloudFace.clear();
274
275 if (point_cloud->width == 0 || point_cloud->height == 0)
276 return false;
277
278 std::vector<vpImagePoint> roiPts;
279 double distanceToFace;
280 computeROI(cMo, width, height, roiPts
281#if DEBUG_DISPLAY_DEPTH_DENSE
282 ,
283 roiPts_vec
284#endif
285 ,
286 distanceToFace);
287
288 if (roiPts.size() <= 2) {
289#ifndef NDEBUG
290 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
291#endif
292 return false;
293 }
294
295 if (((m_depthDenseFilteringMethod & MAX_DISTANCE_FILTERING) && distanceToFace > m_depthDenseFilteringMaxDist) ||
296 ((m_depthDenseFilteringMethod & MIN_DISTANCE_FILTERING) && distanceToFace < m_depthDenseFilteringMinDist)) {
297 return false;
298 }
299
300 vpPolygon polygon_2d(roiPts);
301 vpRect bb = polygon_2d.getBoundingBox();
302
303 unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
304 unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
305 unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
306 unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
307
308 bb.setTop(top);
309 bb.setBottom(bottom);
310 bb.setLeft(left);
311 bb.setRight(right);
312
313 if (bb.getHeight() < 0 || bb.getWidth() < 0) {
314 return false;
315 }
316
317 m_pointCloudFace.reserve((size_t)(bb.getWidth() * bb.getHeight()));
318
319 int totalTheoreticalPoints = 0, totalPoints = 0;
320 for (unsigned int i = top; i < bottom; i += stepY) {
321 for (unsigned int j = left; j < right; j += stepX) {
322 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
323 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
324 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
325 : polygon_2d.isInside(vpImagePoint(i, j)))) {
326 totalTheoreticalPoints++;
327
328 if (vpMeTracker::inMask(mask, i, j) && pcl::isFinite((*point_cloud)(j, i)) && (*point_cloud)(j, i).z > 0) {
329 totalPoints++;
330
331 m_pointCloudFace.push_back((*point_cloud)(j, i).x);
332 m_pointCloudFace.push_back((*point_cloud)(j, i).y);
333 m_pointCloudFace.push_back((*point_cloud)(j, i).z);
334
335#if DEBUG_DISPLAY_DEPTH_DENSE
336 debugImage[i][j] = 255;
337#endif
338 }
339 }
340 }
341 }
342
343 if (totalPoints == 0 || ((m_depthDenseFilteringMethod & DEPTH_OCCUPANCY_RATIO_FILTERING) &&
344 totalPoints / (double)totalTheoreticalPoints < m_depthDenseFilteringOccupancyRatio)) {
345 return false;
346 }
347
348 return true;
349}
350#endif
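// Summary of computeDesiredFeatures(): the face is projected into the depth image
// (computeROI()), the resulting region of interest is clamped to the image bounds, and
// every pixel inside it (sampled with stepX/stepY) that belongs to the face -- either via
// the scan-line primitive IDs or via a 2D point-in-polygon test -- and that carries a
// valid, unmasked depth contributes its (X, Y, Z) coordinates to m_pointCloudFace. When
// occupancy-ratio filtering is enabled, the face is rejected (return false) if the ratio
// of valid depth points to theoretically visible points falls below
// m_depthDenseFilteringOccupancyRatio.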
351
352bool vpMbtFaceDepthDense::computeDesiredFeatures(const vpHomogeneousMatrix &cMo, unsigned int width,
353 unsigned int height, const std::vector<vpColVector> &point_cloud,
354 unsigned int stepX, unsigned int stepY
355#if DEBUG_DISPLAY_DEPTH_DENSE
356 ,
357 vpImage<unsigned char> &debugImage,
358 std::vector<std::vector<vpImagePoint> > &roiPts_vec
359#endif
360 ,
361 const vpImage<bool> *mask)
362{
363 m_pointCloudFace.clear();
364
365 if (width == 0 || height == 0)
366 return false;
367
368 std::vector<vpImagePoint> roiPts;
369 double distanceToFace;
370 computeROI(cMo, width, height, roiPts
371#if DEBUG_DISPLAY_DEPTH_DENSE
372 ,
373 roiPts_vec
374#endif
375 ,
376 distanceToFace);
377
378 if (roiPts.size() <= 2) {
379#ifndef NDEBUG
380 std::cerr << "Error: roiPts.size() <= 2 in computeDesiredFeatures" << std::endl;
381#endif
382 return false;
383 }
384
385 if (((m_depthDenseFilteringMethod & MAX_DISTANCE_FILTERING) && distanceToFace > m_depthDenseFilteringMaxDist) ||
386 ((m_depthDenseFilteringMethod & MIN_DISTANCE_FILTERING) && distanceToFace < m_depthDenseFilteringMinDist)) {
387 return false;
388 }
389
390 vpPolygon polygon_2d(roiPts);
391 vpRect bb = polygon_2d.getBoundingBox();
392
393 unsigned int top = (unsigned int)std::max(0.0, bb.getTop());
394 unsigned int bottom = (unsigned int)std::min((double)height, std::max(0.0, bb.getBottom()));
395 unsigned int left = (unsigned int)std::max(0.0, bb.getLeft());
396 unsigned int right = (unsigned int)std::min((double)width, std::max(0.0, bb.getRight()));
397
398 bb.setTop(top);
399 bb.setBottom(bottom);
400 bb.setLeft(left);
401 bb.setRight(right);
402
403 m_pointCloudFace.reserve((size_t)(bb.getWidth() * bb.getHeight()));
404
405 int totalTheoreticalPoints = 0, totalPoints = 0;
406 for (unsigned int i = top; i < bottom; i += stepY) {
407 for (unsigned int j = left; j < right; j += stepX) {
408 if ((m_useScanLine ? (i < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getHeight() &&
409 j < m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs().getWidth() &&
410 m_hiddenFace->getMbScanLineRenderer().getPrimitiveIDs()[i][j] == m_polygon->getIndex())
411 : polygon_2d.isInside(vpImagePoint(i, j)))) {
412 totalTheoreticalPoints++;
413
414 if (vpMeTracker::inMask(mask, i, j) && point_cloud[i * width + j][2] > 0) {
415 totalPoints++;
416
417 m_pointCloudFace.push_back(point_cloud[i * width + j][0]);
418 m_pointCloudFace.push_back(point_cloud[i * width + j][1]);
419 m_pointCloudFace.push_back(point_cloud[i * width + j][2]);
420
421#if DEBUG_DISPLAY_DEPTH_DENSE
422 debugImage[i][j] = 255;
423#endif
424 }
425 }
426 }
427 }
428
429 if (totalPoints == 0 || ((m_depthDenseFilteringMethod & DEPTH_OCCUPANCY_RATIO_FILTERING) &&
430 totalPoints / (double)totalTheoreticalPoints < m_depthDenseFilteringOccupancyRatio)) {
431 return false;
432 }
433
434 return true;
435}
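// Same processing as the PCL overload above, but operating on a flat
// std::vector<vpColVector> point cloud indexed as i * width + j, where each element
// stores the (X, Y, Z) coordinates of the corresponding pixel.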
436
437void vpMbtFaceDepthDense::computeVisibility() { m_isVisible = m_polygon->isVisible(); }
438
439void vpMbtFaceDepthDense::computeVisibilityDisplay()
440{
441 // Compute lines visibility, only for display
442 vpMbtDistanceLine *line;
443 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
444 ++it) {
445 line = *it;
446 bool isvisible = false;
447
448 for (std::list<int>::const_iterator itindex = line->Lindex_polygon.begin(); itindex != line->Lindex_polygon.end();
449 ++itindex) {
450 int index = *itindex;
451 if (index == -1) {
452 isvisible = true;
453 } else {
454 if (line->hiddenface->isVisible((unsigned int)index)) {
455 isvisible = true;
456 }
457 }
458 }
459
460 // If the line does not belong to any face, it is always visible
461 if (line->Lindex_polygon.empty())
462 isvisible = true; // Not sure that this can occur
463
464 if (isvisible) {
465 line->setVisible(true);
466 } else {
467 line->setVisible(false);
468 }
469 }
470}
471
472void vpMbtFaceDepthDense::computeInteractionMatrixAndResidu(const vpHomogeneousMatrix &cMo, vpMatrix &L,
473 vpColVector &error)
474{
475 if (m_pointCloudFace.empty()) {
476 L.resize(0, 0);
477 error.resize(0);
478 return;
479 }
480
481 L.resize(getNbFeatures(), 6, false, false);
482 error.resize(getNbFeatures(), false);
483
484 // Transform the plane equation for the current pose
485 m_planeCamera = m_planeObject;
486 m_planeCamera.changeFrame(cMo);
487
488 double nx = m_planeCamera.getA();
489 double ny = m_planeCamera.getB();
490 double nz = m_planeCamera.getC();
491 double D = m_planeCamera.getD();
492
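// For a 3D point P = (X, Y, Z) expressed in the camera frame, the residual computed below
// is the signed point-to-plane distance
//   e = nx*X + ny*Y + nz*Z + D
// and the corresponding row of the interaction matrix, relating e to the camera velocity
// twist (vx, vy, vz, wx, wy, wz), is
//   L = [ nx  ny  nz  (nz*Y - ny*Z)  (nx*Z - nz*X)  (ny*X - nx*Y) ]
// Both the SIMD and the scalar branches below fill L and e exactly this way.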
493 bool useSIMD = vpCPUFeatures::checkSSE2() || vpCPUFeatures::checkNeon();
494#if USE_OPENCV_HAL
495 useSIMD = true;
496#endif
497#if !USE_SSE && !USE_NEON && !USE_OPENCV_HAL
498 useSIMD = false;
499#endif
500
501 if (useSIMD) {
502#if USE_SSE || USE_NEON || USE_OPENCV_HAL
503 size_t cpt = 0;
504 if (getNbFeatures() >= 2) {
505 double *ptr_point_cloud = &m_pointCloudFace[0];
506 double *ptr_L = L.data;
507 double *ptr_error = error.data;
508
509#if USE_OPENCV_HAL
510 const cv::v_float64x2 vnx = cv::v_setall_f64(nx);
511 const cv::v_float64x2 vny = cv::v_setall_f64(ny);
512 const cv::v_float64x2 vnz = cv::v_setall_f64(nz);
513 const cv::v_float64x2 vd = cv::v_setall_f64(D);
514#elif USE_SSE
515 const __m128d vnx = _mm_set1_pd(nx);
516 const __m128d vny = _mm_set1_pd(ny);
517 const __m128d vnz = _mm_set1_pd(nz);
518 const __m128d vd = _mm_set1_pd(D);
519#else
520 const float64x2_t vnx = vdupq_n_f64(nx);
521 const float64x2_t vny = vdupq_n_f64(ny);
522 const float64x2_t vnz = vdupq_n_f64(nz);
523 const float64x2_t vd = vdupq_n_f64(D);
524#endif
525
526 for (; cpt <= m_pointCloudFace.size() - 6; cpt += 6, ptr_point_cloud += 6) {
527#if USE_OPENCV_HAL
528 cv::v_float64x2 vx, vy, vz;
529 cv::v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
530
531#if (VISP_HAVE_OPENCV_VERSION >= 0x040900)
532 cv::v_float64x2 va1 = cv::v_sub(cv::v_mul(vnz, vy), cv::v_mul(vny, vz)); // vnz*vy - vny*vz
533 cv::v_float64x2 va2 = cv::v_sub(cv::v_mul(vnx, vz), cv::v_mul(vnz, vx)); // vnx*vz - vnz*vx
534 cv::v_float64x2 va3 = cv::v_sub(cv::v_mul(vny, vx), cv::v_mul(vnx, vy)); // vny*vx - vnx*vy
535#else
536 cv::v_float64x2 va1 = vnz*vy - vny*vz;
537 cv::v_float64x2 va2 = vnx*vz - vnz*vx;
538 cv::v_float64x2 va3 = vny*vx - vnx*vy;
539#endif
540
541 cv::v_float64x2 vnxy = cv::v_combine_low(vnx, vny);
542 cv::v_store(ptr_L, vnxy);
543 ptr_L += 2;
544 vnxy = cv::v_combine_low(vnz, va1);
545 cv::v_store(ptr_L, vnxy);
546 ptr_L += 2;
547 vnxy = cv::v_combine_low(va2, va3);
548 cv::v_store(ptr_L, vnxy);
549 ptr_L += 2;
550
551 vnxy = cv::v_combine_high(vnx, vny);
552 cv::v_store(ptr_L, vnxy);
553 ptr_L += 2;
554 vnxy = cv::v_combine_high(vnz, va1);
555 cv::v_store(ptr_L, vnxy);
556 ptr_L += 2;
557 vnxy = cv::v_combine_high(va2, va3);
558 cv::v_store(ptr_L, vnxy);
559 ptr_L += 2;
560
561#if (VISP_HAVE_OPENCV_VERSION >= 0x040900)
562 cv::v_float64x2 verr = cv::v_add(vd, cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, cv::v_mul(vnz, vz))));
563#else
564 cv::v_float64x2 verr = vd + cv::v_muladd(vnx, vx, cv::v_muladd(vny, vy, vnz*vz));
565#endif
566
567 cv::v_store(ptr_error, verr);
568 ptr_error += 2;
569#elif USE_SSE
570 __m128d vx, vy, vz;
571 v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
572
573 __m128d va1 = _mm_sub_pd(_mm_mul_pd(vnz, vy), _mm_mul_pd(vny, vz));
574 __m128d va2 = _mm_sub_pd(_mm_mul_pd(vnx, vz), _mm_mul_pd(vnz, vx));
575 __m128d va3 = _mm_sub_pd(_mm_mul_pd(vny, vx), _mm_mul_pd(vnx, vy));
576
577 __m128d vnxy = v_combine_low(vnx, vny);
578 _mm_storeu_pd(ptr_L, vnxy);
579 ptr_L += 2;
580 vnxy = v_combine_low(vnz, va1);
581 _mm_storeu_pd(ptr_L, vnxy);
582 ptr_L += 2;
583 vnxy = v_combine_low(va2, va3);
584 _mm_storeu_pd(ptr_L, vnxy);
585 ptr_L += 2;
586
587 vnxy = v_combine_high(vnx, vny);
588 _mm_storeu_pd(ptr_L, vnxy);
589 ptr_L += 2;
590 vnxy = v_combine_high(vnz, va1);
591 _mm_storeu_pd(ptr_L, vnxy);
592 ptr_L += 2;
593 vnxy = v_combine_high(va2, va3);
594 _mm_storeu_pd(ptr_L, vnxy);
595 ptr_L += 2;
596
597 const __m128d verror = _mm_add_pd(vd, v_fma(vnx, vx, v_fma(vny, vy, _mm_mul_pd(vnz, vz))));
598 _mm_storeu_pd(ptr_error, verror);
599 ptr_error += 2;
600#else
601 float64x2_t vx, vy, vz;
602 v_load_deinterleave(ptr_point_cloud, vx, vy, vz);
603
604 float64x2_t va1 = vsubq_f64(vmulq_f64(vnz, vy), vmulq_f64(vny, vz));
605 float64x2_t va2 = vsubq_f64(vmulq_f64(vnx, vz), vmulq_f64(vnz, vx));
606 float64x2_t va3 = vsubq_f64(vmulq_f64(vny, vx), vmulq_f64(vnx, vy));
607
608 float64x2_t vnxy = v_combine_low(vnx, vny);
609 vst1q_f64(ptr_L, vnxy);
610 ptr_L += 2;
611 vnxy = v_combine_low(vnz, va1);
612 vst1q_f64(ptr_L, vnxy);
613 ptr_L += 2;
614 vnxy = v_combine_low(va2, va3);
615 vst1q_f64(ptr_L, vnxy);
616 ptr_L += 2;
617
618 vnxy = v_combine_high(vnx, vny);
619 vst1q_f64(ptr_L, vnxy);
620 ptr_L += 2;
621 vnxy = v_combine_high(vnz, va1);
622 vst1q_f64(ptr_L, vnxy);
623 ptr_L += 2;
624 vnxy = v_combine_high(va2, va3);
625 vst1q_f64(ptr_L, vnxy);
626 ptr_L += 2;
627
628 const float64x2_t verror = vaddq_f64(vd, v_fma(vnx, vx, v_fma(vny, vy, vmulq_f64(vnz, vz))));
629 vst1q_f64(ptr_error, verror);
630 ptr_error += 2;
631#endif
632 }
633 }
634
635 for (; cpt < m_pointCloudFace.size(); cpt += 3) {
636 double x = m_pointCloudFace[cpt];
637 double y = m_pointCloudFace[cpt + 1];
638 double z = m_pointCloudFace[cpt + 2];
639
640 double _a1 = (nz * y) - (ny * z);
641 double _a2 = (nx * z) - (nz * x);
642 double _a3 = (ny * x) - (nx * y);
643
644 // L
645 L[(unsigned int)(cpt / 3)][0] = nx;
646 L[(unsigned int)(cpt / 3)][1] = ny;
647 L[(unsigned int)(cpt / 3)][2] = nz;
648 L[(unsigned int)(cpt / 3)][3] = _a1;
649 L[(unsigned int)(cpt / 3)][4] = _a2;
650 L[(unsigned int)(cpt / 3)][5] = _a3;
651
652 vpColVector normal(3);
653 normal[0] = nx;
654 normal[1] = ny;
655 normal[2] = nz;
656
657 vpColVector pt(3);
658 pt[0] = x;
659 pt[1] = y;
660 pt[2] = z;
661
662 // Error
663 error[(unsigned int)(cpt / 3)] = D + (normal.t() * pt);
664 }
665#endif
666 } else {
667 vpColVector normal(3);
668 normal[0] = nx;
669 normal[1] = ny;
670 normal[2] = nz;
671 vpColVector pt(3);
672
673 unsigned int idx = 0;
674 for (size_t i = 0; i < m_pointCloudFace.size(); i += 3, idx++) {
675 double x = m_pointCloudFace[i];
676 double y = m_pointCloudFace[i + 1];
677 double z = m_pointCloudFace[i + 2];
678
679 double _a1 = (nz * y) - (ny * z);
680 double _a2 = (nx * z) - (nz * x);
681 double _a3 = (ny * x) - (nx * y);
682
683 // L
684 L[idx][0] = nx;
685 L[idx][1] = ny;
686 L[idx][2] = nz;
687 L[idx][3] = _a1;
688 L[idx][4] = _a2;
689 L[idx][5] = _a3;
690
691 pt[0] = x;
692 pt[1] = y;
693 pt[2] = z;
694 // Error
695 error[idx] = D + (normal.t() * pt);
696 }
697 }
698}
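// Informative sketch (not code from this file): the owning tracker, e.g.
// vpMbDepthDenseTracker, typically stacks the per-face L and error vectors, weights them
// with a robust M-estimator, and solves a virtual visual-servoing / Gauss-Newton step of
// the form v = -lambda * (W L)^+ (W error) for the velocity twist used to update the
// pose cMo at each iteration.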
699
700void vpMbtFaceDepthDense::computeROI(const vpHomogeneousMatrix &cMo, unsigned int width, unsigned int height,
701 std::vector<vpImagePoint> &roiPts
702#if DEBUG_DISPLAY_DEPTH_DENSE
703 ,
704 std::vector<std::vector<vpImagePoint> > &roiPts_vec
705#endif
706 ,
707 double &distanceToFace)
708{
709 if (m_useScanLine || m_clippingFlag > 2)
710 m_cam.computeFov(width, height);
711
712 if (m_useScanLine) {
713 for (std::vector<PolygonLine>::iterator it = m_polygonLines.begin(); it != m_polygonLines.end(); ++it) {
714 it->m_p1->changeFrame(cMo);
715 it->m_p2->changeFrame(cMo);
716
717 vpImagePoint ip1, ip2;
718
719 it->m_poly.changeFrame(cMo);
720 it->m_poly.computePolygonClipped(m_cam);
721
722 if (it->m_poly.polyClipped.size() == 2 &&
723 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::NEAR_CLIPPING) == 0) &&
724 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::FAR_CLIPPING) == 0) &&
725 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::DOWN_CLIPPING) == 0) &&
726 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::UP_CLIPPING) == 0) &&
727 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::LEFT_CLIPPING) == 0) &&
728 ((it->m_poly.polyClipped[1].second & it->m_poly.polyClipped[0].second & vpPolygon3D::RIGHT_CLIPPING) == 0)) {
729
730 std::vector<std::pair<vpPoint, vpPoint> > linesLst;
731 m_hiddenFace->computeScanLineQuery(it->m_poly.polyClipped[0].first, it->m_poly.polyClipped[1].first, linesLst,
732 true);
733
734 vpPoint faceCentroid;
735
736 for (unsigned int i = 0; i < linesLst.size(); i++) {
737 linesLst[i].first.project();
738 linesLst[i].second.project();
739
740 vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].first.get_x(), linesLst[i].first.get_y(), ip1);
741 vpMeterPixelConversion::convertPoint(m_cam, linesLst[i].second.get_x(), linesLst[i].second.get_y(), ip2);
742
743 it->m_imPt1 = ip1;
744 it->m_imPt2 = ip2;
745
746 roiPts.push_back(ip1);
747 roiPts.push_back(ip2);
748
749 faceCentroid.set_X(faceCentroid.get_X() + linesLst[i].first.get_X() + linesLst[i].second.get_X());
750 faceCentroid.set_Y(faceCentroid.get_Y() + linesLst[i].first.get_Y() + linesLst[i].second.get_Y());
751 faceCentroid.set_Z(faceCentroid.get_Z() + linesLst[i].first.get_Z() + linesLst[i].second.get_Z());
752
753#if DEBUG_DISPLAY_DEPTH_DENSE
754 std::vector<vpImagePoint> roiPts_;
755 roiPts_.push_back(ip1);
756 roiPts_.push_back(ip2);
757 roiPts_vec.push_back(roiPts_);
758#endif
759 }
760
761 if (linesLst.empty()) {
762 distanceToFace = std::numeric_limits<double>::max();
763 } else {
764 faceCentroid.set_X(faceCentroid.get_X() / (2 * linesLst.size()));
765 faceCentroid.set_Y(faceCentroid.get_Y() / (2 * linesLst.size()));
766 faceCentroid.set_Z(faceCentroid.get_Z() / (2 * linesLst.size()));
767
768 distanceToFace =
769 sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
770 faceCentroid.get_Z() * faceCentroid.get_Z());
771 }
772 }
773 }
774 } else {
775 // Get polygon clipped
776 m_polygon->getRoiClipped(m_cam, roiPts, cMo);
777
778 // Get 3D polygon clipped
779 std::vector<vpPoint> polygonsClipped;
780 m_polygon->getPolygonClipped(polygonsClipped);
781
782 if (polygonsClipped.empty()) {
783 distanceToFace = std::numeric_limits<double>::max();
784 } else {
785 vpPoint faceCentroid;
786
787 for (size_t i = 0; i < polygonsClipped.size(); i++) {
788 faceCentroid.set_X(faceCentroid.get_X() + polygonsClipped[i].get_X());
789 faceCentroid.set_Y(faceCentroid.get_Y() + polygonsClipped[i].get_Y());
790 faceCentroid.set_Z(faceCentroid.get_Z() + polygonsClipped[i].get_Z());
791 }
792
793 faceCentroid.set_X(faceCentroid.get_X() / polygonsClipped.size());
794 faceCentroid.set_Y(faceCentroid.get_Y() / polygonsClipped.size());
795 faceCentroid.set_Z(faceCentroid.get_Z() / polygonsClipped.size());
796
797 distanceToFace = sqrt(faceCentroid.get_X() * faceCentroid.get_X() + faceCentroid.get_Y() * faceCentroid.get_Y() +
798 faceCentroid.get_Z() * faceCentroid.get_Z());
799 }
800
801#if DEBUG_DISPLAY_DEPTH_DENSE
802 roiPts_vec.push_back(roiPts);
803#endif
804 }
805}
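// computeROI() thus fills roiPts with the 2D image points delimiting the visible
// (clipped) part of the face and sets distanceToFace to the distance between the camera
// and the centroid of the clipped 3D face, which the distance-based filtering in
// computeDesiredFeatures() can test against m_depthDenseFilteringMinDist /
// m_depthDenseFilteringMaxDist.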
806
807void vpMbtFaceDepthDense::display(const vpImage<unsigned char> &I, const vpHomogeneousMatrix &cMo,
808 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
809 bool displayFullModel)
810{
811 std::vector<std::vector<double> > models =
812 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
813
814 for (size_t i = 0; i < models.size(); i++) {
815 vpImagePoint ip1(models[i][1], models[i][2]);
816 vpImagePoint ip2(models[i][3], models[i][4]);
817 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
818 }
819}
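// Each row of the vector returned by getModelForDisplay() describes one line segment of
// the face model: indices 1-4 hold the two endpoints as (row, column) image coordinates,
// as read above; index 0 is not used here.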
820
821void vpMbtFaceDepthDense::display(const vpImage<vpRGBa> &I, const vpHomogeneousMatrix &cMo,
822 const vpCameraParameters &cam, const vpColor &col, unsigned int thickness,
823 bool displayFullModel)
824{
825 std::vector<std::vector<double> > models =
826 getModelForDisplay(I.getWidth(), I.getHeight(), cMo, cam, displayFullModel);
827
828 for (size_t i = 0; i < models.size(); i++) {
829 vpImagePoint ip1(models[i][1], models[i][2]);
830 vpImagePoint ip2(models[i][3], models[i][4]);
831 vpDisplay::displayLine(I, ip1, ip2, col, thickness);
832 }
833}
834
835void vpMbtFaceDepthDense::displayFeature(const vpImage<unsigned char> & /*I*/, const vpHomogeneousMatrix & /*cMo*/,
836 const vpCameraParameters & /*cam*/, const double /*scale*/,
837 const unsigned int /*thickness*/)
838{
839}
840
841void vpMbtFaceDepthDense::displayFeature(const vpImage<vpRGBa> & /*I*/, const vpHomogeneousMatrix & /*cMo*/,
842 const vpCameraParameters & /*cam*/, const double /*scale*/,
843 const unsigned int /*thickness*/)
844{
845}
846
858std::vector<std::vector<double> > vpMbtFaceDepthDense::getModelForDisplay(unsigned int width, unsigned int height,
859 const vpHomogeneousMatrix &cMo,
860 const vpCameraParameters &cam,
861 bool displayFullModel)
862{
863 std::vector<std::vector<double> > models;
864
865 if ((m_polygon->isVisible() && m_isTrackedDepthDenseFace) || displayFullModel) {
866 computeVisibilityDisplay();
867
868 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
869 ++it) {
870 vpMbtDistanceLine *line = *it;
871 std::vector<std::vector<double> > lineModels =
872 line->getModelForDisplay(width, height, cMo, cam, displayFullModel);
873 models.insert(models.end(), lineModels.begin(), lineModels.end());
874 }
875 }
876
877 return models;
878}
879
889bool vpMbtFaceDepthDense::samePoint(const vpPoint &P1, const vpPoint &P2) const
890{
891 double dx = fabs(P1.get_oX() - P2.get_oX());
892 double dy = fabs(P1.get_oY() - P2.get_oY());
893 double dz = fabs(P1.get_oZ() - P2.get_oZ());
894
895 if (dx <= std::numeric_limits<double>::epsilon() && dy <= std::numeric_limits<double>::epsilon() &&
896 dz <= std::numeric_limits<double>::epsilon())
897 return true;
898 else
899 return false;
900}
901
902void vpMbtFaceDepthDense::setCameraParameters(const vpCameraParameters &camera)
903{
904 m_cam = camera;
905
906 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
907 ++it) {
908 (*it)->setCameraParameters(camera);
909 }
910}
911
912void vpMbtFaceDepthDense::setScanLineVisibilityTest(bool v)
913{
914 m_useScanLine = v;
915
916 for (std::vector<vpMbtDistanceLine *>::const_iterator it = m_listOfFaceLines.begin(); it != m_listOfFaceLines.end();
917 ++it) {
918 (*it)->useScanLine = v;
919 }
920}