Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
tutorial-mb-generic-tracker-apriltag-webcam.cpp
1
2#include <iostream>
3
4#include <visp3/core/vpConfig.h>
5
7// #undef VISP_HAVE_V4L2
8// #undef HAVE_OPENCV_HIGHGUI
9// #undef HAVE_OPENCV_VIDEOIO
11
13#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_MODULE_MBT) && \
14 (defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_OPENCV) && \
15 (((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)) || \
16 ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO))))
18
19#include <fstream>
20#include <ios>
21
22#ifdef VISP_HAVE_MODULE_SENSOR
23#include <visp3/sensor/vpV4l2Grabber.h>
24#endif
25#include <visp3/core/vpXmlParserCamera.h>
26#include <visp3/detection/vpDetectorAprilTag.h>
27#include <visp3/gui/vpDisplayFactory.h>
28#include <visp3/mbt/vpMbGenericTracker.h>
29
30#if (VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI)
31#include <opencv2/highgui/highgui.hpp> // for cv::VideoCapture
32#elif (VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO)
33#include <opencv2/videoio/videoio.hpp> // for cv::VideoCapture
34#endif
35
36#ifdef ENABLE_VISP_NAMESPACE
37using namespace VISP_NAMESPACE_NAME;
38#endif
39
40typedef enum { state_detection, state_tracking, state_quit } state_t;
41
42// Creates a cube.cao file in your current directory
43// cubeEdgeSize : size of cube edges in meters
// Create a "cube.cao" CAD-model file in the current directory describing a
// cube by 8 vertices and 6 faces. One face lies in the z=0 plane (where the
// AprilTag is expected to be attached), the opposite face at z=-cubeEdgeSize.
// cubeEdgeSize : size of cube edges in meters
void createCaoFile(double cubeEdgeSize)
{
  std::ofstream fileStream("cube.cao", std::ofstream::out | std::ofstream::trunc);
  if (!fileStream.is_open()) {
    // Fail loudly instead of silently producing no model file: the tracker
    // loads "cube.cao" later and would fail with a less obvious error.
    std::cerr << "Error: unable to create cube.cao in the current directory" << std::endl;
    return;
  }
  const double half = cubeEdgeSize / 2; // vertices are centered on the tag face
  fileStream << "V1\n";
  fileStream << "# 3D Points\n";
  fileStream << "8 # Number of points\n";
  fileStream << half << " " << half << " " << 0 << " # Point 0: (X, Y, Z)\n";
  fileStream << half << " " << -half << " " << 0 << " # Point 1\n";
  fileStream << -half << " " << -half << " " << 0 << " # Point 2\n";
  fileStream << -half << " " << half << " " << 0 << " # Point 3\n";
  fileStream << -half << " " << half << " " << -cubeEdgeSize << " # Point 4\n";
  fileStream << -half << " " << -half << " " << -cubeEdgeSize << " # Point 5\n";
  fileStream << half << " " << -half << " " << -cubeEdgeSize << " # Point 6\n";
  fileStream << half << " " << half << " " << -cubeEdgeSize << " # Point 7\n";
  fileStream << "# 3D Lines\n";
  fileStream << "0 # Number of lines\n";
  fileStream << "# Faces from 3D lines\n";
  fileStream << "0 # Number of faces\n";
  fileStream << "# Faces from 3D points\n";
  fileStream << "6 # Number of faces\n";
  fileStream << "4 0 3 2 1 # Face 0: [number of points] [index of the 3D points]...\n";
  fileStream << "4 1 2 5 6\n";
  fileStream << "4 4 7 6 5\n";
  fileStream << "4 0 7 4 3\n";
  fileStream << "4 5 2 3 4\n";
  fileStream << "4 0 1 6 7 # Face 5\n";
  fileStream << "# 3D cylinders\n";
  fileStream << "0 # Number of cylinders\n";
  fileStream << "# 3D circles\n";
  fileStream << "0 # Number of circles\n";
  fileStream.close();
}
77
78state_t detectAprilTag(const vpImage<unsigned char> &I, vpDetectorAprilTag &detector, double tagSize,
80{
81 std::vector<vpHomogeneousMatrix> cMo_vec;
82
83 // Detection
84 bool ret = detector.detect(I, tagSize, cam, cMo_vec);
85
86 // Display camera pose
87 for (size_t i = 0; i < cMo_vec.size(); i++) {
88 vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
89 }
90
91 vpDisplay::displayText(I, 40, 20, "State: waiting tag detection", vpColor::red);
92
93 if (ret && detector.getNbObjects() > 0) { // if tag detected, we pick the first one
94 cMo = cMo_vec[0];
95 return state_tracking;
96 }
97
98 return state_detection;
99}
100
101state_t track(const vpImage<unsigned char> &I, vpMbGenericTracker &tracker, double projection_error_threshold,
103{
105 tracker.getCameraParameters(cam);
106
107 // Track the object
108 try {
109 tracker.track(I);
110 }
111 catch (...) {
112 return state_detection;
113 }
114
115 tracker.getPose(cMo);
116
117 // Detect tracking error
118 double projection_error = tracker.computeCurrentProjectionError(I, cMo, cam);
119 if (projection_error > projection_error_threshold) {
120 return state_detection;
121 }
122
123 // Display
124 tracker.display(I, cMo, cam, vpColor::red, 2);
125 vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, 3);
126 vpDisplay::displayText(I, 40, 20, "State: tracking in progress", vpColor::red);
127 {
128 std::stringstream ss;
129 ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt();
130 vpDisplay::displayText(I, 60, 20, ss.str(), vpColor::red);
131 }
132
133 return state_tracking;
134}
135
// Print the command-line help on stdout.
// argv  : program arguments; argv[0] is used as the program name.
// error : 0 to only print the help, otherwise the index of the unsupported
//         parameter to report in an additional "Error" section.
void usage(const char **argv, int error)
{
  std::cout << "Synopsis" << std::endl
    << "  " << argv[0]
    << " [--device <id>]"
    << " [--tag-size <size>]"
    << " [--tag-family <family>]"
    << " [--tag-decision-margin-threshold <threshold>]"
    << " [--tag-hamming-distance-threshold <threshold>]"
    << " [--tag-quad-decimate <factor>]"
    << " [--tag-n-threads <number>]"
#if defined(VISP_HAVE_PUGIXML)
    << " [--intrinsic <xmlfile>]"
    << " [--camera-name <name>]"
#endif
#if defined(VISP_HAVE_DISPLAY)
    << " [--display-off]"
#endif
    // Fixed typo: was "[--cube-size <size]" (missing closing '>')
    << " [--cube-size <size>]"
    << " [--use-texture]"
    << " [--projection-error-threshold <threshold>]"
    << " [--help, -h]" << std::endl
    << std::endl;
  std::cout << "Description" << std::endl
    << "  Live execution on images acquired by a webcam of the generic model-based tracker" << std::endl
    << "  The considered object is a cube to which an Apriltag is attached on one of its" << std::endl
    << "  faces. Once detected, the pose of the Apriltag is used to initialise the tracker." << std::endl
    << "  The Apriltag must be centred on a face of the cube. If the tracker fails, the " << std::endl
    << "  tag is used to reset the tracker." << std::endl
    << std::endl
    << "  --device <id>" << std::endl
    << "    Camera id." << std::endl
    << "    Default: 0" << std::endl
    << std::endl
    << "  --tag-size <size>" << std::endl
    << "    Apriltag size in [m]." << std::endl
    << "    Default: 0.03" << std::endl
    << std::endl
    << "  --tag-family <family>" << std::endl
    << "    Apriltag family. Supported values are:" << std::endl
    << "    0: TAG_36h11" << std::endl
    << "    1: TAG_36h10 (DEPRECATED)" << std::endl
    << "    2: TAG_36ARTOOLKIT (DEPRECATED)" << std::endl
    << "    3: TAG_25h9" << std::endl
    << "    4: TAG_25h7 (DEPRECATED)" << std::endl
    << "    5: TAG_16h5" << std::endl
    << "    6: TAG_CIRCLE21h7" << std::endl
    << "    7: TAG_CIRCLE49h12" << std::endl
    << "    8: TAG_CUSTOM48h12" << std::endl
    << "    9: TAG_STANDARD41h12" << std::endl
    << "    10: TAG_STANDARD52h13" << std::endl
    << "    11: TAG_ARUCO_4x4_50" << std::endl
    << "    12: TAG_ARUCO_4x4_100" << std::endl
    << "    13: TAG_ARUCO_4x4_250" << std::endl
    << "    14: TAG_ARUCO_4x4_1000" << std::endl
    << "    15: TAG_ARUCO_5x5_50" << std::endl
    << "    16: TAG_ARUCO_5x5_100" << std::endl
    << "    17: TAG_ARUCO_5x5_250" << std::endl
    << "    18: TAG_ARUCO_5x5_1000" << std::endl
    << "    19: TAG_ARUCO_6x6_50" << std::endl
    << "    20: TAG_ARUCO_6x6_100" << std::endl
    << "    21: TAG_ARUCO_6x6_250" << std::endl
    << "    22: TAG_ARUCO_6x6_1000" << std::endl
    << "    23: TAG_ARUCO_7x7_50" << std::endl
    << "    24: TAG_ARUCO_7x7_100" << std::endl
    << "    25: TAG_ARUCO_7x7_250" << std::endl
    << "    26: TAG_ARUCO_7x7_1000" << std::endl
    << "    27: TAG_ARUCO_MIP_36h12" << std::endl
    << "    Default: 0 (36h11)" << std::endl
    << std::endl
    << "  --tag-decision-margin-threshold <threshold>" << std::endl
    << "    Threshold used to discard low-confident detections. A typical value is " << std::endl
    << "    around 100. The higher this value, the more false positives will be filtered" << std::endl
    << "    out. When this value is set to -1, false positives are not filtered out." << std::endl
    << "    Default: 50" << std::endl
    << std::endl
    << "  --tag-hamming-distance-threshold <threshold>" << std::endl
    << "    Threshold used to discard low-confident detections with corrected bits." << std::endl
    << "    A typical value is between 0 and 3. The lower this value, the more false" << std::endl
    << "    positives will be filtered out." << std::endl
    << "    Default: 0" << std::endl
    << std::endl
    << "  --tag-quad-decimate <factor>" << std::endl
    << "    Decimation factor used to detect a tag. " << std::endl
    << "    Default: 1" << std::endl
    << std::endl
    << "  --tag-n-threads <number>" << std::endl
    << "    Number of threads used to detect a tag." << std::endl
    << "    Default: 1" << std::endl
    << std::endl
#if defined(VISP_HAVE_PUGIXML)
    << "  --intrinsic <xmlfile>" << std::endl
    << "    Camera intrinsic parameters file in xml format." << std::endl
    << "    Default: empty" << std::endl
    << std::endl
    << "  --camera-name <name>" << std::endl
    << "    Camera name in the intrinsic parameters file in xml format." << std::endl
    << "    Default: empty" << std::endl
    << std::endl
#endif
#if defined(VISP_HAVE_DISPLAY)
    << "  --display-off" << std::endl
    << "    Flag used to turn display off." << std::endl
    << "    Default: enabled" << std::endl
    << std::endl
#endif
    << "  --cube-size <size>" << std::endl
    << "    Cube size in meter." << std::endl
    << "    Default: 0.125" << std::endl
    << std::endl
#if defined(VISP_HAVE_OPENCV)
    << "  --use-texture" << std::endl
    << "    Flag to enable usage of keypoint features." << std::endl
    << "    Default: disabled" << std::endl
    << std::endl
#endif
    << "  --projection-error-threshold <threshold>" << std::endl
    // Fixed wording: track() restarts the tracker when the error is ABOVE the
    // threshold, the former text said "below".
    << "    Threshold in the range [0:90] deg used to restart the tracker when the projection"
    << "    error is above this threshold." << std::endl
    << "    Default: 40" << std::endl
    << std::endl
    << "  --help, -h" << std::endl
    << "    Print this helper message." << std::endl
    << std::endl;

  if (error) {
    std::cout << "Error" << std::endl
      << "  "
      << "Unsupported parameter " << argv[error] << std::endl;
  }
}
267
268int main(int argc, const char **argv)
269{
270 int opt_device = 0;
272 double opt_tag_size = 0.08;
273 float opt_tag_quad_decimate = 1.0;
274 float opt_tag_decision_margin_threshold = 50;
275 int opt_tag_hamming_distance_threshold = 2;
276 int opt_tag_nthreads = 1;
277 std::string opt_intrinsic_file = "";
278 std::string opt_camera_name = "";
279 double opt_cube_size = 0.125; // 12.5cm by default
280#ifdef VISP_HAVE_OPENCV
281 bool opt_use_texture = false;
282#endif
283 double opt_projection_error_threshold = 40.;
284
285#if !(defined(VISP_HAVE_DISPLAY))
286 bool opt_display_off = true;
287#else
288 bool opt_display_off = false;
289#endif
290
291 for (int i = 1; i < argc; i++) {
292 if (std::string(argv[i]) == "--device" && i + 1 < argc) {
293 opt_device = atoi(argv[++i]);
294 }
295 else if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
296 opt_tag_size = atof(argv[++i]);
297 }
298 else if (std::string(argv[i]) == "--tag-family" && i + 1 < argc) {
299 opt_tag_family = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[i + 1]);
300 }
301 else if (std::string(argv[i]) == "--tag-decision-margin-threshold" && i + 1 < argc) {
302 opt_tag_decision_margin_threshold = static_cast<float>(atof(argv[++i]));
303 }
304 else if (std::string(argv[i]) == "--tag-hamming-distance-threshold" && i + 1 < argc) {
305 opt_tag_hamming_distance_threshold = atoi(argv[++i]);
306 }
307 else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
308 opt_tag_quad_decimate = static_cast<float>(atof(argv[++i]));
309 }
310 else if (std::string(argv[i]) == "--tag-n-threads" && i + 1 < argc) {
311 opt_tag_nthreads = atoi(argv[++i]);
312 }
313#if defined(VISP_HAVE_PUGIXML)
314 else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
315 opt_intrinsic_file = std::string(argv[++i]);
316 }
317 else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
318 opt_camera_name = std::string(argv[++i]);
319 }
320#endif
321#if defined(VISP_HAVE_DISPLAY)
322 else if (std::string(argv[i]) == "--display-off") {
323 opt_display_off = true;
324 }
325#endif
326 else if (std::string(argv[i]) == "--cube-size" && i + 1 < argc) {
327 opt_cube_size = atof(argv[++i]);
328 }
329#ifdef VISP_HAVE_OPENCV
330 else if (std::string(argv[i]) == "--use-texture") {
331 opt_use_texture = true;
332 }
333#endif
334 else if (std::string(argv[i]) == "--projection-error-threshold" && i + 1 < argc) {
335 opt_projection_error_threshold = atof(argv[++i]);
336 }
337 else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
338 usage(argv, 0);
339 return EXIT_SUCCESS;
340 }
341 else {
342 usage(argv, i);
343 return EXIT_FAILURE;
344 }
345 }
346
347 createCaoFile(opt_cube_size);
348
350 bool camIsInit = false;
351#if defined(VISP_HAVE_PUGIXML)
353 if (!opt_intrinsic_file.empty() && !opt_camera_name.empty()) {
354 parser.parse(cam, opt_intrinsic_file, opt_camera_name, vpCameraParameters::perspectiveProjWithoutDistortion);
355 camIsInit = true;
356 }
357#endif
358
359#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
360 std::shared_ptr<vpDisplay> display;
361#else
362 vpDisplay *display = nullptr;
363#endif
364
365 try {
367
369#if defined(VISP_HAVE_V4L2)
371 std::ostringstream device;
372 device << "/dev/video" << opt_device;
373 std::cout << "Use Video 4 Linux grabber on device " << device.str() << std::endl;
374 g.setDevice(device.str());
375 g.setScale(1);
376 g.open(I);
377#elif ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI))|| ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO))
378 std::cout << "Use OpenCV grabber on device " << opt_device << std::endl;
379 cv::VideoCapture g(opt_device); // Open the default camera
380 if (!g.isOpened()) { // Check if we succeeded
381 std::cout << "Failed to open the camera" << std::endl;
382 return EXIT_FAILURE;
383 }
384 cv::Mat frame;
385 g >> frame; // get a new frame from camera
386 vpImageConvert::convert(frame, I);
387#endif
389 if (!camIsInit) {
390 cam.initPersProjWithoutDistortion(600, 600, I.getWidth() / 2., I.getHeight() / 2.);
391 }
392
393 std::cout << "Cube size: " << opt_cube_size << std::endl;
394 std::cout << "AprilTag size: " << opt_tag_size << std::endl;
395 std::cout << "AprilTag family: " << opt_tag_family << std::endl;
396 std::cout << "Camera parameters:\n" << cam << std::endl;
397 std::cout << "Detection: " << std::endl;
398 std::cout << " Quad decimate: " << opt_tag_quad_decimate << std::endl;
399 std::cout << " Threads number: " << opt_tag_nthreads << std::endl;
400 std::cout << "Tracker: " << std::endl;
401 std::cout << " Use edges : 1" << std::endl;
402 std::cout << " Use texture: "
403#ifdef VISP_HAVE_OPENCV
404 << opt_use_texture << std::endl;
405#else
406 << " na" << std::endl;
407#endif
408 std::cout << " Projection error: " << opt_projection_error_threshold << std::endl;
409
410 // Construct display
411 if (!opt_display_off) {
412#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
414#else
416#endif
417 }
418
419 // Initialize AprilTag detector
420 vpDetectorAprilTag detector(opt_tag_family);
421 detector.setAprilTagQuadDecimate(opt_tag_quad_decimate);
422 detector.setAprilTagNbThreads(opt_tag_nthreads);
423 detector.setAprilTagDecisionMarginThreshold(opt_tag_decision_margin_threshold);
424 detector.setAprilTagHammingDistanceThreshold(opt_tag_hamming_distance_threshold);
425
426 // Prepare MBT
428#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
429 if (opt_use_texture)
431 else
432#endif
434 // edges
435 vpMe me;
436 me.setMaskSize(5);
437 me.setMaskNumber(180);
438 me.setRange(12);
440 me.setThreshold(20);
441 me.setMu1(0.5);
442 me.setMu2(0.5);
443 me.setSampleStep(4);
444 tracker.setMovingEdge(me);
445
446#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
447 if (opt_use_texture) {
448 vpKltOpencv klt_settings;
449 klt_settings.setMaxFeatures(300);
450 klt_settings.setWindowSize(5);
451 klt_settings.setQuality(0.015);
452 klt_settings.setMinDistance(8);
453 klt_settings.setHarrisFreeParameter(0.01);
454 klt_settings.setBlockSize(3);
455 klt_settings.setPyramidLevels(3);
456 tracker.setKltOpencv(klt_settings);
457 tracker.setKltMaskBorder(5);
458 }
459#endif
460
461 // camera calibration params
462 tracker.setCameraParameters(cam);
463 // model definition
464 tracker.loadModel("cube.cao");
465 tracker.setDisplayFeatures(true);
466 tracker.setAngleAppear(vpMath::rad(70));
467 tracker.setAngleDisappear(vpMath::rad(80));
468
470 state_t state = state_detection;
471
472 // wait for a tag detection
473 while (state != state_quit) {
474
475#if defined(VISP_HAVE_V4L2)
476 g.acquire(I);
477#elif ((VISP_HAVE_OPENCV_VERSION < 0x030000) && defined(HAVE_OPENCV_HIGHGUI))|| ((VISP_HAVE_OPENCV_VERSION >= 0x030000) && defined(HAVE_OPENCV_VIDEOIO))
478 g >> frame;
479 vpImageConvert::convert(frame, I);
480#endif
481
483
484 if (state == state_detection) {
485 state = detectAprilTag(I, detector, opt_tag_size, cam, cMo);
486
487 // Initialize the tracker with the result of the detection
488 if (state == state_tracking) {
490 tracker.initFromPose(I, cMo);
492 }
493 }
494
495 if (state == state_tracking) {
496 state = track(I, tracker, opt_projection_error_threshold, cMo);
497 }
498
499 vpDisplay::displayText(I, 20, 20, "Click to quit...", vpColor::red);
500 if (vpDisplay::getClick(I, false)) { // exit
501 state = state_quit;
502 }
503
505 }
506 }
507 catch (const vpException &e) {
508 std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
509 }
510
511#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
512 if (!opt_display_off)
513 delete display;
514#endif
515 return EXIT_SUCCESS;
516}
517
518#else
519
// Fallback entry point compiled when the required third parties (AprilTag,
// MBT module, and a V4L2/OpenCV grabber) are not available: explain what is
// missing and exit successfully.
int main()
{
#if !defined(VISP_HAVE_APRILTAG)
  // Grammar fix in the user-facing message: "build" -> "built"
  std::cout << "ViSP is not built with Apriltag support" << std::endl;
#endif
#if !(defined(VISP_HAVE_V4L2) || defined(VISP_HAVE_OPENCV))
  std::cout << "ViSP is not built with v4l2 or OpenCV support" << std::endl;
#else
  std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;
#endif

  return EXIT_SUCCESS;
}
533
534#endif
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
void setAprilTagQuadDecimate(float quadDecimate)
void setAprilTagHammingDistanceThreshold(int hammingDistanceThreshold)
bool detect(const vpImage< unsigned char > &I) VP_OVERRIDE
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
void setAprilTagNbThreads(int nThreads)
void setAprilTagDecisionMarginThreshold(float decisionMarginThreshold)
size_t getNbObjects() const
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:131
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:83
void setBlockSize(int blockSize)
void setQuality(double qualityLevel)
void setHarrisFreeParameter(double harris_k)
void setMaxFeatures(int maxCount)
void setMinDistance(double minDistance)
void setWindowSize(int winSize)
void setPyramidLevels(int pyrMaxLevel)
static double rad(double deg)
Definition vpMath.h:129
Real-time 6D object pose tracking using its CAD model.
Definition vpMe.h:143
void setMu1(const double &mu_1)
Definition vpMe.h:408
void setRange(const unsigned int &range)
Definition vpMe.h:438
void setLikelihoodThresholdType(const vpLikelihoodThresholdType likelihood_threshold_type)
Definition vpMe.h:531
void setMaskNumber(const unsigned int &mask_number)
Definition vpMe.cpp:555
void setThreshold(const double &threshold)
Definition vpMe.h:489
void setSampleStep(const double &sample_step)
Definition vpMe.h:445
void setMaskSize(const unsigned int &mask_size)
Definition vpMe.cpp:563
void setMu2(const double &mu_2)
Definition vpMe.h:415
@ NORMALIZED_THRESHOLD
Definition vpMe.h:154
Class that is a wrapper over the Video4Linux2 (V4L2) driver.
void open(vpImage< unsigned char > &I)
void setScale(unsigned scale=vpV4l2Grabber::DEFAULT_SCALE)
void setDevice(const std::string &devname)
void acquire(vpImage< unsigned char > &I)
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.