Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
tutorial-mb-generic-tracker-apriltag-rs2.cpp
1
2
3#include <iostream>
4
5#include <visp3/core/vpConfig.h>
6
8#if defined(VISP_HAVE_APRILTAG) && defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_MODULE_MBT)
10
11#include <fstream>
12#include <ios>
13
14#include <visp3/detection/vpDetectorAprilTag.h>
15#include <visp3/gui/vpDisplayFactory.h>
16#include <visp3/mbt/vpMbGenericTracker.h>
17#include <visp3/sensor/vpRealSense2.h>
18
19#ifdef ENABLE_VISP_NAMESPACE
20using namespace VISP_NAMESPACE_NAME;
21#endif
22
// States of the application's finite-state machine: wait for an AprilTag
// detection, track the cube, or quit (on user click).
typedef enum { state_detection, state_tracking, state_quit } state_t;
24
// Creates a .cao CAD model file describing a cube whose front face (the one
// carrying the AprilTag) lies in the plane Z=0; the cube extends towards -Z.
// cubeEdgeSize : size of cube edges in meters
// filename     : output file path (defaults to "cube.cao" in the current directory)
void createCaoFile(double cubeEdgeSize, const char *filename = "cube.cao")
{
  std::ofstream fileStream(filename, std::ofstream::out | std::ofstream::trunc);
  if (!fileStream.is_open()) {
    // Fail loudly instead of silently producing no model file
    std::cerr << "Cannot create " << filename << std::endl;
    return;
  }
  const double half = cubeEdgeSize / 2; // half edge length, used for the 8 corner coordinates
  fileStream << "V1\n";
  fileStream << "# 3D Points\n";
  fileStream << "8 # Number of points\n";
  fileStream << half << " " << half << " " << 0 << " # Point 0: (X, Y, Z)\n";
  fileStream << half << " " << -half << " " << 0 << " # Point 1\n";
  fileStream << -half << " " << -half << " " << 0 << " # Point 2\n";
  fileStream << -half << " " << half << " " << 0 << " # Point 3\n";
  fileStream << -half << " " << half << " " << -cubeEdgeSize << " # Point 4\n";
  fileStream << -half << " " << -half << " " << -cubeEdgeSize << " # Point 5\n";
  fileStream << half << " " << -half << " " << -cubeEdgeSize << " # Point 6\n";
  fileStream << half << " " << half << " " << -cubeEdgeSize << " # Point 7\n";
  fileStream << "# 3D Lines\n";
  fileStream << "0 # Number of lines\n";
  fileStream << "# Faces from 3D lines\n";
  fileStream << "0 # Number of faces\n";
  fileStream << "# Faces from 3D points\n";
  fileStream << "6 # Number of faces\n";
  fileStream << "4 0 3 2 1 # Face 0: [number of points] [index of the 3D points]...\n";
  fileStream << "4 1 2 5 6\n";
  fileStream << "4 4 7 6 5\n";
  fileStream << "4 0 7 4 3\n";
  fileStream << "4 5 2 3 4\n";
  fileStream << "4 0 1 6 7 # Face 5\n";
  fileStream << "# 3D cylinders\n";
  fileStream << "0 # Number of cylinders\n";
  fileStream << "# 3D circles\n";
  fileStream << "0 # Number of circles\n";
  fileStream.close();
}
60
61state_t detectAprilTag(const vpImage<unsigned char> &I, vpDetectorAprilTag &detector, double tagSize,
63{
64 std::vector<vpHomogeneousMatrix> cMo_vec;
65
66 // Detection
67 bool ret = detector.detect(I, tagSize, cam, cMo_vec);
68
69 // Display camera pose
70 for (size_t i = 0; i < cMo_vec.size(); i++) {
71 vpDisplay::displayFrame(I, cMo_vec[i], cam, tagSize / 2, vpColor::none, 3);
72 }
73
74 vpDisplay::displayText(I, 40, 20, "State: waiting tag detection", vpColor::red);
75
76 if (ret && detector.getNbObjects() > 0) { // if tag detected, we pick the first one
77 cMo = cMo_vec[0];
78 return state_tracking;
79 }
80
81 return state_detection;
82}
83
84state_t track(const vpImage<unsigned char> &I, vpMbGenericTracker &tracker, double projection_error_threshold,
86{
88 tracker.getCameraParameters(cam);
89
90 // Track the object
91 try {
92 tracker.track(I);
93 }
94 catch (...) {
95 return state_detection;
96 }
97
98 tracker.getPose(cMo);
99
100 // Detect tracking error
101 double projection_error = tracker.computeCurrentProjectionError(I, cMo, cam);
102 if (projection_error > projection_error_threshold) {
103 return state_detection;
104 }
105
106 // Display
107 tracker.display(I, cMo, cam, vpColor::red, 2);
108 vpDisplay::displayFrame(I, cMo, cam, 0.025, vpColor::none, 3);
109 vpDisplay::displayText(I, 40, 20, "State: tracking in progress", vpColor::red);
110 {
111 std::stringstream ss;
112 ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt();
113 vpDisplay::displayText(I, 60, 20, ss.str(), vpColor::red);
114 }
115
116 return state_tracking;
117}
118
119state_t track(std::map<std::string, const vpImage<unsigned char> *> mapOfImages,
120#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
121 std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointclouds,
122#else
123 std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds,
124 std::map<std::string, unsigned int> mapOfWidths, std::map<std::string, unsigned int> mapOfHeights,
125#endif
126 const vpImage<unsigned char> &I_gray, const vpImage<unsigned char> &I_depth,
127 const vpHomogeneousMatrix &depth_M_color, vpMbGenericTracker &tracker, double projection_error_threshold,
129{
131 tracker.getCameraParameters(cam_color, cam_depth);
132
133 // Track the object
134 try {
135#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
136 tracker.track(mapOfImages, mapOfPointclouds);
137#else
138 tracker.track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights);
139#endif
140 }
141 catch (...) {
142 return state_detection;
143 }
144
145 tracker.getPose(cMo);
146
147 // Detect tracking error
148 double projection_error = tracker.computeCurrentProjectionError(I_gray, cMo, cam_color);
149 if (projection_error > projection_error_threshold) {
150 return state_detection;
151 }
152
153 // Display
154 tracker.display(I_gray, I_depth, cMo, depth_M_color * cMo, cam_color, cam_depth, vpColor::red, 3);
155 vpDisplay::displayFrame(I_gray, cMo, cam_color, 0.05, vpColor::none, 3);
156 vpDisplay::displayFrame(I_depth, depth_M_color * cMo, cam_depth, 0.05, vpColor::none, 3);
157
158 return state_tracking;
159}
160
// Print the command line helper message on std::cout.
// argv  : command line arguments (argv[0] is used as the program name).
// error : index in argv of the unsupported parameter, or 0 when no error occurred.
// Fixes vs previous revision: closed the "[--cube-size <size>]" bracket,
// aligned "--tag-size" default (0.08) and "--tag-hamming-distance-threshold"
// default (2) with the actual defaults in main(), and corrected the
// projection-error wording (the tracker restarts when the error EXCEEDS the
// threshold, see the track() functions).
void usage(const char **argv, int error)
{
  std::cout << "Synopsis" << std::endl
    << " " << argv[0]
    << " [--tag-size <size>]"
    << " [--tag-family <family>]"
    << " [--tag-decision-margin-threshold <threshold>]"
    << " [--tag-hamming-distance-threshold <threshold>]"
    << " [--tag-quad-decimate <factor>]"
    << " [--tag-n-threads <number>]"
#if defined(VISP_HAVE_DISPLAY)
    << " [--display-off]"
#endif
    << " [--cube-size <size>]"
    << " [--use-texture]"
    << " [--use-depth]"
    << " [--projection-error-threshold <threshold>]"
    << " [--help, -h]" << std::endl
    << std::endl;
  std::cout << "Description" << std::endl
    << " Live execution on images acquired by a realsense camera of the generic model-based" << std::endl
    << " tracker. The considered object is a cube to which an Apriltag is attached on one of its" << std::endl
    << " faces. Once detected, the pose of the Apriltag is used to initialise the tracker." << std::endl
    << " The Apriltag must be centred on a face of the cube. If the tracker fails, the " << std::endl
    << " tag is used to reset the tracker." << std::endl
    << std::endl
    << " --tag-size <size>" << std::endl
    << " Apriltag size in [m]." << std::endl
    << " Default: 0.08" << std::endl
    << std::endl
    << " --tag-family <family>" << std::endl
    << " Apriltag family. Supported values are:" << std::endl
    << " 0: TAG_36h11" << std::endl
    << " 1: TAG_36h10 (DEPRECATED)" << std::endl
    << " 2: TAG_36ARTOOLKIT (DEPRECATED)" << std::endl
    << " 3: TAG_25h9" << std::endl
    << " 4: TAG_25h7 (DEPRECATED)" << std::endl
    << " 5: TAG_16h5" << std::endl
    << " 6: TAG_CIRCLE21h7" << std::endl
    << " 7: TAG_CIRCLE49h12" << std::endl
    << " 8: TAG_CUSTOM48h12" << std::endl
    << " 9: TAG_STANDARD41h12" << std::endl
    << " 10: TAG_STANDARD52h13" << std::endl
    << " 11: TAG_ARUCO_4x4_50" << std::endl
    << " 12: TAG_ARUCO_4x4_100" << std::endl
    << " 13: TAG_ARUCO_4x4_250" << std::endl
    << " 14: TAG_ARUCO_4x4_1000" << std::endl
    << " 15: TAG_ARUCO_5x5_50" << std::endl
    << " 16: TAG_ARUCO_5x5_100" << std::endl
    << " 17: TAG_ARUCO_5x5_250" << std::endl
    << " 18: TAG_ARUCO_5x5_1000" << std::endl
    << " 19: TAG_ARUCO_6x6_50" << std::endl
    << " 20: TAG_ARUCO_6x6_100" << std::endl
    << " 21: TAG_ARUCO_6x6_250" << std::endl
    << " 22: TAG_ARUCO_6x6_1000" << std::endl
    << " 23: TAG_ARUCO_7x7_50" << std::endl
    << " 24: TAG_ARUCO_7x7_100" << std::endl
    << " 25: TAG_ARUCO_7x7_250" << std::endl
    << " 26: TAG_ARUCO_7x7_1000" << std::endl
    << " 27: TAG_ARUCO_MIP_36h12" << std::endl
    << " Default: 0 (36h11)" << std::endl
    << std::endl
    << " --tag-decision-margin-threshold <threshold>" << std::endl
    << " Threshold used to discard low-confident detections. A typical value is " << std::endl
    << " around 100. The higher this value, the more false positives will be filtered" << std::endl
    << " out. When this value is set to -1, false positives are not filtered out." << std::endl
    << " Default: 50" << std::endl
    << std::endl
    << " --tag-hamming-distance-threshold <threshold>" << std::endl
    << " Threshold used to discard low-confident detections with corrected bits." << std::endl
    << " A typical value is between 0 and 3. The lower this value, the more false" << std::endl
    << " positives will be filtered out." << std::endl
    << " Default: 2" << std::endl
    << std::endl
    << " --tag-quad-decimate <factor>" << std::endl
    << " Decimation factor used to detect a tag. " << std::endl
    << " Default: 1" << std::endl
    << std::endl
    << " --tag-n-threads <number>" << std::endl
    << " Number of threads used to detect a tag." << std::endl
    << " Default: 1" << std::endl
    << std::endl
#if defined(VISP_HAVE_DISPLAY)
    << " --display-off" << std::endl
    << " Flag used to turn display off." << std::endl
    << " Default: enabled" << std::endl
    << std::endl
#endif
    << " --cube-size <size>" << std::endl
    << " Cube size in meter." << std::endl
    << " Default: 0.125" << std::endl
    << std::endl
#if defined(VISP_HAVE_OPENCV)
    << " --use-texture" << std::endl
    << " Flag to enable usage of keypoint features." << std::endl
    << " Default: disabled" << std::endl
    << std::endl
#endif
    << " --use-depth" << std::endl
    << " Flag to enable usage of depth map as features." << std::endl
    << " Default: disabled" << std::endl
    << std::endl
    << " --projection-error-threshold <threshold>" << std::endl
    << " Threshold in the range [0:90] deg used to restart the tracker when the projection"
    << " error exceeds this threshold." << std::endl
    << " Default: 40" << std::endl
    << std::endl
    << " --help, -h" << std::endl
    << " Print this helper message." << std::endl
    << std::endl;

  if (error) {
    std::cout << "Error" << std::endl
      << " "
      << "Unsupported parameter " << argv[error] << std::endl;
  }
}
278
// Entry point: detect an AprilTag glued on a cube with a RealSense camera,
// use the tag pose to (re)initialise a generic model-based tracker on the
// generated cube.cao model, then track the cube (edges, optional KLT texture,
// optional dense depth) until the user clicks in a display window.
int main(int argc, const char **argv)
{
  // NOTE(review): the HTML extraction dropped lines here — presumably the
  // declaration of opt_tag_family (of type vpDetectorAprilTag::vpAprilTagFamily,
  // used below in the option parsing and detector construction). Confirm against upstream.
  double opt_tag_size = 0.08;
  float opt_tag_quad_decimate = 1.0;
  float opt_tag_decision_margin_threshold = 50;
  int opt_tag_hamming_distance_threshold = 2;
  int opt_tag_nthreads = 1;
  double opt_cube_size = 0.125; // 12.5cm by default
#ifdef VISP_HAVE_OPENCV
  bool opt_use_texture = false;
#endif
  bool opt_use_depth = false;
  double opt_projection_error_threshold = 40.;

  // Display is forced off when ViSP has no GUI backend
#if !(defined(VISP_HAVE_DISPLAY))
  bool opt_display_off = true;
#else
  bool opt_display_off = false;
#endif

  // Command line parsing; any unknown option prints the helper and exits with failure
  for (int i = 1; i < argc; i++) {
    if (std::string(argv[i]) == "--tag-size" && i + 1 < argc) {
      opt_tag_size = atof(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-family" && i + 1 < argc) {
      opt_tag_family = (vpDetectorAprilTag::vpAprilTagFamily)atoi(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-decision-margin-threshold" && i + 1 < argc) {
      opt_tag_decision_margin_threshold = static_cast<float>(atof(argv[++i]));
    }
    else if (std::string(argv[i]) == "--tag-hamming-distance-threshold" && i + 1 < argc) {
      opt_tag_hamming_distance_threshold = atoi(argv[++i]);
    }
    else if (std::string(argv[i]) == "--tag-quad-decimate" && i + 1 < argc) {
      opt_tag_quad_decimate = static_cast<float>(atof(argv[++i]));
    }
    else if (std::string(argv[i]) == "--tag-n-threads" && i + 1 < argc) {
      opt_tag_nthreads = atoi(argv[++i]);
    }
#if defined(VISP_HAVE_DISPLAY)
    else if (std::string(argv[i]) == "--display-off") {
      opt_display_off = true;
    }
#endif
    else if (std::string(argv[i]) == "--cube-size" && i + 1 < argc) {
      opt_cube_size = atof(argv[++i]);
    }
#ifdef VISP_HAVE_OPENCV
    else if (std::string(argv[i]) == "--use-texture") {
      opt_use_texture = true;
    }
#endif
    else if (std::string(argv[i]) == "--use-depth") {
      opt_use_depth = true;
    }
    else if (std::string(argv[i]) == "--projection-error-threshold" && i + 1 < argc) {
      opt_projection_error_threshold = atof(argv[++i]);
    }

    else if (std::string(argv[i]) == "--help" || std::string(argv[i]) == "-h") {
      usage(argv, 0);
      return EXIT_SUCCESS;
    }
    else {
      usage(argv, i);
      return EXIT_FAILURE;
    }
  }

  // Generate the cube CAD model used by the model-based tracker
  createCaoFile(opt_cube_size);

  // Display handles: smart pointers on C++11 and later, raw otherwise
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
  std::shared_ptr<vpDisplay> d_gray;
  std::shared_ptr<vpDisplay> d_depth;
#else
  vpDisplay *d_gray = nullptr;
  vpDisplay *d_depth = nullptr;
#endif

  try {
    // Configure and open the RealSense color + depth streams (infrared disabled)
    vpRealSense2 realsense;
    rs2::config config;
    int width = 640, height = 480, stream_fps = 30;
    config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, stream_fps);
    config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, stream_fps);
    config.disable_stream(RS2_STREAM_INFRARED);
    realsense.open(config);

    // NOTE(review): lines dropped by the HTML extraction — presumably the
    // declarations of cam_color and cam_depth (queried from
    // realsense.getCameraParameters()), depth_M_color and cMo, all used below.
    // Confirm against upstream.
    if (opt_use_depth) {
      // Extrinsic transformation from color frame to depth frame
      depth_M_color = realsense.getTransformation(RS2_STREAM_COLOR, RS2_STREAM_DEPTH);
    }

    // Image buffers and the per-camera maps consumed by the stereo tracker
    vpImage<vpRGBa> I_color(height, width);
    vpImage<unsigned char> I_gray(height, width);
    vpImage<unsigned char> I_depth(height, width);
    vpImage<uint16_t> I_depth_raw(height, width);
    std::map<std::string, vpHomogeneousMatrix> mapOfCameraTransformations;
    std::map<std::string, const vpImage<unsigned char> *> mapOfImages;
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
    std::map<std::string, pcl::PointCloud<pcl::PointXYZ>::ConstPtr> mapOfPointclouds;
    pcl::PointCloud<pcl::PointXYZ>::Ptr pointcloud(new pcl::PointCloud<pcl::PointXYZ>());
#else
    std::map<std::string, const std::vector<vpColVector> *> mapOfPointclouds;
    std::map<std::string, unsigned int> mapOfWidths, mapOfHeights;
    std::vector<vpColVector> pointcloud;
#endif

    std::map<std::string, vpHomogeneousMatrix> mapOfCameraPoses;

    // Print the effective configuration
    std::cout << "Cube size : " << opt_cube_size << std::endl;
    std::cout << "AprilTag size : " << opt_tag_size << std::endl;
    std::cout << "AprilTag family : " << opt_tag_family << std::endl;
    std::cout << "Camera parameters" << std::endl;
    std::cout << " Color :\n" << cam_color << std::endl;
    if (opt_use_depth) {
      std::cout << " Depth :\n" << cam_depth << std::endl;
    }
    std::cout << "Detection " << std::endl;
    std::cout << " Quad decimate : " << opt_tag_quad_decimate << std::endl;
    std::cout << " Threads number : " << opt_tag_nthreads << std::endl;
    std::cout << " Decision margin : " << opt_tag_decision_margin_threshold << " (applied to ArUco tags only)" << std::endl;
    std::cout << "Tracker " << std::endl;
    std::cout << " Use edges : 1" << std::endl;
    std::cout << " Use texture : "
#ifdef VISP_HAVE_OPENCV
      << opt_use_texture << std::endl;
#else
      << " na" << std::endl;
#endif
    std::cout << " Use depth : " << opt_use_depth << std::endl;
    std::cout << " Projection error: " << opt_projection_error_threshold << std::endl;

    // Construct display
    if (!opt_display_off) {
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
      d_gray = vpDisplayFactory::createDisplay(I_gray, 50, 50, "Color stream");
      if (opt_use_depth)
        d_depth = vpDisplayFactory::createDisplay(I_depth, 80 + I_gray.getWidth(), 50, "Depth stream");
#else
      d_gray = vpDisplayFactory::allocateDisplay(I_gray, 50, 50, "Color stream");
      if (opt_use_depth)
        d_depth = vpDisplayFactory::allocateDisplay(I_depth, 80 + I_gray.getWidth(), 50, "Depth stream");
#endif
    }

    // Initialize AprilTag detector
    vpDetectorAprilTag detector(opt_tag_family);
    detector.setAprilTagQuadDecimate(opt_tag_quad_decimate);
    detector.setAprilTagNbThreads(opt_tag_nthreads);
    detector.setAprilTagDecisionMarginThreshold(opt_tag_decision_margin_threshold);
    detector.setAprilTagHammingDistanceThreshold(opt_tag_hamming_distance_threshold);

    // Prepare MBT
    std::vector<int> trackerTypes;
#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (opt_use_texture)
      // NOTE(review): line dropped by the HTML extraction — presumably
      // trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER | vpMbGenericTracker::KLT_TRACKER);
      // Confirm against upstream.
    else
#endif
      trackerTypes.push_back(vpMbGenericTracker::EDGE_TRACKER);

    if (opt_use_depth)
      trackerTypes.push_back(vpMbGenericTracker::DEPTH_DENSE_TRACKER);

    vpMbGenericTracker tracker(trackerTypes);
    // edges
    vpMe me;
    me.setMaskSize(5);
    me.setMaskNumber(180);
    me.setRange(12);
    // NOTE(review): lines dropped by the HTML extraction around here — the
    // trailing tooltip index references vpMe::setLikelihoodThresholdType and
    // vpMe::NORMALIZED_THRESHOLD, suggesting a dropped
    // me.setLikelihoodThresholdType(vpMe::NORMALIZED_THRESHOLD); call. Confirm upstream.
    me.setThreshold(20);
    me.setMu1(0.5);
    me.setMu2(0.5);
    me.setSampleStep(4);
    tracker.setMovingEdge(me);

    // KLT keypoint settings, only when texture tracking is enabled
#if defined(VISP_HAVE_OPENCV) && defined(HAVE_OPENCV_IMGPROC) && defined(HAVE_OPENCV_VIDEO)
    if (opt_use_texture) {
      vpKltOpencv klt_settings;
      klt_settings.setMaxFeatures(300);
      klt_settings.setWindowSize(5);
      klt_settings.setQuality(0.015);
      klt_settings.setMinDistance(8);
      klt_settings.setHarrisFreeParameter(0.01);
      klt_settings.setBlockSize(3);
      klt_settings.setPyramidLevels(3);
      tracker.setKltOpencv(klt_settings);
      tracker.setKltMaskBorder(5);
    }
#endif

    if (opt_use_depth) {
      // camera calibration params
      tracker.setCameraParameters(cam_color, cam_depth);
      // model definition
      tracker.loadModel("cube.cao", "cube.cao");
      mapOfCameraTransformations["Camera2"] = depth_M_color;
      tracker.setCameraTransformationMatrix(mapOfCameraTransformations);
      tracker.setAngleAppear(vpMath::rad(70), vpMath::rad(70));
      tracker.setAngleDisappear(vpMath::rad(80), vpMath::rad(80));
    }
    else {
      // camera calibration params
      tracker.setCameraParameters(cam_color);
      // model definition
      tracker.loadModel("cube.cao");
      tracker.setAngleAppear(vpMath::rad(70));
      tracker.setAngleDisappear(vpMath::rad(80));
    }
    tracker.setDisplayFeatures(true);

    state_t state = state_detection;

    // wait for a tag detection
    while (state != state_quit) {
      // Acquire images (and point cloud when depth is used)
      if (opt_use_depth) {
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
        realsense.acquire((unsigned char *)I_color.bitmap, (unsigned char *)I_depth_raw.bitmap, nullptr, pointcloud, nullptr);
#else
        realsense.acquire((unsigned char *)I_color.bitmap, (unsigned char *)I_depth_raw.bitmap, &pointcloud, nullptr,
                          nullptr);
#endif
        vpImageConvert::convert(I_color, I_gray);
        vpImageConvert::createDepthHistogram(I_depth_raw, I_depth);
        vpDisplay::display(I_gray);
        vpDisplay::display(I_depth);

        mapOfImages["Camera1"] = &I_gray;
        mapOfImages["Camera2"] = &I_depth;
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
        mapOfPointclouds["Camera2"] = pointcloud;
#else
        mapOfPointclouds["Camera2"] = &pointcloud;
        mapOfWidths["Camera2"] = width;
        mapOfHeights["Camera2"] = height;
#endif
      }
      else {
        realsense.acquire(I_gray);
        vpDisplay::display(I_gray);
      }

      if (state == state_detection) {
        state = detectAprilTag(I_gray, detector, opt_tag_size, cam_color, cMo);

        // Initialize the tracker with the result of the detection
        if (state == state_tracking) {
          if (opt_use_depth) {
            mapOfCameraPoses["Camera1"] = cMo;
            mapOfCameraPoses["Camera2"] = depth_M_color * cMo;
            tracker.initFromPose(mapOfImages, mapOfCameraPoses);
          }
          else {
            tracker.initFromPose(I_gray, cMo);
          }
        }
      }

      if (state == state_tracking) {
        if (opt_use_depth) {
#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
          state = track(mapOfImages, mapOfPointclouds, I_gray, I_depth, depth_M_color, tracker,
                        opt_projection_error_threshold, cMo);
#else
          state = track(mapOfImages, mapOfPointclouds, mapOfWidths, mapOfHeights, I_gray, I_depth, depth_M_color,
                        tracker, opt_projection_error_threshold, cMo);
#endif
        }
        else {
          state = track(I_gray, tracker, opt_projection_error_threshold, cMo);
        }
        // Show the number of features used by each sub-tracker
        {
          std::stringstream ss;
          ss << "Features: edges " << tracker.getNbFeaturesEdge() << ", klt " << tracker.getNbFeaturesKlt()
             << ", depth " << tracker.getNbFeaturesDepthDense();
          vpDisplay::displayText(I_gray, I_gray.getHeight() - 30, 20, ss.str(), vpColor::red);
        }
      }

      // A click in either window quits the application
      vpDisplay::displayText(I_gray, 20, 20, "Click to quit...", vpColor::red);
      if (vpDisplay::getClick(I_gray, false)) { // exit
        state = state_quit;
      }
      vpDisplay::flush(I_gray);

      if (opt_use_depth) {
        vpDisplay::displayText(I_depth, 20, 20, "Click to quit...", vpColor::red);
        if (vpDisplay::getClick(I_depth, false)) { // exit
          state = state_quit;
        }
        vpDisplay::flush(I_depth);
      }
    }
  }
  catch (const vpException &e) {
    std::cerr << "Catch an exception: " << e.getMessage() << std::endl;
  }

  // Manual cleanup of the raw display pointers on pre-C++11 builds
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
  if (!opt_display_off) {
    delete d_gray;
    if (opt_use_depth)
      delete d_depth;
  }
#endif

  return EXIT_SUCCESS;
}
597
598#else
599
// Fallback entry point used when a required third party is missing at build
// time. Prints which dependency of this tutorial is not available.
int main()
{
#ifndef VISP_HAVE_APRILTAG
  std::cout << "ViSP is not built with Apriltag support" << std::endl;
#endif
#ifndef VISP_HAVE_REALSENSE2
  std::cout << "ViSP is not built with librealsense2 support" << std::endl;
#endif
#ifndef VISP_HAVE_MODULE_MBT
  // The guard enabling the tutorial also requires the mbt module; report it too
  std::cout << "ViSP is not built with the mbt module" << std::endl;
#endif
  std::cout << "Install missing 3rd parties, configure and build ViSP to run this tutorial" << std::endl;

  return EXIT_SUCCESS;
}
612
613#endif
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
void setAprilTagQuadDecimate(float quadDecimate)
void setAprilTagHammingDistanceThreshold(int hammingDistanceThreshold)
bool detect(const vpImage< unsigned char > &I) VP_OVERRIDE
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
void setAprilTagNbThreads(int nThreads)
void setAprilTagDecisionMarginThreshold(float decisionMarginThreshold)
size_t getNbObjects() const
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void createDepthHistogram(const vpImage< uint16_t > &src_depth, vpImage< vpRGBa > &dest_rgba)
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Definition of the vpImage class member functions.
Definition vpImage.h:131
Wrapper for the KLT (Kanade-Lucas-Tomasi) feature tracker implemented in OpenCV. Thus to enable this ...
Definition vpKltOpencv.h:83
void setBlockSize(int blockSize)
void setQuality(double qualityLevel)
void setHarrisFreeParameter(double harris_k)
void setMaxFeatures(int maxCount)
void setMinDistance(double minDistance)
void setWindowSize(int winSize)
void setPyramidLevels(int pyrMaxLevel)
static double rad(double deg)
Definition vpMath.h:129
Real-time 6D object pose tracking using its CAD model.
Definition vpMe.h:143
void setMu1(const double &mu_1)
Definition vpMe.h:408
void setRange(const unsigned int &range)
Definition vpMe.h:438
void setLikelihoodThresholdType(const vpLikelihoodThresholdType likelihood_threshold_type)
Definition vpMe.h:531
void setMaskNumber(const unsigned int &mask_number)
Definition vpMe.cpp:555
void setThreshold(const double &threshold)
Definition vpMe.h:489
void setSampleStep(const double &sample_step)
Definition vpMe.h:445
void setMaskSize(const unsigned int &mask_size)
Definition vpMe.cpp:563
void setMu2(const double &mu_2)
Definition vpMe.h:415
@ NORMALIZED_THRESHOLD
Definition vpMe.h:154
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
vpHomogeneousMatrix getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index=-1) const
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.