ViSP — Visual Servoing Platform, version 3.7.0
Example: servoFrankaPBVS.cpp
/*
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * Data acquisition with RealSense RGB-D sensor and Franka robot.
 */
56
57#include <iostream>
58
59#include <visp3/core/vpConfig.h>
60#include <visp3/core/vpCameraParameters.h>
61#include <visp3/core/vpIoTools.h>
62#include <visp3/core/vpXmlParserCamera.h>
63#include <visp3/detection/vpDetectorAprilTag.h>
64#include <visp3/gui/vpDisplayFactory.h>
65#include <visp3/gui/vpPlot.h>
66#include <visp3/io/vpImageIo.h>
67#include <visp3/robot/vpRobotFranka.h>
68#include <visp3/sensor/vpRealSense2.h>
69#include <visp3/visual_features/vpFeatureThetaU.h>
70#include <visp3/visual_features/vpFeatureTranslation.h>
71#include <visp3/vs/vpServo.h>
72#include <visp3/vs/vpServoDisplay.h>
73
74#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_FRANKA) && defined(VISP_HAVE_PUGIXML)
75
76#ifdef ENABLE_VISP_NAMESPACE
77using namespace VISP_NAMESPACE_NAME;
78#endif
79
80void display_point_trajectory(const vpImage<unsigned char> &I, const std::vector<vpImagePoint> &vip,
81 std::vector<vpImagePoint> *traj_vip)
82{
83 for (size_t i = 0; i < vip.size(); ++i) {
84 if (traj_vip[i].size()) {
85 // Add the point only if distance with the previous > 1 pixel
86 if (vpImagePoint::distance(vip[i], traj_vip[i].back()) > 1.) {
87 traj_vip[i].push_back(vip[i]);
88 }
89 }
90 else {
91 traj_vip[i].push_back(vip[i]);
92 }
93 }
94 for (size_t i = 0; i < vip.size(); ++i) {
95 for (size_t j = 1; j < traj_vip[i].size(); j++) {
96 vpDisplay::displayLine(I, traj_vip[i][j - 1], traj_vip[i][j], vpColor::green, 2);
97 }
98 }
99}
100
101int main(int argc, char **argv)
102{
103 double opt_tag_size = 0.120;
104 bool opt_tag_z_aligned = false;
105 std::string opt_robot_ip = "192.168.1.1";
106 std::string opt_eMc_filename = "";
107 std::string opt_intrinsic_filename = "";
108 std::string opt_camera_name = "Camera";
109 bool display_tag = true;
110 int opt_quad_decimate = 2;
111 bool opt_verbose = false;
112 bool opt_plot = false;
113 bool opt_adaptive_gain = false;
114 bool opt_task_sequencing = false;
115 double convergence_threshold_t = 0.0005; // Value in [m]
116 double convergence_threshold_tu = 0.5; // Value in [deg]
117
118 for (int i = 1; i < argc; ++i) {
119 if ((std::string(argv[i]) == "--tag-size") && (i + 1 < argc)) {
120 opt_tag_size = std::stod(argv[++i]);
121 }
122 else if ((std::string(argv[i]) == "--tag-quad-decimate") && (i + 1 < argc)) {
123 opt_quad_decimate = std::stoi(argv[++i]);
124 }
125 else if (std::string(argv[i]) == "--tag-z-aligned") {
126 opt_tag_z_aligned = true;
127 }
128 else if ((std::string(argv[i]) == "--ip") && (i + 1 < argc)) {
129 opt_robot_ip = std::string(argv[++i]);
130 }
131 else if (std::string(argv[i]) == "--intrinsic" && i + 1 < argc) {
132 opt_intrinsic_filename = std::string(argv[++i]);
133 }
134 else if (std::string(argv[i]) == "--camera-name" && i + 1 < argc) {
135 opt_camera_name = std::string(argv[++i]);
136 }
137 else if ((std::string(argv[i]) == "--eMc") && (i + 1 < argc)) {
138 opt_eMc_filename = std::string(argv[++i]);
139 }
140 else if (std::string(argv[i]) == "--verbose") {
141 opt_verbose = true;
142 }
143 else if (std::string(argv[i]) == "--plot") {
144 opt_plot = true;
145 }
146 else if (std::string(argv[i]) == "--adaptive-gain") {
147 opt_adaptive_gain = true;
148 }
149 else if (std::string(argv[i]) == "--task-sequencing") {
150 opt_task_sequencing = true;
151 }
152 else if (std::string(argv[i]) == "--no-convergence-threshold") {
153 convergence_threshold_t = 0.;
154 convergence_threshold_tu = 0.;
155 }
156 else if ((std::string(argv[i]) == "--help") || (std::string(argv[i]) == "-h")) {
157 std::cout << "SYNOPSYS" << std::endl
158 << " " << argv[0]
159 << " [--ip <controller ip>]"
160 << " [--intrinsic <xml file>]"
161 << " [--camera-name <name>]"
162 << " [--tag-size <size>]"
163 << " [--tag-quad-decimate <decimation factor>]"
164 << " [--tag-z-aligned]"
165 << " [--eMc <extrinsic transformation file>]"
166 << " [--adaptive-gain]"
167 << " [--plot]"
168 << " [--task-sequencing]"
169 << " [--no-convergence-threshold]"
170 << " [--verbose]"
171 << " [--help] [-h]\n"
172 << std::endl;
173 std::cout << "DESCRIPTION" << std::endl
174 << " Use a position-based visual-servoing scheme to position the camera in front of an Apriltag." << std::endl
175 << std::endl
176 << " --ip <controller ip>" << std::endl
177 << " Franka controller ip address" << std::endl
178 << " Default: " << opt_robot_ip << std::endl
179 << std::endl
180 << " --intrinsic <xml file>" << std::endl
181 << " XML file that contains camera intrinsic parameters. " << std::endl
182 << " If no file is specified, use Realsense camera factory intrinsic parameters." << std::endl
183 << std::endl
184 << " --camera-name <name>" << std::endl
185 << " Camera name in the XML file that contains camera intrinsic parameters." << std::endl
186 << " Default: \"Camera\"" << std::endl
187 << std::endl
188 << " --tag-size <size>" << std::endl
189 << " Apriltag size in [m]." << std::endl
190 << " Default: " << opt_tag_size << " [m]" << std::endl
191 << std::endl
192 << " --tag-quad-decimate <decimation factor>" << std::endl
193 << " Decimation factor used during Apriltag detection." << std::endl
194 << " Default: " << opt_quad_decimate << std::endl
195 << std::endl
196 << " --tag-z-aligned" << std::endl
197 << " When enabled, tag z-axis and camera z-axis are aligned." << std::endl
198 << " Default: false" << std::endl
199 << std::endl
200 << " --eMc <extrinsic transformation file>" << std::endl
201 << " File containing the homogeneous transformation matrix between" << std::endl
202 << " robot end-effector and camera frame." << std::endl
203 << std::endl
204 << " --adaptive-gain" << std::endl
205 << " Flag to enable adaptive gain to speed up visual servo near convergence." << std::endl
206 << std::endl
207 << " --plot" << std::endl
208 << " Flag to enable curve plotter." << std::endl
209 << std::endl
210 << " --task-sequencing" << std::endl
211 << " Flag to enable task sequencing scheme." << std::endl
212 << std::endl
213 << " --no-convergence-threshold" << std::endl
214 << " Flag to disable convergence threshold used to stop the visual servo." << std::endl
215 << std::endl
216 << " --verbose" << std::endl
217 << " Flag to enable extra verbosity." << std::endl
218 << std::endl
219 << " --help, -h" << std::endl
220 << " Print this helper message." << std::endl
221 << std::endl;
222 return EXIT_SUCCESS;
223 }
224 else {
225 std::cout << "\nERROR" << std::endl
226 << std::string(argv[i]) << " command line option is not supported." << std::endl
227 << "Use " << std::string(argv[0]) << " --help" << std::endl
228 << std::endl;
229 return EXIT_FAILURE;
230 }
231 }
232
233 vpRealSense2 rs;
234 rs2::config config;
235 unsigned int width = 640, height = 480;
236 config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
237 config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
238 config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, 30);
239 rs.open(config);
240
241 vpImage<unsigned char> I(height, width);
242
243#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
244 std::shared_ptr<vpDisplay> display = vpDisplayFactory::createDisplay(I, 10, 10, "Current image");
245#else
246 vpDisplay *display = vpDisplayFactory::allocateDisplay(I, 10, 10, "Current image");
247#endif
248
249 std::cout << "Parameters:" << std::endl;
250 std::cout << " Apriltag " << std::endl;
251 std::cout << " Size [m] : " << opt_tag_size << std::endl;
252 std::cout << " Z aligned : " << (opt_tag_z_aligned ? "true" : "false") << std::endl;
253 std::cout << " Camera intrinsics " << std::endl;
254 std::cout << " Factory parameters : " << (opt_intrinsic_filename.empty() ? "yes" : "no") << std::endl;
255
256 // Get camera intrinsics
258 if (opt_intrinsic_filename.empty()) {
259 std::cout << "Use Realsense camera intrinsic factory parameters: " << std::endl;
261 std::cout << "cam:\n" << cam << std::endl;
262 }
263 else if (!vpIoTools::checkFilename(opt_intrinsic_filename)) {
264 std::cout << "Camera parameters file " << opt_intrinsic_filename << " doesn't exist." << std::endl;
265 return EXIT_FAILURE;
266 }
267 else {
269 if (!opt_camera_name.empty()) {
270
271 std::cout << " Param file name [.xml]: " << opt_intrinsic_filename << std::endl;
272 std::cout << " Camera name : " << opt_camera_name << std::endl;
273
274 if (parser.parse(cam, opt_intrinsic_filename, opt_camera_name, vpCameraParameters::perspectiveProjWithDistortion) !=
276 std::cout << "Unable to parse parameters with distortion for camera \"" << opt_camera_name << "\" from "
277 << opt_intrinsic_filename << " file" << std::endl;
278 std::cout << "Attempt to find parameters without distortion" << std::endl;
279
280 if (parser.parse(cam, opt_intrinsic_filename, opt_camera_name,
282 std::cout << "Unable to parse parameters without distortion for camera \"" << opt_camera_name << "\" from "
283 << opt_intrinsic_filename << " file" << std::endl;
284 return EXIT_FAILURE;
285 }
286 }
287 }
288 }
289
290 std::cout << "Camera parameters used to compute the pose:\n" << cam << std::endl;
291
292 // Setup Apriltag detector
295 // vpDetectorAprilTag::vpPoseEstimationMethod poseEstimationMethod = vpDetectorAprilTag::BEST_RESIDUAL_VIRTUAL_VS;
296 vpDetectorAprilTag detector(tagFamily);
297 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
298 detector.setDisplayTag(display_tag);
299 detector.setAprilTagQuadDecimate(opt_quad_decimate);
300 detector.setZAlignedWithCameraAxis(opt_tag_z_aligned);
301
302 // Setup camera extrinsics
303 vpPoseVector e_P_c;
304 // Set camera extrinsics default values
305 e_P_c[0] = 0.0337731;
306 e_P_c[1] = -0.00535012;
307 e_P_c[2] = -0.0523339;
308 e_P_c[3] = -0.247294;
309 e_P_c[4] = -0.306729;
310 e_P_c[5] = 1.53055;
311
312 // If provided, read camera extrinsics from --eMc <file>
313 if (!opt_eMc_filename.empty()) {
314 e_P_c.loadYAML(opt_eMc_filename, e_P_c);
315 }
316 else {
317 std::cout << "Warning, opt_eMc_filename is empty! Use hard coded values." << std::endl;
318 }
319 vpHomogeneousMatrix e_M_c(e_P_c);
320 std::cout << "e_M_c:\n" << e_M_c << std::endl;
321
322 // Desired pose to reach
323 vpHomogeneousMatrix cd_M_o(vpTranslationVector(0, 0, opt_tag_size * 3), // 3 times tag with along camera z axis
324 vpRotationMatrix({ 1, 0, 0, 0, -1, 0, 0, 0, -1 }));
325
326 vpRobotFranka robot;
327
328 try {
329 robot.connect(opt_robot_ip);
330
331 // Create visual features based on cd_M_c
334
337
338 // Setup PBVS
340 task.addFeature(t, td);
341 task.addFeature(tu, tud);
343 task.setInteractionMatrixType(vpServo::CURRENT);
344
345 if (opt_adaptive_gain) {
346 vpAdaptiveGain lambda(1.5, 0.4, 30); // lambda(0)=4, lambda(oo)=0.4 and lambda'(0)=30
347 task.setLambda(lambda);
348 }
349 else {
350 task.setLambda(0.5);
351 }
352
353 vpPlot *plotter = nullptr;
354 int iter_plot = 0;
355
356 if (opt_plot) {
357 plotter = new vpPlot(2, static_cast<int>(250 * 2), 500, static_cast<int>(I.getWidth()) + 80, 10,
358 "Real time curves plotter");
359 plotter->setTitle(0, "Visual features error");
360 plotter->setTitle(1, "Camera velocities");
361 plotter->initGraph(0, 6);
362 plotter->initGraph(1, 6);
363 plotter->setLegend(0, 0, "error_feat_tx");
364 plotter->setLegend(0, 1, "error_feat_ty");
365 plotter->setLegend(0, 2, "error_feat_tz");
366 plotter->setLegend(0, 3, "error_feat_theta_ux");
367 plotter->setLegend(0, 4, "error_feat_theta_uy");
368 plotter->setLegend(0, 5, "error_feat_theta_uz");
369 plotter->setLegend(1, 0, "vc_x");
370 plotter->setLegend(1, 1, "vc_y");
371 plotter->setLegend(1, 2, "vc_z");
372 plotter->setLegend(1, 3, "wc_x");
373 plotter->setLegend(1, 4, "wc_y");
374 plotter->setLegend(1, 5, "wc_z");
375 }
376
377 bool final_quit = false;
378 bool has_converged = false;
379 bool send_velocities = false;
380 bool servo_started = false;
381 std::vector<vpImagePoint> *traj_vip = nullptr; // To memorize point trajectory
382
383 static double t_init_servo = vpTime::measureTimeMs();
384
385 robot.set_eMc(e_M_c); // Set location of the camera wrt end-effector frame
386 robot.setRobotState(vpRobot::STATE_VELOCITY_CONTROL);
387
388 vpHomogeneousMatrix cd_M_c, c_M_o, o_M_o;
389
390 while (!has_converged && !final_quit) {
391 double t_start = vpTime::measureTimeMs();
392
393 rs.acquire(I);
394
396
397 std::vector<vpHomogeneousMatrix> c_M_o_vec;
398 bool ret = detector.detect(I, opt_tag_size, cam, c_M_o_vec);
399
400 std::stringstream ss;
401 ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot") << ", right click to quit.";
402 vpDisplay::displayText(I, 20, 20, ss.str(), vpColor::red);
403
404 vpColVector v_c(6);
405
406 // Only one tag is detected
407 if (ret && (c_M_o_vec.size() == 1)) {
408 c_M_o = c_M_o_vec[0];
409
410 static bool first_time = true;
411 if (first_time) {
412 // Introduce security wrt tag positioning in order to avoid PI rotation
413 std::vector<vpHomogeneousMatrix> secure_o_M_o(2), secure_cd_M_c(2);
414 secure_o_M_o[1].buildFrom(0, 0, 0, 0, 0, M_PI);
415 for (size_t i = 0; i < 2; ++i) {
416 secure_cd_M_c[i] = cd_M_o * secure_o_M_o[i] * c_M_o.inverse();
417 }
418 if (std::fabs(secure_cd_M_c[0].getThetaUVector().getTheta()) < std::fabs(secure_cd_M_c[1].getThetaUVector().getTheta())) {
419 o_M_o = secure_o_M_o[0];
420 }
421 else {
422 std::cout << "Desired frame modified to avoid PI rotation of the camera" << std::endl;
423 o_M_o = secure_o_M_o[1]; // Introduce PI rotation
424 }
425 }
426
427 // Update visual features
428 cd_M_c = cd_M_o * o_M_o * c_M_o.inverse();
429 t.buildFrom(cd_M_c);
430 tu.buildFrom(cd_M_c);
431
432 if (opt_task_sequencing) {
433 if (!servo_started) {
434 if (send_velocities) {
435 servo_started = true;
436 }
437 t_init_servo = vpTime::measureTimeMs();
438 }
439 v_c = task.computeControlLaw((vpTime::measureTimeMs() - t_init_servo) / 1000.);
440 }
441 else {
442 v_c = task.computeControlLaw();
443 }
444
445 // Display desired and current pose features
446 vpDisplay::displayFrame(I, cd_M_o * o_M_o, cam, opt_tag_size / 1.5, vpColor::yellow, 2);
447 vpDisplay::displayFrame(I, c_M_o, cam, opt_tag_size / 2, vpColor::none, 3);
448 // Get tag corners
449 std::vector<vpImagePoint> vip = detector.getPolygon(0);
450 // Get the tag cog corresponding to the projection of the tag frame in the image
451 vip.push_back(detector.getCog(0));
452 // Display the trajectory of the points
453 if (first_time) {
454 traj_vip = new std::vector<vpImagePoint>[vip.size()];
455 }
456 display_point_trajectory(I, vip, traj_vip);
457
458 if (opt_plot) {
459 plotter->plot(0, iter_plot, task.getError());
460 plotter->plot(1, iter_plot, v_c);
461 iter_plot++;
462 }
463
464 if (opt_verbose) {
465 std::cout << "v_c: " << v_c.t() << std::endl;
466 }
467
469 vpThetaUVector cd_tu_c = cd_M_c.getThetaUVector();
470 double error_tr = sqrt(cd_t_c.sumSquare());
471 double error_tu = vpMath::deg(sqrt(cd_tu_c.sumSquare()));
472
473 ss.str("");
474 ss << "error_t: " << error_tr;
475 vpDisplay::displayText(I, 20, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
476 ss.str("");
477 ss << "error_tu: " << error_tu;
478 vpDisplay::displayText(I, 40, static_cast<int>(I.getWidth()) - 150, ss.str(), vpColor::red);
479
480 if (opt_verbose)
481 std::cout << "error translation: " << error_tr << " ; error rotation: " << error_tu << std::endl;
482
483 if (error_tr < convergence_threshold_t && error_tu < convergence_threshold_tu) {
484 has_converged = true;
485 std::cout << "Servo task has converged" << std::endl;
486 ;
487 vpDisplay::displayText(I, 100, 20, "Servo task has converged", vpColor::red);
488 }
489
490 if (first_time) {
491 first_time = false;
492 }
493 } // end if (c_M_o_vec.size() == 1)
494 else {
495 v_c = 0;
496 }
497
498 if (!send_velocities) {
499 v_c = 0;
500 }
501
502 // Send to the robot
503 robot.setVelocity(vpRobot::CAMERA_FRAME, v_c);
504
505 ss.str("");
506 ss << "Loop time: " << vpTime::measureTimeMs() - t_start << " ms";
507 vpDisplay::displayText(I, 40, 20, ss.str(), vpColor::red);
509
511 if (vpDisplay::getClick(I, button, false)) {
512 switch (button) {
514 send_velocities = !send_velocities;
515 break;
516
518 final_quit = true;
519 v_c = 0;
520 break;
521
522 default:
523 break;
524 }
525 }
526 }
527 std::cout << "Stop the robot " << std::endl;
528 robot.setRobotState(vpRobot::STATE_STOP);
529
530 if (opt_plot && plotter != nullptr) {
531 delete plotter;
532 plotter = nullptr;
533 }
534
535 if (!final_quit) {
536 while (!final_quit) {
537 rs.acquire(I);
539
540 vpDisplay::displayText(I, 20, 20, "Click to quit the program.", vpColor::red);
541 vpDisplay::displayText(I, 40, 20, "Visual servo converged.", vpColor::red);
542
543 if (vpDisplay::getClick(I, false)) {
544 final_quit = true;
545 }
546
548 }
549 }
550
551 if (traj_vip) {
552 delete[] traj_vip;
553 }
554 }
555 catch (const vpException &e) {
556 std::cout << "ViSP exception: " << e.what() << std::endl;
557 std::cout << "Stop the robot " << std::endl;
558 robot.setRobotState(vpRobot::STATE_STOP);
559#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
560 if (display != nullptr) {
561 delete display;
562 }
563#endif
564 return EXIT_FAILURE;
565 }
566 catch (const franka::NetworkException &e) {
567 std::cout << "Franka network exception: " << e.what() << std::endl;
568 std::cout << "Check if you are connected to the Franka robot"
569 << " or if you specified the right IP using --ip command line option set by default to 192.168.1.1. "
570 << std::endl;
571#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
572 if (display != nullptr) {
573 delete display;
574 }
575#endif
576 return EXIT_FAILURE;
577 }
578 catch (const std::exception &e) {
579 std::cout << "Franka exception: " << e.what() << std::endl;
580#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
581 if (display != nullptr) {
582 delete display;
583 }
584#endif
585 return EXIT_FAILURE;
586 }
587
588#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
589 if (display != nullptr) {
590 delete display;
591 }
592#endif
593 return EXIT_SUCCESS;
594}
595#else
/*!
 * Fallback entry point used when the required third-party libraries are not
 * available: print which dependency is missing and how to enable it.
 */
int main()
{
#if !defined(VISP_HAVE_REALSENSE2)
  std::cout << "Install librealsense-2.x and rebuild ViSP." << std::endl;
#endif
#if !defined(VISP_HAVE_FRANKA)
  std::cout << "Install libfranka and rebuild ViSP." << std::endl;
#endif
#if !defined(VISP_HAVE_PUGIXML)
  std::cout << "Build ViSP with pugixml support enabled." << std::endl;
#endif
  return EXIT_SUCCESS;
}
609#endif
Adaptive gain computation.
static bool loadYAML(const std::string &filename, vpArray2D< Type > &A, char *header=nullptr)
Definition vpArray2D.h:874
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor none
Definition vpColor.h:210
static const vpColor yellow
Definition vpColor.h:206
static const vpColor green
Definition vpColor.h:201
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
Definition vpDisplay.h:171
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayFrame(const vpImage< unsigned char > &I, const vpHomogeneousMatrix &cMo, const vpCameraParameters &cam, double size, const vpColor &color=vpColor::none, unsigned int thickness=1, const vpImagePoint &offset=vpImagePoint(0, 0), const std::string &frameName="", const vpColor &textColor=vpColor::black, const vpImagePoint &textOffset=vpImagePoint(15, 15))
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
error that can be emitted by ViSP classes.
Definition vpException.h:60
Class that defines a 3D visual feature from an axis/angle parametrization that represents the rotation between two frames.
Class that defines the translation visual feature .
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpThetaUVector getThetaUVector() const
vpHomogeneousMatrix inverse() const
vpTranslationVector getTranslationVector() const
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
Definition vpImage.h:131
static bool checkFilename(const std::string &filename)
static double deg(double rad)
Definition vpMath.h:119
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Definition vpPlot.h:117
Implementation of a pose vector and operations on poses.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
@ CAMERA_FRAME
Definition vpRobot.h:81
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
Definition vpRobot.h:64
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
Definition vpRobot.h:63
Implementation of a rotation matrix and operations on such kind of matrices.
double sumSquare() const
@ EYEINHAND_CAMERA
Definition vpServo.h:176
@ CURRENT
Definition vpServo.h:217
Implementation of a rotation vector as axis-angle minimal representation.
Class that consider the case of a translation vector.
XML parser to load and save intrinsic camera parameters.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()