54#include <visp3/core/vpConfig.h>
56#if defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_AFMA6)
58#include <visp3/core/vpCameraParameters.h>
59#include <visp3/detection/vpDetectorAprilTag.h>
60#include <visp3/gui/vpDisplayFactory.h>
61#include <visp3/gui/vpPlot.h>
62#include <visp3/io/vpImageIo.h>
63#include <visp3/robot/vpRobotAfma6.h>
64#include <visp3/sensor/vpRealSense2.h>
65#include <visp3/visual_features/vpFeatureBuilder.h>
66#include <visp3/visual_features/vpFeaturePoint.h>
67#include <visp3/vs/vpServo.h>
68#include <visp3/vs/vpServoDisplay.h>
70#ifdef ENABLE_VISP_NAMESPACE
75 const std::vector<vpImagePoint> &vip,
76 std::vector<vpImagePoint> *traj_vip)
78 for (
size_t i = 0;
i < vip.size(); ++
i) {
79 if (traj_vip[i].size()) {
82 traj_vip[
i].push_back(vip[i]);
86 traj_vip[
i].push_back(vip[i]);
89 for (
size_t i = 0;
i < vip.size(); ++
i) {
90 for (
size_t j = 1;
j < traj_vip[
i].size(); ++
j) {
96int main(
int argc,
char **argv)
98 double opt_tagSize = 0.120;
99 int opt_quad_decimate = 2;
100 bool opt_verbose =
false;
101 bool opt_plot =
false;
102 bool opt_adaptive_gain =
false;
103 bool opt_task_sequencing =
false;
104 double opt_convergence_threshold = 0.00005;
106 bool display_tag =
true;
108 for (
int i = 1;
i < argc; ++
i) {
109 if ((std::string(argv[i]) ==
"--tag-size") && (i + 1 < argc)) {
110 opt_tagSize = std::stod(argv[++i]);
112 else if ((std::string(argv[i]) ==
"--tag-quad-decimate") && (i + 1 < argc)) {
113 opt_quad_decimate = std::stoi(argv[++i]);
115 else if (std::string(argv[i]) ==
"--verbose") {
118 else if (std::string(argv[i]) ==
"--plot") {
121 else if (std::string(argv[i]) ==
"--adaptive-gain") {
122 opt_adaptive_gain =
true;
124 else if (std::string(argv[i]) ==
"--task-sequencing") {
125 opt_task_sequencing =
true;
127 else if (std::string(argv[i]) ==
"--no-convergence-threshold") {
128 opt_convergence_threshold = 0.;
130 else if ((std::string(argv[i]) ==
"--help") || (std::string(argv[i]) ==
"-h")) {
133 <<
" [--tag-size <marker size in meter; default " << opt_tagSize <<
">]"
134 <<
" [--tag-quad-decimate <decimation; default " << opt_quad_decimate <<
">]"
135 <<
" [--adaptive-gain]"
137 <<
" [--task-sequencing]"
138 <<
" [--no-convergence-threshold]"
154 std::cout <<
"WARNING: This example will move the robot! "
155 <<
"Please make sure to have the user stop button at hand!" << std::endl
156 <<
"Press Enter to continue..." << std::endl;
162 config.enable_stream(RS2_STREAM_COLOR, width, height, RS2_FORMAT_RGBA8, fps);
163 config.enable_stream(RS2_STREAM_DEPTH, width, height, RS2_FORMAT_Z16, fps);
164 config.enable_stream(RS2_STREAM_INFRARED, width, height, RS2_FORMAT_Y8, fps);
169 for (
size_t i = 0;
i < 10; ++
i) {
175 robot.getCameraParameters(cam, I);
176 std::cout <<
"cam:\n" <<
cam << std::endl;
184 detector.setAprilTagPoseEstimationMethod(poseEstimationMethod);
185 detector.setDisplayTag(display_tag);
186 detector.setAprilTagQuadDecimate(opt_quad_decimate);
187 detector.setZAlignedWithCameraAxis(
true);
238 if (!detector.isZAlignedWithCameraAxis()) {
246 std::vector<vpFeaturePoint> s_point(4), s_point_d(4);
249 std::vector<vpPoint> point(4);
250 if (detector.isZAlignedWithCameraAxis()) {
251 point[0].setWorldCoordinates(-opt_tagSize / 2., +opt_tagSize / 2., 0);
252 point[1].setWorldCoordinates(+opt_tagSize / 2., +opt_tagSize / 2., 0);
253 point[2].setWorldCoordinates(+opt_tagSize / 2., -opt_tagSize / 2., 0);
254 point[3].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
257 point[0].setWorldCoordinates(-opt_tagSize / 2., -opt_tagSize / 2., 0);
258 point[1].setWorldCoordinates(+opt_tagSize / 2., -opt_tagSize / 2., 0);
259 point[2].setWorldCoordinates(+opt_tagSize / 2., +opt_tagSize / 2., 0);
260 point[3].setWorldCoordinates(-opt_tagSize / 2., +opt_tagSize / 2., 0);
265 for (
size_t i = 0;
i < s_point.size(); ++
i) {
266 task.addFeature(s_point[i], s_point_d[i]);
272 if (opt_adaptive_gain) {
274 task.setLambda(lambda);
284 plotter =
new vpPlot(2,
static_cast<int>(250 * 2), 500,
static_cast<int>(I.getWidth()) + 80, 10,
285 "Real time curves plotter");
286 plotter->setTitle(0,
"Visual features error");
287 plotter->setTitle(1,
"Camera velocities");
290 plotter->setLegend(0, 0,
"error_feat_p1_x");
291 plotter->setLegend(0, 1,
"error_feat_p1_y");
292 plotter->setLegend(0, 2,
"error_feat_p2_x");
293 plotter->setLegend(0, 3,
"error_feat_p2_y");
294 plotter->setLegend(0, 4,
"error_feat_p3_x");
295 plotter->setLegend(0, 5,
"error_feat_p3_y");
296 plotter->setLegend(0, 6,
"error_feat_p4_x");
297 plotter->setLegend(0, 7,
"error_feat_p4_y");
298 plotter->setLegend(1, 0,
"vc_x");
299 plotter->setLegend(1, 1,
"vc_y");
300 plotter->setLegend(1, 2,
"vc_z");
301 plotter->setLegend(1, 3,
"wc_x");
302 plotter->setLegend(1, 4,
"wc_y");
303 plotter->setLegend(1, 5,
"wc_z");
306 bool final_quit =
false;
307 bool has_converged =
false;
308 bool send_velocities =
false;
309 bool servo_started =
false;
310 std::vector<vpImagePoint> *traj_corners =
nullptr;
316 while (!has_converged && !final_quit) {
323 std::vector<vpHomogeneousMatrix> c_M_o_vec;
324 detector.detect(I, opt_tagSize, cam, c_M_o_vec);
327 std::stringstream ss;
328 ss <<
"Left click to " << (send_velocities ?
"stop the robot" :
"servo the robot") <<
", right click to quit.";
335 if (c_M_o_vec.size() == 1) {
336 c_M_o = c_M_o_vec[0];
338 static bool first_time =
true;
341 std::vector<vpHomogeneousMatrix> v_o_M_o(2), v_cd_M_c(2);
342 v_o_M_o[1].buildFrom(0, 0, 0, 0, 0, M_PI);
343 for (
size_t i = 0;
i < 2; ++
i) {
344 v_cd_M_c[
i] = cd_M_o * v_o_M_o[
i] * c_M_o.
inverse();
346 if (std::fabs(v_cd_M_c[0].getThetaUVector().getTheta()) < std::fabs(v_cd_M_c[1].getThetaUVector().getTheta())) {
350 std::cout <<
"Desired frame modified to avoid PI rotation of the camera" << std::endl;
355 for (
size_t i = 0;
i < point.size(); ++
i) {
357 point[
i].changeFrame(cd_M_o * o_M_o, cP);
358 point[
i].projection(cP, p);
360 s_point_d[
i].set_x(p[0]);
361 s_point_d[
i].set_y(p[1]);
362 s_point_d[
i].set_Z(cP[2]);
367 std::vector<vpImagePoint> corners = detector.getPolygon(0);
370 for (
size_t i = 0;
i < corners.size(); ++
i) {
375 point[
i].changeFrame(c_M_o, c_P);
377 s_point[
i].set_Z(c_P[2]);
380 if (opt_task_sequencing) {
381 if (!servo_started) {
382 if (send_velocities) {
383 servo_started =
true;
390 v_c =
task.computeControlLaw();
395 for (
size_t i = 0;
i < corners.size(); ++
i) {
396 std::stringstream ss;
406 traj_corners =
new std::vector<vpImagePoint>[corners.size()];
409 display_point_trajectory(I, corners, traj_corners);
413 plotter->plot(1, iter_plot, v_c);
418 std::cout <<
"v_c: " << v_c.t() << std::endl;
421 double error =
task.getError().sumSquare();
422 std::stringstream ss;
423 ss <<
"error: " <<
error;
427 std::cout <<
"error: " <<
error << std::endl;
429 if (error < opt_convergence_threshold) {
430 has_converged =
true;
431 std::cout <<
"Servo task has converged" << std::endl;
442 if (!send_velocities) {
450 std::stringstream ss;
460 send_velocities = !send_velocities;
472 std::cout <<
"Stop the robot " << std::endl;
475 if (opt_plot && plotter !=
nullptr) {
481 while (!final_quit) {
496 delete[] traj_corners;
500 std::cout <<
"ViSP exception: " <<
e.what() << std::endl;
501 std::cout <<
"Stop the robot " << std::endl;
511#if !defined(VISP_HAVE_REALSENSE2)
512 std::cout <<
"Install librealsense-2.x" << std::endl;
514#if !defined(VISP_HAVE_AFMA6)
515 std::cout <<
"ViSP is not build with Afma6 robot support..." << std::endl;
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
vpCameraParametersProjType
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
Implementation of column vector and the associated operations.
static const vpColor green
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
Error that can be emitted by ViSP classes.
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
static double distance(const vpImagePoint &iP1, const vpImagePoint &iP2)
Definition of the vpImage class member functions.
static double rad(double deg)
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
This class enables real-time drawing of 2D or 3D graphics. An instance of the class opens a window whi...
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
Control of Irisa's gantry robot named Afma6.
@ STATE_VELOCITY_CONTROL
Initialize the velocity controller.
@ STATE_STOP
Stops robot motion especially in velocity and acceleration control.
static void display(const vpServo &s, const vpCameraParameters &cam, const vpImage< unsigned char > &I, vpColor currentColor=vpColor::green, vpColor desiredColor=vpColor::red, unsigned int thickness=1)
Implementation of a rotation vector as axis-angle minimal representation.
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Returns a smart pointer to a vpDisplay specialization if a GUI library is available, or nullptr otherwise.
VISP_EXPORT double measureTimeMs()