Example that shows how to achieve image-based visual servoing with a drone equipped with a Pixhawk connected to a Jetson TX2. An Intel Realsense camera is also attached to the drone and connected to the Jetson. The drone is localized thanks to a Qualisys motion-capture system. Communication between the Jetson and the Pixhawk is based on MAVLink using the MAVSDK 3rd-party library.
This program makes the drone detect and follow an AprilTag from the 36h11 family.
#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_MAVSDK) && ((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) && \
defined(VISP_HAVE_REALSENSE2) && defined(VISP_HAVE_PUGIXML)
#include <visp3/core/vpImageConvert.h>
#include <visp3/core/vpMomentAreaNormalized.h>
#include <visp3/core/vpMomentBasic.h>
#include <visp3/core/vpMomentCentered.h>
#include <visp3/core/vpMomentDatabase.h>
#include <visp3/core/vpMomentGravityCenter.h>
#include <visp3/core/vpMomentGravityCenterNormalized.h>
#include <visp3/core/vpMomentObject.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/core/vpTime.h>
#include <visp3/core/vpXmlParserCamera.h>
#include <visp3/detection/vpDetectorAprilTag.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/gui/vpPlot.h>
#include <visp3/robot/vpRobotMavsdk.h>
#include <visp3/sensor/vpRealSense2.h>
#include <visp3/visual_features/vpFeatureBuilder.h>
#include <visp3/visual_features/vpFeatureMomentAreaNormalized.h>
#include <visp3/visual_features/vpFeatureMomentGravityCenterNormalized.h>
#include <visp3/visual_features/vpFeatureVanishingPoint.h>
#include <visp3/vs/vpServo.h>
#include <visp3/vs/vpServoDisplay.h>
#define CONTROL_UAV
#ifdef ENABLE_VISP_NAMESPACE
#endif
bool compareImagePoint(std::pair<size_t, vpImagePoint> p1, std::pair<size_t, vpImagePoint> p2)
{
return (p1.second.get_v() < p2.second.get_v());
};
// Entry point of the IBVS example: parses the command line, connects to the
// Pixhawk drone (MAVSDK) and to the Realsense camera, then runs a visual
// servoing loop that follows a 36h11 AprilTag using image-moment features.
// NOTE(review): this listing appears to be an extracted tutorial snippet --
// many declarations (drone, config, I, cam, detector, eJe, task, ve, P, P_d,
// cdMo, mdb, mdb_d, s_mgn, s_man, s_mgn_d, s_man_d, t, startTime, totalTime,
// time_since_last_display, button, product_line, ...) and several statements
// are missing, so the code is annotated as-is and some fragments below are
// incomplete.
int main(int argc, char **argv)
{
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
// Display allocated lazily once the first image is available (C++11 path).
std::shared_ptr<vpDisplay> display;
#else
#endif
try {
// Default MAVSDK connection URL; overridable with --co.
// NOTE(review): the URL carries a ':' both after "udp://" and before the
// host, while the help text below shows "udp://192.168.30.111:14552" --
// verify which form MAVSDK expects.
std::string opt_connecting_info = "udp://:192.168.30.111:14552";
double tagSize = -1;              // tag side length [m]; mandatory, -1 means "not set"
double opt_distance_to_tag = -1;  // desired distance to the tag [m]
bool opt_has_distance_to_tag = false;
int opt_display_fps = 10;         // refresh rate of the on-screen overlay
bool opt_verbose = false;
int acq_fps = 30;                 // Realsense acquisition frame rate
// The first two arguments must be "--tag-size <size>"; remaining options follow.
if (argc >= 3 && std::string(argv[1]) == "--tag-size") {
tagSize = std::atof(argv[2]);
if (tagSize <= 0) {
std::cout << "Error : invalid tag size." << std::endl << "See " << argv[0] << " --help" << std::endl;
return EXIT_FAILURE;
}
// Parse the optional arguments that follow --tag-size <size>.
for (
int i = 3;
i < argc;
i++) {
// NOTE(review): the option value argv[i + 1] is consumed here but i is not
// advanced, so the next iteration will reject that value as an unknown
// parameter (same issue for --distance-to-tag and --display-fps below) --
// a missing i++ was possibly lost in extraction.
if (std::string(argv[i]) == "--co" && i + 1 < argc) {
opt_connecting_info = std::string(argv[i + 1]);
}
else if (std::string(argv[i]) == "--distance-to-tag" && i + 1 < argc) {
opt_distance_to_tag = std::atof(argv[i + 1]);
if (opt_distance_to_tag <= 0) {
std::cout << "Error : invalid distance to tag." << std::endl << "See " << argv[0] << " --help" << std::endl;
return EXIT_FAILURE;
}
opt_has_distance_to_tag = true;
}
else if (std::string(argv[i]) == "--display-fps" && i + 1 < argc) {
opt_display_fps = std::stoi(std::string(argv[i + 1]));
}
else if (std::string(argv[i]) == "--verbose" || std::string(argv[i]) == "-v") {
opt_verbose = true;
}
else {
std::cout <<
"Error : unknown parameter " << argv[
i] << std::endl
<< "See " << argv[0] << " --help" << std::endl;
return EXIT_FAILURE;
}
}
}
// --help / -h as first argument: print usage and exit successfully.
else if (argc >= 2 && (std::string(argv[1]) == "--help" || std::string(argv[1]) == "-h")) {
std::cout << "\nUsage:\n"
<< " " << argv[0]
<< " [--tag-size <tag size [m]>] [--co <connection information>] [--distance-to-tag <distance>]"
<< " [--display-fps <display fps>] [--verbose] [-v] [--help] [-h]\n"
<< std::endl
<< "Description:\n"
<< " --tag-size <size>\n"
<< " The size of the tag to detect in meters, required.\n\n"
<< " --co <connection information>\n"
<< " - UDP: udp://[host][:port]\n"
<< " - TCP: tcp://[host][:port]\n"
<< " - serial: serial://[path][:baudrate]\n"
<< " - Default: udp://192.168.30.111:14552).\n\n"
<< " --distance-to-tag <distance>\n"
<< " The desired distance to the tag in meters (default: 1 meter).\n\n"
<< " --display-fps <display_fps>\n"
<< " The desired fps rate for the video display (default: 10 fps).\n\n"
<< " --verbose, -v\n"
<< " Enables verbosity (drone information messages and velocity commands\n"
<< " are then displayed).\n\n"
<< " --help, -h\n"
<< " Print help message.\n"
<< std::endl;
return EXIT_SUCCESS;
}
else {
// --tag-size is mandatory; anything else is a usage error.
std::cout << "Error : tag size parameter required." << std::endl << "See " << argv[0] << " --help" << std::endl;
return EXIT_FAILURE;
}
// Safety banner: the program performs no obstacle sensing or avoidance.
std::cout << std::endl
<< "WARNING:" << std::endl
<< " - This program does no sensing or avoiding of obstacles, " << std::endl
<< " the drone WILL collide with any objects in the way! Make sure the " << std::endl
<< " drone has approximately 3 meters of free space on all sides." << std::endl
<< " - The drone uses a forward-facing camera for Apriltag detection," << std::endl
<< " make sure the drone flies above a non-uniform flooring," << std::endl
<< " or its movement will be inacurate and dangerous !" << std::endl
<< std::endl;
// NOTE(review): `drone` is used below but its construction (presumably
// vpRobotMavsdk drone(opt_connecting_info)) was lost in extraction.
if (drone.isRunning()) {
if (opt_verbose) {
std::cout << "Product line: " << product_line << std::endl;
}
// Reject the T200 tracking-camera family: it lacks the streams used below.
if (product_line == "T200") {
std::cout << "This example doesn't support T200 product line family !" << std::endl;
return EXIT_SUCCESS;
}
// Configure the Realsense color/depth/infrared streams at 640x480 @ acq_fps.
config.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, acq_fps);
config.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, acq_fps);
config.enable_stream(RS2_STREAM_INFRARED, 640, 480, RS2_FORMAT_Y8, acq_fps);
if (opt_verbose) {
}
#ifdef CONTROL_UAV
// Level-reference calibration then takeoff -- only when actual UAV control
// is compiled in (CONTROL_UAV defined above).
drone.doFlatTrim();
drone.takeOff();
#endif
// Screen position of the image display window.
int orig_displayX = 100;
int orig_displayY = 100;
#if (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#else
#endif
// Plotter window placed to the right of the image display.
vpPlot plotter(1, 700, 700, orig_displayX +
static_cast<int>(I.getWidth()) + 20, orig_displayY,
"Visual servoing tasks");
// AprilTag detector tuning: decimate for speed, 4 threads, draw detections.
detector.setAprilTagQuadDecimate(4.0);
detector.setAprilTagNbThreads(4);
detector.setDisplayTag(true);
// Reduced robot Jacobian: appears to map a 4-dof command (vx, vy, vz, wz)
// into the 6-dof spatial velocity -- TODO confirm against the missing eJe
// declaration/size.
eJe[0][0] = 1;
eJe[1][1] = 1;
eJe[2][2] = 1;
eJe[5][3] = 1;
// Desired depth to the tag (1 m when --distance-to-tag is not given).
double Z_d = (opt_has_distance_to_tag ? opt_distance_to_tag : 1.);
// Coordinates of the 4 tag corners in the tag frame (square of side tagSize).
double X[4] = { tagSize / 2., tagSize / 2., -tagSize / 2., -tagSize / 2. };
double Y[4] = { tagSize / 2., -tagSize / 2., -tagSize / 2., tagSize / 2. };
std::vector<vpPoint> vec_P, vec_P_d;
// Build the desired (reference) polygon by projecting each corner with cdMo.
for (
int i = 0;
i < 4;
i++) {
P_d.track(cdMo);
vec_P_d.push_back(P_d);
}
// Desired image moments computed from the desired polygon.
m_obj_d.fromVector(vec_P_d);
man_d.linkTo(mdb_d);
// Desired area taken from the basic moments (m20 + m02 on this branch).
double area = 0;
area = mb_d.
get(2, 0) + mb_d.
get(0, 2);
// NOTE(review): dangling `else` -- its matching `if` branch was lost in
// extraction (the original selects m00 for a dense object vs m20+m02 for
// discrete points).
else
man_d.setDesiredArea(area);
man_d.compute();
// Plane parameters of the tag plane in the desired camera frame: here a
// fronto-parallel plane at depth Z_d (1/Z = A*x + B*y + C).
double A = 0.0;
double B = 0.0;
double C = 1.0 / Z_d;
// Servo task: normalized gravity-center + normalized-area moment features.
task.addFeature(s_mgn, s_mgn_d);
task.addFeature(s_man, s_man_d);
plotter.setLegend(0, 3,
"atan(1/rho)");
// Update desired features with the desired plane and cache their
// interaction matrices once (they do not change during servoing).
s_mgn_d.update(A, B, C);
s_mgn_d.compute_interaction();
s_man_d.update(A, B, C);
s_man_d.compute_interaction();
bool condition;
bool runLoop = true;
// Corners are sorted once on the first detection; the index order is reused
// afterwards so each physical corner keeps a stable index.
bool vec_ip_has_been_sorted = false;
bool send_velocities = false;  // velocities only sent after a left click
std::vector<std::pair<size_t, vpImagePoint> > vec_ip_sorted;
// ---- Servo loop: runs until the drone stops or the user right-clicks. ----
while (drone.isRunning() && runLoop) {
// Throttle display updates to opt_display_fps.
// NOTE(review): startTime / time_since_last_display updates were lost in
// extraction -- presumably measured in ms via vpTime; verify.
condition = (startTime - time_since_last_display) > 1000. / opt_display_fps ? true : false;
if (condition) {
}
// Detect the AprilTag and estimate its pose from the current image.
std::vector<vpHomogeneousMatrix> cMo_vec;
detector.detect(I, tagSize, cam, cMo_vec);
if (condition) {
std::stringstream ss;
ss <<
"Detection time: " <<
t <<
" ms";
}
if (detector.getNbObjects() != 0) {
// Current corners of the (single) detected tag.
std::vector<vpImagePoint> vec_ip = detector.getPolygon(0);
vec_P.clear();
// Rebuild the current polygon points.
// NOTE(review): the per-corner conversion filling P was lost in extraction.
for (
size_t i = 0;
i < vec_ip.size();
i++) {
vec_P.push_back(P);
}
// Current image moments and features from the detected polygon.
m_obj.fromVector(vec_P);
man.linkTo(mdb);
man.setDesiredArea(area);
man.compute();
s_mgn.update(A, B, C);
s_mgn.compute_interaction();
s_man.update(A, B, C);
s_man.compute_interaction();
// One-time sort of the corners by increasing v (see compareImagePoint).
if (!vec_ip_has_been_sorted) {
for (
size_t i = 0;
i < vec_ip.size();
i++) {
std::pair<size_t, vpImagePoint> index_pair = std::pair<size_t, vpImagePoint>(i, vec_ip[i]);
vec_ip_sorted.push_back(index_pair);
}
std::sort(vec_ip_sorted.begin(), vec_ip_sorted.end(), compareImagePoint);
vec_ip_has_been_sorted = true;
}
// NOTE(review): incomplete fragment -- the enclosing call (presumably the
// vanishing-point feature update from two sorted corners) was lost in
// extraction.
vec_ip[vec_ip_sorted[2].first], vec_ip[vec_ip_sorted[3].first],
// Until the user enables servoing, command a zero velocity.
if (!send_velocities) {
ve = 0;
}
if (opt_verbose) {
std::cout <<
"ve: " << ve.
t() << std::endl;
}
#ifdef CONTROL_UAV
// Send the computed body-frame velocity command to the drone.
drone.setVelocity(ve);
#endif
if (condition) {
for (
size_t i = 0;
i < 4;
i++) {
std::stringstream ss;
}
// NOTE(review): orphaned trailing arguments below belong to display calls
// (displayCross / displayLine / vpServoDisplay) lost in extraction.
3);
3);
3);
3);
false);
false);
}
}
else {
// No tag (or an ambiguous detection): report and stop moving for safety.
std::stringstream sserr;
sserr << "Failed to detect an Apriltag, or detected multiple ones";
if (condition) {
}
else {
std::cout << sserr.str() << std::endl;
}
#ifdef CONTROL_UAV
drone.stopMoving();
#endif
}
if (condition) {
{
std::stringstream ss;
ss << "Left click to " << (send_velocities ? "stop the robot" : "servo the robot")
<< ", right click to quit.";
}
}
// Mouse interaction: left click toggles servoing, right click lands & quits.
// NOTE(review): the `case` labels (vpMouseButton values) and the getClick
// call filling `button` were lost in extraction.
switch (button) {
send_velocities = !send_velocities;
break;
drone.land();
runLoop = false;
break;
default:
break;
}
}
std::stringstream sstime;
sstime << "Total time: " << totalTime << " ms";
if (condition) {
}
}
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
// Pre-C++11 path: the display was heap-allocated and must be freed manually.
if (display != nullptr) {
delete display;
}
#endif
return EXIT_SUCCESS;
}
else {
// MAVSDK connection failed: nothing to control.
std::cout << "ERROR : failed to setup drone control." << std::endl;
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
if (display != nullptr) {
delete display;
}
#endif
return EXIT_FAILURE;
}
}
// NOTE(review): the `catch (const vpException &e)` clause line is missing
// (extraction); `e` below is otherwise undeclared.
std::cout <<
"Caught an exception: " <<
e << std::endl;
#if (VISP_CXX_STANDARD < VISP_CXX_STANDARD_11)
if (display != nullptr) {
delete display;
}
#endif
return EXIT_FAILURE;
}
}
#else
// Fallback entry point, compiled when the build-time requirements of the
// example are not met. For each missing 3rd party (MAVSDK, librealsense2,
// pugixml) or language level (C++17), print what must be installed/enabled
// before rebuilding ViSP, then exit successfully.
int main()
{
#ifndef VISP_HAVE_MAVSDK
  const char *missing_mavsdk = "\nThis example requires mavsdk library. You should install it, configure and rebuild ViSP.\n";
  std::cout << missing_mavsdk << std::endl;
#endif
#ifndef VISP_HAVE_REALSENSE2
  const char *missing_realsense = "\nThis example requires librealsense2 library. You should install it, configure and rebuild ViSP.\n";
  std::cout << missing_realsense << std::endl;
#endif
#if !defined(VISP_HAVE_PUGIXML)
  const char *missing_pugixml = "\nThis example requires pugixml built-in 3rdparty.";
  std::cout << missing_pugixml << std::endl;
#endif
#if !((__cplusplus >= 201703L) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L)))
  const char *missing_cxx17 =
    "\nThis example requires at least cxx17. You should enable cxx17 during ViSP configuration with cmake and rebuild ViSP.\n";
  std::cout << missing_cxx17 << std::endl;
#endif
  return EXIT_SUCCESS;
}
#endif
Adaptive gain computation.
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static const vpColor green
@ TAG_36h11
AprilTag 36h11 pattern (recommended).
Class that defines generic functionalities for display.
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void displayLine(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color, unsigned int thickness=1, bool segment=true)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static void displayPolygon(const vpImage< unsigned char > &I, const std::vector< vpImagePoint > &vip, const vpColor &color, unsigned int thickness=1, bool closed=true)
error that can be emitted by ViSP classes.
static void create(vpFeaturePoint &s, const vpCameraParameters &cam, const vpDot &d)
Functionality computation for normalized surface moment feature. Computes the interaction matrix asso...
Functionality computation for centered and normalized moment feature. Computes the interaction matrix...
static unsigned int selectAtanOneOverRho()
void setAlpha(double alpha)
Set vanishing point feature value.
void setAtanOneOverRho(double atan_one_over_rho)
Set vanishing point feature value.
Implementation of an homogeneous matrix and operations on such kind of matrices.
vpHomogeneousMatrix inverse() const
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
static double rad(double deg)
Implementation of a matrix and operations on matrices.
Class handling the normalized surface moment that is invariant in scale and used to estimate depth.
This class defines the 2D basic moment . This class is a wrapper for vpMomentObject which allows to u...
const std::vector< double > & get() const
This class defines the double-indexed centered moment descriptor .
void compute() VP_OVERRIDE
This class allows to register all vpMoments so they can access each other according to their dependen...
virtual void updateAll(vpMomentObject &object)
Class describing 2D normalized gravity center moment.
void compute() VP_OVERRIDE
Class describing 2D gravity center moment.
void compute() VP_OVERRIDE
Class for generic objects.
void linkTo(vpMomentDatabase &moments)
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
This class enables real time drawing of 2D or 3D graphics. An instance of the class open a window whi...
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
void set_x(double x)
Set the point x coordinate in the image plane.
void set_y(double y)
Set the point y coordinate in the image plane.
vpCameraParameters getCameraParameters(const rs2_stream &stream, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithDistortion, int index=-1) const
void acquire(vpImage< unsigned char > &grey, double *ts=nullptr)
bool open(const rs2::config &cfg=rs2::config())
rs2_intrinsics getIntrinsics(const rs2_stream &stream, int index=-1) const
std::string getProductLine()
Implementation of a rotation matrix and operations on such kind of matrices.
Implementation of a rotation vector as Euler angle minimal representation.
Class that consider the case of a translation vector.
std::shared_ptr< vpDisplay > createDisplay()
Return a smart pointer vpDisplay specialization if a GUI library is available or nullptr otherwise.
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()
VISP_EXPORT int wait(double t0, double t)