Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
tutorial-panda3d-renderer.cpp
#include <algorithm> // std::min, std::max
#include <cmath>     // std::fabs, std::round, std::cos, std::sin
#include <cstdlib>   // EXIT_SUCCESS, EXIT_FAILURE
#include <iostream>

#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_PANDA3D) && defined(VISP_HAVE_DISPLAY) && defined(VISP_HAVE_MODULE_IO)
#include <visp3/core/vpException.h>
#include <visp3/core/vpExponentialMap.h>
#include <visp3/gui/vpDisplayX.h>
#include <visp3/gui/vpDisplayGDI.h>
#include <visp3/gui/vpDisplayD3D.h>
#include <visp3/gui/vpDisplayOpenCV.h>
#include <visp3/gui/vpDisplayGTK.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpImageIo.h>
#include <visp3/ar/vpPanda3DRGBRenderer.h>
#include <visp3/ar/vpPanda3DGeometryRenderer.h>
#include <visp3/ar/vpPanda3DRendererSet.h>
#include <visp3/ar/vpPanda3DCommonFilters.h>
#include <visp3/ar/vpPanda3DFrameworkManager.h>
#ifdef ENABLE_VISP_NAMESPACE
using namespace VISP_NAMESPACE_NAME;
#endif
void displayNormals(const vpImage<vpRGBf> &normalsImage,
vpImage<vpRGBa> &normalDisplayImage)
{
#if defined(_OPENMP)
#pragma omp parallel for
#endif
for (int i = 0; i < static_cast<int>(normalsImage.getSize()); ++i) {
normalDisplayImage.bitmap[i].R = static_cast<unsigned char>((normalsImage.bitmap[i].R + 1.0) * 127.5f);
normalDisplayImage.bitmap[i].G = static_cast<unsigned char>((normalsImage.bitmap[i].G + 1.0) * 127.5f);
normalDisplayImage.bitmap[i].B = static_cast<unsigned char>((normalsImage.bitmap[i].B + 1.0) * 127.5f);
}
vpDisplay::display(normalDisplayImage);
vpDisplay::flush(normalDisplayImage);
}
/**
 * Normalize a depth map into [0, 255] using the near/far clipping planes and show it.
 *
 * \param depthImage        input per-pixel depth values.
 * \param depthDisplayImage output 8-bit image; assumed same size as depthImage -- TODO confirm.
 * \param nearV             near clipping plane distance.
 * \param farV              far clipping plane distance.
 */
void displayDepth(const vpImage<float> &depthImage,
                  vpImage<unsigned char> &depthDisplayImage, float nearV, float farV)
{
  // Guard against a degenerate clipping range (farV <= nearV) that would
  // divide by zero or flip the ramp.
  const float range = (farV > nearV) ? (farV - nearV) : 1.f;
#if defined(_OPENMP)
#pragma omp parallel for
#endif
  for (int i = 0; i < static_cast<int>(depthImage.getSize()); ++i) {
    // Clamp to [0, 1] on BOTH ends: the original only clamped below, so a
    // depth beyond farV overflowed the unsigned char cast.
    const float val = std::min(1.f, std::max(0.f, (depthImage.bitmap[i] - nearV) / range));
    depthDisplayImage.bitmap[i] = static_cast<unsigned char>(val * 255.f);
  }
  vpDisplay::display(depthDisplayImage);
  vpDisplay::flush(depthDisplayImage);
}
/**
 * Display the absolute luminance difference between the full render and the
 * diffuse-only render, i.e. the specular/reflectance light contribution.
 *
 * \param colorImage       render with all light contributions.
 * \param colorDiffuseOnly render with diffuse lighting only.
 * \param lightDifference  output 8-bit difference image; assumed same size -- TODO confirm.
 */
void displayLightDifference(const vpImage<vpRGBa> &colorImage, const vpImage<vpRGBa> &colorDiffuseOnly, vpImage<unsigned char> &lightDifference)
{
#if defined(_OPENMP)
#pragma omp parallel for
#endif
  for (int i = 0; i < static_cast<int>(colorImage.getSize()); ++i) {
    // Rec. 601 luma weights; float literals keep the arithmetic in float.
    const float I1 = 0.299f * colorImage.bitmap[i].R + 0.587f * colorImage.bitmap[i].G + 0.114f * colorImage.bitmap[i].B;
    const float I2 = 0.299f * colorDiffuseOnly.bitmap[i].R + 0.587f * colorDiffuseOnly.bitmap[i].G + 0.114f * colorDiffuseOnly.bitmap[i].B;
    // std::fabs/std::round: the original's unqualified abs() can bind to the
    // C int overload and truncate the float difference to an integer.
    lightDifference.bitmap[i] = static_cast<unsigned char>(std::round(std::fabs(I1 - I2)));
  }
  vpDisplay::display(lightDifference);
  vpDisplay::flush(lightDifference);
}
void displayCanny(const vpImage<vpRGBf> &cannyRawData,
{
#if defined(_OPENMP)
#pragma omp parallel for
#endif
for (int i = 0; i < static_cast<int>(cannyRawData.getSize()); ++i) {
vpRGBf &px = cannyRawData.bitmap[i];
canny.bitmap[i] = 255 * (px.R * px.R + px.G * px.G > 0);
//canny.bitmap[i] = static_cast<unsigned char>(127.5f + 127.5f * atan(px.B));
}
for (unsigned int i = 0; i < canny.getHeight(); i += 8) {
for (unsigned int j = 0; j < canny.getWidth(); j += 8) {
bool valid = (pow(cannyRawData[i][j].R, 2.f) + pow(cannyRawData[i][j].G, 2.f)) > 0;
if (!valid) continue;
float angle = cannyRawData[i][j].B;
unsigned x = j + 10 * cos(angle);
unsigned y = i + 10 * sin(angle);
}
}
}
int main(int argc, const char **argv)
{
bool stepByStep = false;
bool debug = false;
bool showLightContrib = false;
bool showCanny = false;
char *modelPathCstr = nullptr;
char *backgroundPathCstr = nullptr;
vpParseArgv::vpArgvInfo argTable[] =
{
{"-model", vpParseArgv::ARGV_STRING, (char *) nullptr, (char *)&modelPathCstr,
"Path to the model to load."},
{"-background", vpParseArgv::ARGV_STRING, (char *) nullptr, (char *)&backgroundPathCstr,
"Path to the background image to load for the rgb renderer."},
{"-step", vpParseArgv::ARGV_CONSTANT_BOOL, (char *) nullptr, (char *)&stepByStep,
"Show frames step by step."},
{"-specular", vpParseArgv::ARGV_CONSTANT_BOOL, (char *) nullptr, (char *)&showLightContrib,
"Show frames step by step."},
{"-canny", vpParseArgv::ARGV_CONSTANT_BOOL, (char *) nullptr, (char *)&showCanny,
"Show frames step by step."},
{"-debug", vpParseArgv::ARGV_CONSTANT_BOOL, (char *) nullptr, (char *)&debug,
"Show Opengl/Panda3D debug message."},
{"-h", vpParseArgv::ARGV_HELP, (char *) nullptr, (char *) nullptr,
"Print the help."},
{(char *) nullptr, vpParseArgv::ARGV_END, (char *) nullptr, (char *) nullptr, (char *) nullptr} };
// Read the command line options
if (vpParseArgv::parse(&argc, argv, argTable,
return (false);
}
if (PStatClient::is_connected()) {
PStatClient::disconnect();
}
std::string host = ""; // Empty = default config var value
int port = -1; // -1 = default config var value
if (!PStatClient::connect(host, port)) {
std::cout << "Could not connect to PStat server." << std::endl;
}
std::string modelPath;
if (modelPathCstr) {
modelPath = modelPathCstr;
}
else {
modelPath = "data/suzanne.bam";
}
std::string backgroundPath;
if (backgroundPathCstr) {
backgroundPath = backgroundPathCstr;
}
const std::string objectName = "object";
double factor = 1.0;
vpPanda3DRenderParameters renderParams(vpCameraParameters(600 * factor, 600 * factor, 320 * factor, 240 * factor), int(480 * factor), int(640 * factor), 0.01, 10.0);
unsigned h = renderParams.getImageHeight(), w = renderParams.getImageWidth();
renderer.setRenderParameters(renderParams);
renderer.setVerticalSyncEnabled(false);
renderer.setAbortOnPandaError(true);
if (debug) {
renderer.enableDebugLog();
}
std::shared_ptr<vpPanda3DGeometryRenderer> geometryRenderer = std::make_shared<vpPanda3DGeometryRenderer>(vpPanda3DGeometryRenderer::vpRenderType::OBJECT_NORMALS);
std::shared_ptr<vpPanda3DGeometryRenderer> cameraRenderer = std::make_shared<vpPanda3DGeometryRenderer>(vpPanda3DGeometryRenderer::vpRenderType::CAMERA_NORMALS);
std::shared_ptr<vpPanda3DRGBRenderer> rgbRenderer = std::make_shared<vpPanda3DRGBRenderer>();
std::shared_ptr<vpPanda3DRGBRenderer> rgbDiffuseRenderer = std::make_shared<vpPanda3DRGBRenderer>(false);
std::shared_ptr<vpPanda3DLuminanceFilter> grayscaleFilter = std::make_shared<vpPanda3DLuminanceFilter>("toGrayscale", rgbRenderer, false);
std::shared_ptr<vpPanda3DCanny> cannyFilter = std::make_shared<vpPanda3DCanny>("canny", grayscaleFilter, true, 10.f);
renderer.addSubRenderer(geometryRenderer);
renderer.addSubRenderer(cameraRenderer);
renderer.addSubRenderer(rgbRenderer);
if (showLightContrib) {
renderer.addSubRenderer(rgbDiffuseRenderer);
}
if (showCanny) {
renderer.addSubRenderer(grayscaleFilter);
renderer.addSubRenderer(cannyFilter);
}
std::cout << "Initializing Panda3D rendering framework" << std::endl;
renderer.initFramework();
NodePath object = renderer.loadObject(objectName, modelPath);
renderer.addNodeToScene(object);
vpPanda3DAmbientLight alight("Ambient", vpRGBf(0.2f));
renderer.addLight(alight);
vpPanda3DPointLight plight("Point", vpRGBf(1.0f), vpColVector({ 0.3, -0.4, -0.2 }), vpColVector({ 0.0, 0.0, 1.0 }));
renderer.addLight(plight);
vpPanda3DDirectionalLight dlight("Directional", vpRGBf(2.0f), vpColVector({ 1.0, 1.0, 0.0 }));
renderer.addLight(dlight);
if (!backgroundPath.empty()) {
vpImage<vpRGBa> background;
vpImageIo::read(background, backgroundPath);
rgbRenderer->setBackgroundImage(background);
}
rgbRenderer->printStructure();
renderer.setCameraPose(vpHomogeneousMatrix(0.0, 0.0, -5.0, 0.0, 0.0, 0.0));
std::cout << "Creating display and data images" << std::endl;
vpImage<vpRGBf> normalsImage;
vpImage<vpRGBf> cameraNormalsImage;
vpImage<vpRGBf> cannyRawData;
vpImage<float> depthImage;
vpImage<vpRGBa> colorImage(h, w);
vpImage<vpRGBa> colorDiffuseOnly(h, w);
vpImage<unsigned char> lightDifference(h, w);
vpImage<unsigned char> cannyImage(h, w);
vpImage<vpRGBa> normalDisplayImage(h, w);
vpImage<vpRGBa> cameraNormalDisplayImage(h, w);
vpImage<unsigned char> depthDisplayImage(h, w);
#if defined(VISP_HAVE_GTK)
using DisplayCls = vpDisplayGTK;
#elif defined(VISP_HAVE_X11)
using DisplayCls = vpDisplayX;
#elif defined(HAVE_OPENCV_HIGHGUI)
using DisplayCls = vpDisplayOpenCV;
#elif defined(VISP_HAVE_GDI)
using DisplayCls = vpDisplayGDI;
#elif defined(VISP_HAVE_D3D9)
using DisplayCls = vpDisplayD3D;
#endif
unsigned int padding = 80;
DisplayCls dNormals(normalDisplayImage, 0, 0, "normals in object space");
DisplayCls dNormalsCamera(cameraNormalDisplayImage, 0, h + padding, "normals in camera space");
DisplayCls dDepth(depthDisplayImage, w + padding, 0, "depth");
DisplayCls dColor(colorImage, w + padding, h + padding, "color");
DisplayCls dImageDiff;
if (showLightContrib) {
dImageDiff.init(lightDifference, w * 2 + padding, 0, "Specular/reflectance contribution");
}
DisplayCls dCanny;
if (showCanny) {
dCanny.init(cannyImage, w * 2 + padding, h + padding, "Canny");
}
renderer.renderFrame();
bool end = false;
std::vector<double> renderTime, fetchTime, displayTime;
while (!end) {
float nearV = 0, farV = 0;
geometryRenderer->computeNearAndFarPlanesFromNode(objectName, nearV, farV, true);
renderParams.setClippingDistance(nearV, farV);
renderer.setRenderParameters(renderParams);
const double beforeRender = vpTime::measureTimeMs();
renderer.renderFrame();
const double beforeFetch = vpTime::measureTimeMs();
renderer.getRenderer<vpPanda3DGeometryRenderer>(geometryRenderer->getName())->getRender(normalsImage, depthImage);
renderer.getRenderer<vpPanda3DGeometryRenderer>(cameraRenderer->getName())->getRender(cameraNormalsImage);
renderer.getRenderer<vpPanda3DRGBRenderer>(rgbRenderer->getName())->getRender(colorImage);
if (showLightContrib) {
renderer.getRenderer<vpPanda3DRGBRenderer>(rgbDiffuseRenderer->getName())->getRender(colorDiffuseOnly);
}
if (showCanny) {
renderer.getRenderer<vpPanda3DCanny>()->getRender(cannyRawData);
}
const double beforeConvert = vpTime::measureTimeMs();
displayNormals(normalsImage, normalDisplayImage);
displayNormals(cameraNormalsImage, cameraNormalDisplayImage);
displayDepth(depthImage, depthDisplayImage, nearV, farV);
if (showLightContrib) {
displayLightDifference(colorImage, colorDiffuseOnly, lightDifference);
}
if (showCanny) {
displayCanny(cannyRawData, cannyImage);
}
vpDisplay::display(colorImage);
vpDisplay::displayText(colorImage, 15, 15, "Click to quit", vpColor::red);
if (stepByStep) {
vpDisplay::displayText(colorImage, 50, 15, "Next frame: space", vpColor::red);
}
if (vpDisplay::getClick(colorImage, false)) {
end = true;
}
vpDisplay::flush(colorImage);
const double endDisplay = vpTime::measureTimeMs();
renderTime.push_back(beforeFetch - beforeRender);
fetchTime.push_back(beforeConvert - beforeFetch);
displayTime.push_back(endDisplay - beforeConvert);
std::string s;
if (stepByStep) {
bool next = false;
while (!next) {
vpDisplay::getKeyboardEvent(colorImage, s, true);
if (s == " ") {
next = true;
}
}
}
const double afterAll = vpTime::measureTimeMs();
const double delta = (afterAll - beforeRender) / 1000.0;
const vpHomogeneousMatrix wTo = renderer.getNodePose(objectName);
const vpHomogeneousMatrix oToo = vpExponentialMap::direct(vpColVector({ 0.0, 0.0, 0.0, 0.0, vpMath::rad(20.0), 0.0 }), delta);
renderer.setNodePose(objectName, wTo * oToo);
}
if (renderTime.size() > 0) {
std::cout << "Render time: " << vpMath::getMean(renderTime) << "ms +- " << vpMath::getStdev(renderTime) << "ms" << std::endl;
std::cout << "Panda3D -> vpImage time: " << vpMath::getMean(fetchTime) << "ms +- " << vpMath::getStdev(fetchTime) << "ms" << std::endl;
std::cout << "Display time: " << vpMath::getMean(displayTime) << "ms +- " << vpMath::getStdev(displayTime) << "ms" << std::endl;
}
return 0;
}
#else
// Fallback entry point when ViSP is built without Panda3D, a display backend,
// or the io module: print a hint and report failure.
int main()
{
  const char *msg = "Recompile ViSP with Panda3D as a third party to run this tutorial";
  std::cerr << msg << std::endl;
  return EXIT_FAILURE;
}
#endif
Generic class defining intrinsic camera parameters.
Implementation of column vector and the associated operations.
static const vpColor red
Definition vpColor.h:198
static const vpColor green
Definition vpColor.h:201
Display for windows using Direct3D 3rd party. Thus to enable this class Direct3D should be installed....
Display for windows using GDI (available on any windows 32 platform).
The vpDisplayGTK allows to display image using the GTK 3rd party library. Thus to enable this class G...
The vpDisplayOpenCV allows to display image using the OpenCV library. Thus to enable this class OpenC...
Use the X11 console to display images on unix-like OS. Thus to enable this class X11 should be instal...
Definition vpDisplayX.h:135
static bool getClick(const vpImage< unsigned char > &I, bool blocking=true)
static bool getKeyboardEvent(const vpImage< unsigned char > &I, bool blocking=true)
static void display(const vpImage< unsigned char > &I)
static void flush(const vpImage< unsigned char > &I)
static void displayArrow(const vpImage< unsigned char > &I, const vpImagePoint &ip1, const vpImagePoint &ip2, const vpColor &color=vpColor::white, unsigned int w=4, unsigned int h=2, unsigned int thickness=1)
static void displayText(const vpImage< unsigned char > &I, const vpImagePoint &ip, const std::string &s, const vpColor &color)
static vpHomogeneousMatrix direct(const vpColVector &v)
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void read(vpImage< unsigned char > &I, const std::string &filename, int backend=IO_DEFAULT_BACKEND)
Definition of the vpImage class member functions.
Definition vpImage.h:131
unsigned int getWidth() const
Definition vpImage.h:242
unsigned int getSize() const
Definition vpImage.h:221
Type * bitmap
points toward the bitmap
Definition vpImage.h:135
unsigned int getHeight() const
Definition vpImage.h:181
static double rad(double deg)
Definition vpMath.h:129
static double getStdev(const std::vector< double > &v, bool useBesselCorrection=false)
Definition vpMath.cpp:374
static double getMean(const std::vector< double > &v)
Definition vpMath.cpp:323
Class representing an ambient light.
void setVerticalSyncEnabled(bool useVsync)
set whether vertical sync is enabled. When vertical sync is enabled, render speed will be limited by ...
void setAbortOnPandaError(bool abort)
Set the behaviour when a Panda3D assertion fails. If abort is true, the program will stop....
NodePath loadObject(const std::string &nodeName, const std::string &modelPath)
Load a 3D object. To load an .obj file, Panda3D must be compiled with assimp support.
Implementation of canny filtering, using Sobel kernels.
Class representing a directional light.
static vpPanda3DFrameworkManager & getInstance()
Renderer that outputs object geometric information.
void getRender(vpImage< vpRGBf > &colorData, vpImage< float > &depth) const
Get render results into ViSP readable structures.
@ CAMERA_NORMALS
Surface normals in the object frame.
Class representing a Point Light.
Implementation of a traditional RGB renderer in Panda3D.
void getRender(vpImage< vpRGBa > &I) const
Store the render resulting from calling renderFrame() into a vpImage.
Rendering parameters for a panda3D simulation.
Class that renders multiple datatypes, in a single pass. A renderer set contains multiple subrenderer...
void addNodeToScene(const NodePath &object) VP_OVERRIDE
void initFramework() VP_OVERRIDE
Initialize the framework and propagate the created panda3D framework to the subrenderers.
virtual void setRenderParameters(const vpPanda3DRenderParameters &params) VP_OVERRIDE
Set new rendering parameters. If the scene has already been initialized, the renderer camera is updat...
void addSubRenderer(std::shared_ptr< vpPanda3DBaseRenderer > renderer)
Add a new subrenderer: This subrenderer should have a unique name, not present in the set.
vpHomogeneousMatrix getNodePose(const std::string &name) VP_OVERRIDE
Retrieve the pose of a scene node. The pose is in the world frame, using a ViSP convention.
std::shared_ptr< RendererType > getRenderer()
Retrieve the first subrenderer with the specified template type.
void setNodePose(const std::string &name, const vpHomogeneousMatrix &wTo) VP_OVERRIDE
Set the pose of an object for all the subrenderers. The pose is specified using the ViSP convention T...
void addLight(const vpPanda3DLight &light) VP_OVERRIDE
Light this lightable object with a new light.
void setCameraPose(const vpHomogeneousMatrix &wTc) VP_OVERRIDE
Set the pose of the camera, using the ViSP convention. This change is propagated to all subrenderers.
static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
@ ARGV_NO_DEFAULTS
No default options like -help.
@ ARGV_NO_LEFTOVERS
Print an error message if an option is not in the argument list.
@ ARGV_STRING
Argument is associated to a char * string.
@ ARGV_CONSTANT_BOOL
Stand alone argument associated to a bool var that is set to true.
@ ARGV_END
End of the argument list.
@ ARGV_HELP
Argument is for help displaying.
float G
Green component.
Definition vpRGBf.h:160
float R
Red component.
Definition vpRGBf.h:159
VISP_EXPORT double measureTimeMs()