This renderer can output a color image, with support for textures and lighting.
It can also be used in the Model-Based Tracker to check the visibility of the object (see Advanced visibility via Ogre3D). There is also the optional OIS library that can be used to animate the rendered object using the keyboard.
An example that shows how to exploit Ogre in ViSP to render a color image with support for textures and lighting in camera space is given in AROgre.cpp.
This example reads a sequence of images, on each image calculates the pose of the camera with respect to a frame located at the center of the 4 small blobs, then, using the estimated pose, renders a new image with the image of the sequence in the background, augmented by a grass plate and an animated robot.
A light must be added in the scene for the rendering. The Ogre API must be used to do so:
Additionally, the robot that we want to insert in our AR application must be added to the scene:
To update the animation, the following method must be defined and will be called at the end of each frame:
Finally, a method to manage keyboard events is written in case the OIS library is installed:
This pose is then used by the renderer in order to display the virtual object at the correct location in the image:
#include <iostream>
#include <visp3/core/vpConfig.h>
#if defined(VISP_HAVE_OGRE) && defined(VISP_HAVE_DISPLAY)
#include <visp3/ar/vpAROgre.h>
#include <visp3/blob/vpDot2.h>
#include <visp3/core/vpDebug.h>
#include <visp3/core/vpImagePoint.h>
#include <visp3/core/vpIoTools.h>
#include <visp3/core/vpPixelMeterConversion.h>
#include <visp3/core/vpPoint.h>
#include <visp3/gui/vpDisplayFactory.h>
#include <visp3/io/vpParseArgv.h>
#include <visp3/io/vpVideoReader.h>
#include <visp3/vision/vpPose.h>
#define GETOPTARGS "ci:p:h"
#ifdef ENABLE_VISP_NAMESPACE
#endif
/*!
  Print on stdout the way to use this example.

  \param name : Program name, printed in the synopsis.
  \param badparam : Faulty parameter; when non-null an error line is
  appended after the help.
  \param ipath : Default input image path, shown for option -i.
  \param ppath : Default personal image path, shown for option -p.
*/
void usage(const char *name, const char *badparam, const std::string &ipath, const std::string &ppath)
{
// Image extension of the dataset images: "pgm" for datasets older than
// 3.6.0, "png" otherwise (including when no dataset is available).
#if defined(VISP_HAVE_DATASET) && (VISP_HAVE_DATASET_VERSION < 0x030600)
const std::string ext("pgm");
#else
const std::string ext("png");
#endif
// Synopsis.
fprintf(stdout,
        "\n"
        "Test augmented reality using the vpAROgre class.\n"
        "\n"
        "SYNOPSIS\n"
        "%s [-i <test image path>] [-p <personal image path>]\n"
        "[-c] [-h]\n",
        name);
// Option descriptions with their current default values.
fprintf(stdout,
        "\n"
        "OPTIONS: Default\n"
        "-i <input image path> %s\n"
        "Set image input path.\n"
        "From this path read images \n"
        "\"mire-2/image.%%04d.%s\". These \n"
        "images come from visp-images-x.y.z.tar.gz available \n"
        "on the ViSP website.\n"
        "Setting the VISP_INPUT_IMAGE_PATH environment\n"
        "variable produces the same behaviour than using\n"
        "this option.\n"
        "\n"
        "-p <personal image path> %s\n"
        "Specify a personal sequence containing images \n"
        "to process.\n"
        "By image sequence, we mean one file per image.\n"
        "Example : \"/Temp/visp-images/cube/image.%%04d.%s\"\n"
        "%%04d is for the image numbering.\n"
        "\n"
        "-c\n"
        "Disable the mouse click. Useful to automate the \n"
        "execution of this program without human intervention.\n"
        "\n"
        "-h\n"
        "Print the help.\n",
        ipath.c_str(), ext.c_str(), ppath.c_str(), ext.c_str());
// Report the faulty parameter, if any, after the help text.
if (badparam)
fprintf(stdout, "\nERROR: Bad parameter [%s]\n", badparam);
}
/*!
  Set the program options from the command line.

  \param argc, argv : Command-line parameters.
  \param ipath : Input image path, set by option -i.
  \param ppath : Personal image path, set by option -p.
  \param click_allowed : Set to false when option -c is given, to run
  without human intervention.

  \return false when the program must stop (help requested or bad
  argument), true otherwise.

  NOTE(review): in this excerpt the vpParseArgv::parse() loop that assigns
  c and optarg_ around the switch appears to be missing; as shown, c and
  optarg_ are read uninitialized and the braces do not balance — confirm
  against the full source file.
*/
bool getOptions(int argc, const char **argv, std::string &ipath, std::string &ppath, bool &click_allowed)
{
const char *optarg_;
int c;
// Dispatch on the current option character.
switch (c) {
case 'c': // disable mouse clicks
click_allowed = false;
break;
case 'i': // dataset image path
ipath = optarg_;
break;
case 'p': // personal image path
ppath = optarg_;
break;
case 'h': // print the help and stop
usage(argv[0], nullptr, ipath, ppath);
return false;
default: // unknown option: print the help with the bad parameter
usage(argv[0], optarg_, ipath, ppath);
return false;
}
}
// A standalone argument was left on the command line: report it.
if ((c == 1) || (c == -1)) {
usage(argv[0], nullptr, ipath, ppath);
std::cerr << "ERROR: " << std::endl;
std::cerr << " Bad argument " << optarg_ << std::endl << std::endl;
return false;
}
return true;
}
#ifndef DOXYGEN_SHOULD_SKIP_THIS
// NOTE(review): the class declaration line (presumably
// "class vpAROgreExample : public vpAROgre") is missing from this excerpt,
// as are the signatures of the three method bodies below; the member
// listing at the end of the file suggests they are createScene(),
// customframeEnded(const Ogre::FrameEvent &) and
// processInputEvent(const Ogre::FrameEvent &). Confirm against the full
// source file.
{
public:
// Constructor: remember an optional Ogre resource path and reset the scene
// state (no robot entity yet, no animation, robot facing -Y).
// NOTE(review): cam, width and height are unused in the visible body; the
// delegation to the vpAROgre base constructor is presumably part of the
// missing text — confirm.
vpAROgreExample(const vpCameraParameters &cam = vpCameraParameters(), unsigned int width = 640,
unsigned int height = 480, const char *resourcePath = nullptr)
{
if (resourcePath)
mResourcePath = resourcePath;
std::cout << "mResourcePath: " << mResourcePath << std::endl;
// The robot initially looks along the -Y axis.
vecDevant = Ogre::Vector3(0, -1, 0);
robot = nullptr;
mAnimationState = nullptr;
}
protected:
// Current "forward" direction of the robot in the scene.
Ogre::Vector3 vecDevant;
// Animation currently applied to the robot ("Idle" or "Walk").
Ogre::AnimationState *mAnimationState;
// The animated robot mesh inserted in the scene.
Ogre::Entity *robot;
// Body of createScene() (signature line missing from this excerpt):
// set up lighting, load the robot entity and create a textured ground plane.
{
// Soft ambient light plus one shadow-casting point light.
mSceneMgr->setAmbientLight(Ogre::ColourValue(static_cast<float>(0.6), static_cast<float>(0.6), static_cast<float>(0.6)));
Ogre::Light *light = mSceneMgr->createLight();
light->setDiffuseColour(1.0, 1.0, 1.0);
light->setSpecularColour(1.0, 1.0, 1.0);
// Before Ogre 1.10 a light holds its own position; from 1.10 on it must be
// attached to a scene node that carries the position.
#if (VISP_HAVE_OGRE_VERSION < (1 << 16 | 10 << 8 | 0))
light->setPosition(-5, -5, 10);
#else
Ogre::SceneNode *spotLightNode = mSceneMgr->getRootSceneNode()->createChildSceneNode();
spotLightNode->attachObject(light);
spotLightNode->setPosition(Ogre::Vector3(-5, -5, 10));
#endif
light->setType(Ogre::Light::LT_POINT);
light->setAttenuation((Ogre::Real)100, (Ogre::Real)1.0, (Ogre::Real)0.045, (Ogre::Real)0.0075);
light->setCastShadows(true);
// Load the robot mesh, scale it down and orient it w.r.t. the scene frame.
robot = mSceneMgr->createEntity("Robot", "robot.mesh");
Ogre::SceneNode *RobotNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Robot");
RobotNode->attachObject(robot);
RobotNode->scale((Ogre::Real)0.001, (Ogre::Real)0.001, (Ogre::Real)0.001);
RobotNode->pitch(Ogre::Degree(90));
RobotNode->yaw(Ogre::Degree(-90));
robot->setCastShadows(true);
mSceneMgr->setShadowTechnique(Ogre::SHADOWTYPE_STENCIL_MODULATIVE);
// Start with the looping "Idle" animation.
mAnimationState = robot->getAnimationState("Idle");
mAnimationState->setLoop(true);
mAnimationState->setEnabled(true);
// Ground: a 0.22 x 0.16 plane lying in z = 0, textured with grass.
Ogre::Plane plan;
plan.d = 0;
plan.normal = Ogre::Vector3::UNIT_Z;
Ogre::MeshManager::getSingleton().createPlane("sol", Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME, plan,
(Ogre::Real)0.22, (Ogre::Real)0.16, 10, 10, true, 1, 1, 1);
Ogre::Entity *ent = mSceneMgr->createEntity("Entitesol", "sol");
Ogre::SceneNode *PlaneNode = mSceneMgr->getRootSceneNode()->createChildSceneNode("Entitesol");
PlaneNode->attachObject(ent);
ent->setMaterialName("Examples/GrassFloor");
}
// Body of customframeEnded(const Ogre::FrameEvent &evt) (signature line
// missing): advance the current animation by the elapsed frame time.
{
mAnimationState->addTime(evt.timeSinceLastFrame);
return true;
}
#ifdef VISP_HAVE_OIS
// Body of processInputEvent() (signature line missing): move/rotate the
// robot from the keyboard when the OIS library is available.
{
mKeyboard->capture();
Ogre::Matrix3 rotmy;
// Rotation step applied on each left/right key press (pi/8 rad).
double angle = -M_PI / 8;
// Escape stops the rendering loop.
if (mKeyboard->isKeyDown(OIS::KC_ESCAPE))
return false;
bool event = false;
// Z / Up arrow: move the robot forward along vecDevant.
if (mKeyboard->isKeyDown(OIS::KC_Z) || mKeyboard->isKeyDown(OIS::KC_UP)) {
mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition() +
(Ogre::Real)0.003 * vecDevant);
event = true;
}
// S / Down arrow: move the robot backward.
if (mKeyboard->isKeyDown(OIS::KC_S) || mKeyboard->isKeyDown(OIS::KC_DOWN)) {
mSceneMgr->getSceneNode("Robot")->setPosition(mSceneMgr->getSceneNode("Robot")->getPosition() -
(Ogre::Real)0.003 * vecDevant);
event = true;
}
// Q / Left arrow: rotate the forward vector and yaw the robot node.
if (mKeyboard->isKeyDown(OIS::KC_Q) || mKeyboard->isKeyDown(OIS::KC_LEFT)) {
rotmy = Ogre::Matrix3((Ogre::Real)cos(-angle), (Ogre::Real)sin(-angle), 0, (Ogre::Real)(-sin(-angle)),
(Ogre::Real)cos(-angle), 0, 0, 0, 1);
vecDevant = vecDevant * rotmy;
mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)(-angle)));
event = true;
}
// D / Right arrow: rotate in the opposite direction.
if (mKeyboard->isKeyDown(OIS::KC_D) || mKeyboard->isKeyDown(OIS::KC_RIGHT)) {
rotmy = Ogre::Matrix3((Ogre::Real)cos(angle), (Ogre::Real)sin(angle), 0, (Ogre::Real)(-sin(angle)),
(Ogre::Real)cos(angle), 0, 0, 0, 1);
vecDevant = vecDevant * rotmy;
mSceneMgr->getSceneNode("Robot")->yaw(Ogre::Radian((Ogre::Real)angle));
event = true;
}
// Walk while a movement key is held, otherwise return to Idle.
if (event) {
mAnimationState = robot->getAnimationState("Walk");
}
else
mAnimationState = robot->getAnimationState("Idle");
mAnimationState->setLoop(true);
mAnimationState->setEnabled(true);
return true;
}
#endif
};
// NOTE(review): the signature of this function is missing from this
// excerpt; given the call in main() below
// ("computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cMo, mP,
// opt_click_allowed)"), this is most likely the body of
// computeInitialPose(), which estimates the initial camera pose from 4
// clicked blobs. Most statements (blob tracking, point construction, pose
// computation) are also missing, and several loop/if bodies are empty or
// unbalanced as shown — confirm against the full source file.
{
bool opt_display = true;
// Displaying is only possible when a display library is built in.
if (opt_display) {
#if defined(VISP_HAVE_DISPLAY)
#else
opt_display = false;
#endif
}
// Per-blob setup for the 4 landmark dots (bodies missing here).
for (
unsigned int i = 0;
i < 4; ++
i) {
if (opt_display) {
}
else {
}
}
if (opt_display) {
display->init(I, 100, 100, "Preliminary Pose Calculation");
}
// Tell the user to click the 4 dots in a fixed order.
std::cout << "**"<< std::endl;
std::cout << "** Preliminary Pose Calculation" << std::endl;
std::cout << "** Click on the 4 dots" << std::endl;
std::cout << "** Dot1: (-x,-y,0), Dot2: (x,-y,0), Dot3: (x,y,0), Dot4: (-x,y,0)" << std::endl;
std::cout << "**" << std::endl;
if (!opt_click_allowed) {
}
// Initialize the tracking of each dot, redrawing the already-selected ones.
for (
unsigned int i = 0;
i < 4; ++
i) {
for (
unsigned int j = 0;
j <
i;
j++)
md[j].display(I);
try {
if (opt_click_allowed)
else
}
catch (...) {
}
if (opt_display) {
}
}
if (opt_display) {
for (
unsigned int i = 0;
i < 4; ++
i) {
}
}
// Presumably the metric half-dimensions of the rectangle formed by the 4
// dots, in meters — TODO confirm against the full file.
double l = 0.06;
double L = 0.07;
for (
unsigned int i = 0;
i < 4; ++
i) {
}
for (
unsigned int i = 0;
i < 4; ++
i) {
}
if (opt_display) {
}
}
#endif
/*!
  Entry point: read an image sequence (from the ViSP dataset or from a
  personal path), compute a preliminary pose from 4 blobs, then render the
  augmented scene with Ogre on top of each image of the sequence.

  NOTE(review): many statements are missing from this excerpt (the
  declarations of grabber, mcam, mcamTmp, cMo, filename, s, t0, t1, ...,
  the tracking code inside the rendering loop, and the
  "catch (const vpException &e)" line before the ViSP-exception handler);
  the braces as shown do not balance — confirm against the full file.
*/
int main(int argc, const char **argv)
{
// Image extension of the dataset images: pgm before dataset 3.6.0,
// png afterwards (and png when no dataset is available).
#if defined(VISP_HAVE_DATASET)
#if VISP_HAVE_DATASET_VERSION >= 0x030600
std::string ext("png");
#else
std::string ext("pgm");
#endif
#else
std::string ext("png");
#endif
try {
std::string env_ipath;
std::string opt_ipath;
std::string ipath;
std::string opt_ppath;
std::string dirname;
bool opt_click_allowed = true;
// Default input path taken from the environment (the code reading
// VISP_INPUT_IMAGE_PATH into env_ipath is missing from this excerpt).
if (!env_ipath.empty())
ipath = env_ipath;
// Parse the command line; stop on -h or on a bad option.
if (getOptions(argc, argv, opt_ipath, opt_ppath, opt_click_allowed) == false) {
return EXIT_FAILURE;
}
// Option -i overrides the environment variable.
if (!opt_ipath.empty())
ipath = opt_ipath;
if (!opt_ipath.empty() && !env_ipath.empty() && opt_ppath.empty()) {
if (ipath != env_ipath) {
std::cout << std::endl << "WARNING: " << std::endl;
std::cout << " Since -i <visp image path=" << ipath << "> "
<< " is different from VISP_IMAGE_PATH=" << env_ipath << std::endl
<< " we skip the environment variable." << std::endl;
}
}
// No usable image location at all: explain how to provide one and leave.
if (opt_ipath.empty() && env_ipath.empty() && opt_ppath.empty()) {
usage(argv[0], nullptr, ipath, opt_ppath);
std::cerr << std::endl << "ERROR:" << std::endl;
std::cerr << " Use -i <visp image path> option or set VISP_INPUT_IMAGE_PATH " << std::endl
<< " environment variable to specify the location of the " << std::endl
<< " image path where test images are located." << std::endl
<< " Use -p <personal image path> option if you want to " << std::endl
<< " use personal images." << std::endl
<< std::endl;
return EXIT_FAILURE;
}
// Build the sequence file name (dataset branch shown; the personal-path
// branch body is missing from this excerpt).
if (opt_ppath.empty()) {
s.setf(std::ios::right, std::ios::adjustfield);
}
else {
}
// Read the first image and compute the preliminary pose from the 4 dots.
try {
std::cout <<
"Load: " <<
filename << std::endl;
computeInitialPose(&mcamTmp, Idisplay, &mPose, md, mcog, &cMo, mP, opt_click_allowed);
}
catch (...) {
std::cerr << std::endl << "ERROR:" << std::endl;
std::cerr <<
" Cannot read " <<
filename << std::endl;
std::cerr << " Check your -i " << ipath << " option " << std::endl
<< " or VISP_INPUT_IMAGE_PATH environment variable." << std::endl;
return EXIT_FAILURE;
}
// Create and initialize the Ogre renderer at the sequence resolution.
vpAROgreExample ogre(mcam,
static_cast<unsigned int>(grabber.
getWidth()),
static_cast<unsigned int>(grabber.
getHeight()));
bool bufferedKeys = false, hidden = false;
ogre.init(IC, bufferedKeys, hidden);
bool quit = false;
// Rendering loop: runs until the user quits, Ogre stops rendering or the
// sequence ends (per-frame tracking code is missing from this excerpt).
while (ogre.continueRendering() && !grabber.
end() && !quit) {
for (
int i = 0;
i < 4; ++
i) {
{
}
}
// Render the virtual scene at the current pose over the camera image.
ogre.display(IC, cMo);
// Show the instantaneous frame rate (t0/t1 are set by missing code).
std::cout <<
"\r> " << 1000 / (
t1 - t0) <<
" fps";
}
return EXIT_SUCCESS;
}
// NOTE(review): the "catch (const vpException &e) {" line presumably
// belongs here but is missing from this excerpt.
std::cout <<
"Catch a ViSP exception: " <<
e << std::endl;
return EXIT_FAILURE;
}
catch (Ogre::Exception &e) {
std::cout <<
"Catch an Ogre exception: " <<
e.getDescription() << std::endl;
return EXIT_FAILURE;
}
catch (...) {
std::cout << "Catch an exception " << std::endl;
return EXIT_FAILURE;
}
}
#else
/*!
  Fallback main(), built when Ogre3D or a display library is not available:
  print a hint explaining how to enable the example, then exit successfully.
*/
int main()
{
#if (!(defined(VISP_HAVE_X11) || defined(VISP_HAVE_GTK) || defined(VISP_HAVE_GDI)))
  // No GUI library available: explain how to enable a display backend.
  const char *msg[] = {
    "You do not have X11, or GTK, or GDI (Graphical Device Interface) functionalities to display images...",
    "Tip if you are on a unix-like system:",
    "- Install X11, configure again ViSP using cmake and build again this example",
    "Tip if you are on a windows-like system:",
    "- Install GDI, configure again ViSP using cmake and build again this example"
  };
#else
  // A display is available but Ogre3D support is missing.
  const char *msg[] = {
    "You do not have Ogre functionalities",
    "Tip:",
    "- Install Ogre3D, configure again ViSP using cmake and build again this example"
  };
#endif
  for (const char *line : msg) {
    // Keep std::endl so each line is flushed exactly as before.
    std::cout << line << std::endl;
  }
  return EXIT_SUCCESS;
}
#endif
Implementation of an augmented reality viewer using Ogre3D 3rd party.
virtual bool customframeEnded(const Ogre::FrameEvent &evt)
virtual bool processInputEvent(const Ogre::FrameEvent &)
virtual void createScene(void)
Generic class defining intrinsic camera parameters.
void init()
Basic initialization with the default parameters.
Class that defines generic functionalities for display.
static void display(const vpImage< unsigned char > &I)
static void displayCross(const vpImage< unsigned char > &I, const vpImagePoint &ip, unsigned int size, const vpColor &color, unsigned int thickness=1)
static void flush(const vpImage< unsigned char > &I)
This tracker is meant to track a blob (connex pixels with same gray level) on a vpImage.
void track(const vpImage< unsigned char > &I, bool canMakeTheWindowGrow=true)
void setGraphics(bool activate)
void display(const vpImage< unsigned char > &I, vpColor color=vpColor::red, unsigned int thickness=1) const
void setSizePrecision(const double &sizePrecision)
void setGrayLevelPrecision(const double &grayLevelPrecision)
vpImagePoint getCog() const
void initTracking(const vpImage< unsigned char > &I, unsigned int size=0)
error that can be emitted by ViSP classes.
unsigned int getWidth() const
Return the number of columns in the image.
unsigned int getHeight() const
Return the number of rows in the image.
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void convert(const vpImage< unsigned char > &src, vpImage< vpRGBa > &dest)
Class that defines a 2D point in an image. This class is useful for image processing and stores only ...
Definition of the vpImage class member functions.
static bool parse(int *argcPtr, const char **argv, vpArgvInfo *argTable, int flags)
static void convertPoint(const vpCameraParameters &cam, const double &u, const double &v, double &x, double &y)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
void set_x(double x)
Set the point x coordinate in the image plane.
void setWorldCoordinates(double oX, double oY, double oZ)
void set_y(double y)
Set the point y coordinate in the image plane.
Class used for pose computation from N points (pose from point only). Some of the algorithms implemen...
void addPoint(const vpPoint &P)
@ DEMENTHON_LAGRANGE_VIRTUAL_VS
bool computePose(vpPoseMethodType method, vpHomogeneousMatrix &cMo, FuncCheckValidityPose func=nullptr)
static void display(vpImage< unsigned char > &I, vpHomogeneousMatrix &cMo, vpCameraParameters &cam, double size, vpColor col=vpColor::none)
Class that enables to manipulate easily a video file or a sequence of images. As it inherits from the...
void open(vpImage< vpRGBa > &I) VP_OVERRIDE
void setFileName(const std::string &filename)
void setFirstFrameIndex(const long first_frame)
void acquire(vpImage< vpRGBa > &I) VP_OVERRIDE
vpDisplay * allocateDisplay()
Return a newly allocated vpDisplay specialization if a GUI library is available or nullptr otherwise.
VISP_EXPORT double measureTimeMs()
VISP_EXPORT int wait(double t0, double t)
A window will appear to select the values of the Ogre settings, such as whether to display in Full Screen mode, or the Video Mode that specifies the size of the window in which the scene will be rendered.
Once settings are accepted, you should see a virtual robot moving on a grass floor. When OIS third-party is enabled, this robot can be animated using keyboard arrows.
When running the example, it may happen that the texture of the robot is not displayed properly. If so, changing the rendering subsystem should fix the issue.