Visual Servoing Platform version 3.7.0
Loading...
Searching...
No Matches
vpOccipitalStructure.cpp
1/*
2 * ViSP, open source Visual Servoing Platform software.
3 * Copyright (C) 2005 - 2025 by Inria. All rights reserved.
4 *
5 * This software is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 * See the file LICENSE.txt at the root directory of this source
10 * distribution for additional information about the GNU GPL.
11 *
12 * For using ViSP with software that can not be combined with the GNU
13 * GPL, please contact Inria about acquiring a ViSP Professional
14 * Edition License.
15 *
16 * See https://visp.inria.fr for more information.
17 *
18 * This software was developed at:
19 * Inria Rennes - Bretagne Atlantique
20 * Campus Universitaire de Beaulieu
21 * 35042 Rennes Cedex
22 * France
23 *
24 * If you have questions regarding the use of this file, please contact
25 * Inria at visp@inria.fr
26 *
27 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
28 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
29 *
30 * Description:
31 * libStructure interface.
32 */
33
34#include <visp3/core/vpConfig.h>
35
36#if defined(VISP_HAVE_OCCIPITAL_STRUCTURE) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11) && defined(VISP_HAVE_THREADS)
#include <cmath>
#include <cstring>
#include <functional>
#include <iomanip>
#include <map>
#include <set>
#include <thread>
43
44#include <ST/Utilities.h>
45
46#include <visp3/core/vpImageConvert.h>
47#include <visp3/core/vpMeterPixelConversion.h>
48#include <visp3/core/vpPoint.h>
49#include <visp3/sensor/vpOccipitalStructure.h>
50
51#define MANUAL_POINTCLOUD 1
52
58
64
71void vpOccipitalStructure::acquire(vpImage<unsigned char> &gray, bool undistorted, double *ts)
72{
73 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
74 m_delegate.cv_sampleLock.wait(u);
75
76 if (m_delegate.m_visibleFrame.isValid()) {
77 if (!undistorted)
78 memcpy(gray.bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
79 else
80 memcpy(gray.bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());
81
82 if (ts != nullptr)
83 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
84 }
85
86 u.unlock();
87}
88
95void vpOccipitalStructure::acquire(vpImage<vpRGBa> &rgb, bool undistorted, double *ts)
96{
97 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
98 m_delegate.cv_sampleLock.wait(u);
99
100 if (m_delegate.m_visibleFrame.isValid()) {
101 // Detecting if it's a Color Structure Core device.
102 if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
103 vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
104 reinterpret_cast<unsigned char *>(rgb.bitmap),
105 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
106
107 else // If it's a monochrome Structure Core device.
108 {
109 if (!undistorted)
110 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
111 reinterpret_cast<unsigned char *>(rgb.bitmap),
112 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
113 else
114 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
115 reinterpret_cast<unsigned char *>(rgb.bitmap),
116 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
117 }
118
119 if (ts != nullptr)
120 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
121 }
122
123 u.unlock();
124}
125
136 vpColVector *gyroscope_data, bool undistorted, double *ts)
137{
138 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
139 m_delegate.cv_sampleLock.wait(u);
140
141 if (rgb != nullptr && m_delegate.m_visibleFrame.isValid()) {
142 // Detecting if it's a Color Structure Core device.
143 if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
144 vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
145 reinterpret_cast<unsigned char *>(rgb->bitmap),
146 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
147
148 else // If it's a monochrome Structure Core device.
149 {
150 if (!undistorted)
151 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
152 reinterpret_cast<unsigned char *>(rgb->bitmap),
153 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
154 else
155 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
156 reinterpret_cast<unsigned char *>(rgb->bitmap),
157 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
158 }
159
160 if (ts != nullptr)
161 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
162 }
163
164 if (depth != nullptr && m_delegate.m_depthFrame.isValid())
165 memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
166 m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);
167
168 if (acceleration_data != nullptr) {
169 ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
170 acceleration_data[0] = accel.x;
171 acceleration_data[1] = accel.y;
172 acceleration_data[2] = accel.z;
173 }
174
175 if (gyroscope_data != nullptr) {
176 ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
177 gyroscope_data[0] = gyro_data.x;
178 gyroscope_data[1] = gyro_data.y;
179 gyroscope_data[2] = gyro_data.z;
180 }
181
182 if (ts != nullptr) // By default, frames are synchronized.
183 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
184
185 u.unlock();
186}
187
198 vpColVector *gyroscope_data, bool undistorted, double *ts)
199{
200 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
201 m_delegate.cv_sampleLock.wait(u);
202
203 if (gray != nullptr && m_delegate.m_visibleFrame.isValid()) {
204 if (!undistorted)
205 memcpy(gray->bitmap, m_delegate.m_visibleFrame.yData(), m_delegate.m_visibleFrame.ySize());
206 else
207 memcpy(gray->bitmap, m_delegate.m_visibleFrame.undistorted().yData(), m_delegate.m_visibleFrame.ySize());
208
209 if (ts != nullptr)
210 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
211 }
212
213 if (depth != nullptr && m_delegate.m_depthFrame.isValid())
214 memcpy((unsigned char *)depth->bitmap, m_delegate.m_depthFrame.convertDepthToRgba(),
215 m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * 4);
216
217 if (acceleration_data != nullptr) {
218 ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
219 acceleration_data[0] = accel.x;
220 acceleration_data[1] = accel.y;
221 acceleration_data[2] = accel.z;
222 }
223
224 if (gyroscope_data != nullptr) {
225 ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
226 gyroscope_data[0] = gyro_data.x;
227 gyroscope_data[1] = gyro_data.y;
228 gyroscope_data[2] = gyro_data.z;
229 }
230
231 if (ts != nullptr) // By default, frames are synchronized.
232 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
233
234 u.unlock();
235}
236
248void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
249 std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
250 vpColVector *acceleration_data, vpColVector *gyroscope_data, bool undistorted,
251 double *ts)
252{
253 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
254 m_delegate.cv_sampleLock.wait(u);
255
256 if (m_delegate.m_depthFrame.isValid() && data_depth != nullptr)
257 memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
258 m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
259
260 if (m_delegate.m_visibleFrame.isValid() && data_image != nullptr) {
261 if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
262 vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
263 reinterpret_cast<unsigned char *>(data_image),
264 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
265
266 else // If it's a monochrome Structure Core device.
267 {
268 if (!undistorted)
269 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
270 reinterpret_cast<unsigned char *>(data_image),
271 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
272 else
273 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
274 reinterpret_cast<unsigned char *>(data_image),
275 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
276 }
277 }
278
279 if (m_delegate.m_infraredFrame.isValid() && data_infrared != nullptr)
280 memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
281 m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
282
283 if (data_pointCloud != nullptr)
284 getPointcloud(*data_pointCloud);
285
286 if (acceleration_data != nullptr) {
287 ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
288 acceleration_data[0] = accel.x;
289 acceleration_data[1] = accel.y;
290 acceleration_data[2] = accel.z;
291 }
292
293 if (gyroscope_data != nullptr) {
294 ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
295 gyroscope_data[0] = gyro_data.x;
296 gyroscope_data[1] = gyro_data.y;
297 gyroscope_data[2] = gyro_data.z;
298 }
299
300 if (ts != nullptr) // By default, frames are synchronized.
301 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
302
303 u.unlock();
304}
305
306#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
319void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
320 std::vector<vpColVector> *const data_pointCloud,
321 pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
322 vpColVector *acceleration_data, vpColVector *gyroscope_data, bool undistorted,
323 double *ts)
324{
325 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
326 m_delegate.cv_sampleLock.wait(u);
327
328 if (m_delegate.m_visibleFrame.isValid()) {
329 if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
330 vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
331 reinterpret_cast<unsigned char *>(data_image),
332 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
333
334 else // If it's a monochrome Structure Core device.
335 {
336 if (!undistorted)
337 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
338 reinterpret_cast<unsigned char *>(data_image),
339 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
340 else
341 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
342 reinterpret_cast<unsigned char *>(data_image),
343 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
344 }
345 }
346
347 if (m_delegate.m_depthFrame.isValid() && data_depth != nullptr)
348 memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
349 m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
350
351 if (m_delegate.m_infraredFrame.isValid() && data_infrared != nullptr)
352 memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
353 m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
354
355 if (data_pointCloud != nullptr)
356 getPointcloud(*data_pointCloud);
357
358 if (m_delegate.m_depthFrame.isValid() && pointcloud != nullptr)
359 getPointcloud(pointcloud);
360
361 if (acceleration_data != nullptr) {
362 ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
363 acceleration_data[0] = accel.x;
364 acceleration_data[1] = accel.y;
365 acceleration_data[2] = accel.z;
366 }
367
368 if (gyroscope_data != nullptr) {
369 ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
370 gyroscope_data[0] = gyro_data.x;
371 gyroscope_data[1] = gyro_data.y;
372 gyroscope_data[2] = gyro_data.z;
373 }
374
375 if (ts != nullptr) // By default, frames are synchronized.
376 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
377
378 u.unlock();
379}
380
393void vpOccipitalStructure::acquire(unsigned char *const data_image, unsigned char *const data_depth,
394 std::vector<vpColVector> *const data_pointCloud,
395 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud,
396 unsigned char *const data_infrared, vpColVector *acceleration_data,
397 vpColVector *gyroscope_data, bool undistorted, double *ts)
398{
399 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
400 m_delegate.cv_sampleLock.wait(u);
401
402 if (m_delegate.m_depthFrame.isValid() && data_depth != nullptr)
403 memcpy(data_depth, m_delegate.m_depthFrame.depthInMillimeters(),
404 m_delegate.m_depthFrame.width() * m_delegate.m_depthFrame.height() * sizeof(float));
405
406 if (m_delegate.m_visibleFrame.isValid() && data_image != nullptr) {
407 if (m_delegate.m_cameraType == ST::StructureCoreCameraType::Color)
408 vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.rgbData()),
409 reinterpret_cast<unsigned char *>(data_image),
410 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
411
412 else // If it's a monochrome Structure Core device.
413 {
414 if (!undistorted)
415 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.yData()),
416 reinterpret_cast<unsigned char *>(data_image),
417 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
418 else
419 vpImageConvert::GreyToRGBa(const_cast<unsigned char *>(m_delegate.m_visibleFrame.undistorted().yData()),
420 reinterpret_cast<unsigned char *>(data_image),
421 m_delegate.m_visibleFrame.width() * m_delegate.m_visibleFrame.height());
422 }
423 }
424
425 if (m_delegate.m_infraredFrame.isValid() && data_infrared != nullptr)
426 memcpy(data_infrared, m_delegate.m_infraredFrame.data(),
427 m_delegate.m_infraredFrame.width() * m_delegate.m_infraredFrame.height() * sizeof(uint16_t));
428
429 if (m_delegate.m_depthFrame.isValid() && data_pointCloud != nullptr)
430 getPointcloud(*data_pointCloud);
431
432 if (m_delegate.m_depthFrame.isValid() && m_delegate.m_visibleFrame.isValid() && pointcloud != nullptr)
433 getColoredPointcloud(pointcloud);
434
435 if (acceleration_data != nullptr) {
436 ST::Acceleration accel = m_delegate.m_accelerometerEvent.acceleration();
437 acceleration_data[0] = accel.x;
438 acceleration_data[1] = accel.y;
439 acceleration_data[2] = accel.z;
440 }
441
442 if (gyroscope_data != nullptr) {
443 ST::RotationRate gyro_data = m_delegate.m_gyroscopeEvent.rotationRate();
444 gyroscope_data[0] = gyro_data.x;
445 gyroscope_data[1] = gyro_data.y;
446 gyroscope_data[2] = gyro_data.z;
447 }
448
449 if (ts != nullptr) // By default, frames are synchronized.
450 *ts = m_delegate.m_visibleFrame.arrivalTimestamp();
451
452 u.unlock();
453}
454
455#endif
456
479{
480 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
481 m_delegate.cv_sampleLock.wait(u);
482
483 if (imu_vel != nullptr) {
484 imu_vel->resize(3, false);
485 ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
486 (*imu_vel)[0] = imu_rotationRate.x;
487 (*imu_vel)[1] = imu_rotationRate.y;
488 (*imu_vel)[2] = imu_rotationRate.z;
489 }
490
491 if (ts != nullptr)
492 *ts = m_delegate.m_gyroscopeEvent.arrivalTimestamp();
493
494 u.unlock();
495}
496
519{
520 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
521 m_delegate.cv_sampleLock.wait(u);
522
523 if (imu_acc != nullptr) {
524 imu_acc->resize(3, false);
525 ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
526 (*imu_acc)[0] = imu_acceleration.x;
527 (*imu_acc)[1] = imu_acceleration.y;
528 (*imu_acc)[2] = imu_acceleration.z;
529 }
530
531 if (ts != nullptr)
532 *ts = m_delegate.m_accelerometerEvent.arrivalTimestamp();
533
534 u.unlock();
535}
536
560void vpOccipitalStructure::getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts)
561{
562 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
563 m_delegate.cv_sampleLock.wait(u);
564 double imu_vel_timestamp = 0., imu_acc_timestamp = 0.;
565
566 if (imu_vel != nullptr) {
567 imu_vel->resize(3, false);
568 ST::RotationRate imu_rotationRate = m_delegate.m_gyroscopeEvent.rotationRate();
569 (*imu_vel)[0] = imu_rotationRate.x;
570 (*imu_vel)[1] = imu_rotationRate.y;
571 (*imu_vel)[2] = imu_rotationRate.z;
572 imu_vel_timestamp =
573 m_delegate.m_gyroscopeEvent.arrivalTimestamp(); // Relative to an unspecified epoch. (see documentation).
574 }
575
576 if (imu_acc != nullptr) {
577 imu_acc->resize(3, false);
578 ST::Acceleration imu_acceleration = m_delegate.m_accelerometerEvent.acceleration();
579 (*imu_acc)[0] = imu_acceleration.x;
580 (*imu_acc)[1] = imu_acceleration.y;
581 (*imu_acc)[2] = imu_acceleration.z;
582 imu_acc_timestamp = m_delegate.m_accelerometerEvent.arrivalTimestamp();
583 }
584
585 if (ts != nullptr) {
586 *ts = std::max<double>(imu_vel_timestamp, imu_acc_timestamp);
587 }
588
589 u.unlock();
590}
591
/*!
  Start streaming from the device with the given capture-session settings.
  If a session is already open it is closed first, then monitoring is
  (re)started and the call blocks until the first sample arrives or a
  20-second timeout elapses.

  \param settings : Capture-session settings; stored in
  m_captureSessionSettings on success.
  \return true if streaming started and a first sample was received,
  false on start failure or timeout.
*/
bool vpOccipitalStructure::open(const ST::CaptureSessionSettings &settings)
{
  // Re-opening: tear down the previous session first.
  if (m_init) {
    close();
  }

  m_captureSession.setDelegate(&m_delegate);

  if (!m_captureSession.startMonitoring(settings)) {
    std::cout << "Failed to initialize capture session!" << std::endl;
    return false;
  }

  // This blocks open() until the CaptureSession receives the first
  // frame, or until the timeout expires — i.e. it verifies a device is
  // actually connected and producing data.
  // NOTE(review): wait_for() is used without a predicate, so a spurious
  // wakeup could report success before a sample arrived — confirm whether
  // the delegate guarantees a notification only on real samples.
  std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
  std::cv_status var =
      m_delegate.cv_sampleLock.wait_for(u, std::chrono::seconds(20)); // Make sure a device is connected.

  // In case the device is not connected, open() should return false.
  if (var == std::cv_status::timeout) {
    m_init = false;
    return m_init;
  }

  m_init = true;
  m_captureSessionSettings = settings;
  return m_init;
}
625
637{
638 if (m_init) {
639 m_captureSession.stopStreaming();
640 m_init = false;
641 }
642}
643
649{
650 switch (stream_type) {
652 if (m_delegate.m_visibleFrame.isValid())
653 return m_delegate.m_visibleFrame.width();
654 break;
655
657 if (m_delegate.m_depthFrame.isValid())
658 return m_delegate.m_depthFrame.width();
659 break;
660
662 if (m_delegate.m_infraredFrame.isValid())
663 return m_delegate.m_infraredFrame.width();
664 break;
665
666 default:
667 break;
668 }
669
670 return 0;
671}
672
678{
679 switch (stream_type) {
681 if (m_delegate.m_visibleFrame.isValid())
682 return m_delegate.m_visibleFrame.height();
683 break;
684
686 if (m_delegate.m_depthFrame.isValid())
687 return m_delegate.m_depthFrame.height();
688 break;
689
691 if (m_delegate.m_infraredFrame.isValid())
692 return m_delegate.m_infraredFrame.height();
693 break;
694
695 default:
696 break;
697 }
698
699 return 0;
700}
701
708{
709 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
710 m_delegate.cv_sampleLock.wait(u);
711
712 if (m_delegate.m_depthFrame.isValid())
713 return m_delegate.m_depthFrame(x, y);
714
715 else
716 return 0.;
717
718 u.unlock();
719}
720
726// Return vpColVector
728{
729 std::unique_lock<std::mutex> u(m_delegate.m_sampleLock);
730 m_delegate.cv_sampleLock.wait(u);
731
732 if (m_delegate.m_depthFrame.isValid()) {
733 ST::Vector3f point_3D = m_delegate.m_depthFrame.unprojectPoint(row, col);
734 return vpPoint(point_3D.x, point_3D.y, point_3D.z);
735 }
736
737 else // Should be returning invalid vpPoint.
738 return vpPoint(0., 0., 0.);
739
740 u.unlock();
741}
742
751{
752 vpHomogeneousMatrix result;
753
754 switch (from) {
757 ST::Matrix4 v_M_d = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame();
758
759 result[0][0] = v_M_d.m00;
760 result[0][1] = v_M_d.m10;
761 result[0][2] = v_M_d.m20;
762 result[0][3] = v_M_d.m30;
763 result[1][0] = v_M_d.m01;
764 result[1][1] = v_M_d.m11;
765 result[1][2] = v_M_d.m21;
766 result[1][3] = v_M_d.m31;
767 result[2][0] = v_M_d.m02;
768 result[2][1] = v_M_d.m12;
769 result[2][2] = v_M_d.m22;
770 result[2][3] = v_M_d.m32;
771 }
772
773 if (to == vpOccipitalStructure::imu) {
774 ST::Matrix4 imu_M_d = m_captureSession.getImuFromDepthExtrinsics().inversed();
775
776 result[0][0] = imu_M_d.m00;
777 result[0][1] = imu_M_d.m10;
778 result[0][2] = imu_M_d.m20;
779 result[0][3] = imu_M_d.m30;
780 result[1][0] = imu_M_d.m01;
781 result[1][1] = imu_M_d.m11;
782 result[1][2] = imu_M_d.m21;
783 result[1][3] = imu_M_d.m31;
784 result[2][0] = imu_M_d.m02;
785 result[2][1] = imu_M_d.m12;
786 result[2][2] = imu_M_d.m22;
787 result[2][3] = imu_M_d.m32;
788 }
789 break;
790
792 if (to == vpOccipitalStructure::depth) {
793 ST::Matrix4 d_M_v = m_delegate.m_depthFrame.visibleCameraPoseInDepthCoordinateFrame().inversed();
794
795 result[0][0] = d_M_v.m00;
796 result[0][1] = d_M_v.m10;
797 result[0][2] = d_M_v.m20;
798 result[0][3] = d_M_v.m30;
799 result[1][0] = d_M_v.m01;
800 result[1][1] = d_M_v.m11;
801 result[1][2] = d_M_v.m21;
802 result[1][3] = d_M_v.m31;
803 result[2][0] = d_M_v.m02;
804 result[2][1] = d_M_v.m12;
805 result[2][2] = d_M_v.m22;
806 result[2][3] = d_M_v.m32;
807 }
808
809 if (to == vpOccipitalStructure::imu) {
810 ST::Matrix4 imu_M_v = m_captureSession.getImuFromVisibleExtrinsics().inversed();
811
812 result[0][0] = imu_M_v.m00;
813 result[0][1] = imu_M_v.m10;
814 result[0][2] = imu_M_v.m20;
815 result[0][3] = imu_M_v.m30;
816 result[1][0] = imu_M_v.m01;
817 result[1][1] = imu_M_v.m11;
818 result[1][2] = imu_M_v.m21;
819 result[1][3] = imu_M_v.m31;
820 result[2][0] = imu_M_v.m02;
821 result[2][1] = imu_M_v.m12;
822 result[2][2] = imu_M_v.m22;
823 result[2][3] = imu_M_v.m32;
824 }
825 break;
826
828 break;
829
831 if (to == vpOccipitalStructure::depth) {
832 ST::Matrix4 d_M_imu = m_captureSession.getImuFromDepthExtrinsics();
833
834 result[0][0] = d_M_imu.m00;
835 result[0][1] = d_M_imu.m10;
836 result[0][2] = d_M_imu.m20;
837 result[0][3] = d_M_imu.m30;
838 result[1][0] = d_M_imu.m01;
839 result[1][1] = d_M_imu.m11;
840 result[1][2] = d_M_imu.m21;
841 result[1][3] = d_M_imu.m31;
842 result[2][0] = d_M_imu.m02;
843 result[2][1] = d_M_imu.m12;
844 result[2][2] = d_M_imu.m22;
845 result[2][3] = d_M_imu.m32;
846 }
847
849 ST::Matrix4 v_M_imu = m_captureSession.getImuFromVisibleExtrinsics();
850
851 result[0][0] = v_M_imu.m00;
852 result[0][1] = v_M_imu.m10;
853 result[0][2] = v_M_imu.m20;
854 result[0][3] = v_M_imu.m30;
855 result[1][0] = v_M_imu.m01;
856 result[1][1] = v_M_imu.m11;
857 result[1][2] = v_M_imu.m21;
858 result[1][3] = v_M_imu.m31;
859 result[2][0] = v_M_imu.m02;
860 result[2][1] = v_M_imu.m12;
861 result[2][2] = v_M_imu.m22;
862 result[2][3] = v_M_imu.m32;
863 }
864 break;
865
866 default:
867 break;
868 }
869
870 return result;
871}
872
878{
879 ST::Intrinsics result;
880
881 switch (stream_type) {
883 result = m_delegate.m_visibleFrame.intrinsics();
884 break;
885
887 result = m_delegate.m_depthFrame.intrinsics();
888 break;
889
891 result = m_delegate.m_infraredFrame.intrinsics();
892 break;
893
894 default:
895 // Deal with exceptions.
896 break;
897 }
898
899 return result;
900}
901
908{
909 m_delegate.m_depthFrame.saveImageAsPointCloudMesh(filename.c_str());
910}
911
919{
920 ST::Intrinsics cam_intrinsics;
921 switch (stream_type) {
923 cam_intrinsics = m_delegate.m_visibleFrame.intrinsics();
925 m_visible_camera_parameters = vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx,
926 cam_intrinsics.cy, cam_intrinsics.k1, -cam_intrinsics.k1);
927
930 vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
931
933 break;
934
936 cam_intrinsics = m_delegate.m_depthFrame.intrinsics();
938 vpCameraParameters(cam_intrinsics.fx, cam_intrinsics.fy, cam_intrinsics.cx, cam_intrinsics.cy);
939
941 break;
942
943 default: // Should throw exception for not having this type of extrinsic transforms.
944 return vpCameraParameters();
945 break;
946 }
947}
948
949// Protected functions.
950void vpOccipitalStructure::getPointcloud(std::vector<vpColVector> &pointcloud)
951{
952 ST::DepthFrame last_df = m_delegate.m_depthFrame;
953 const int width = last_df.width();
954 const int height = last_df.height();
955 pointcloud.resize(static_cast<size_t>(width * height));
956
957 const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
958
959// Multi-threading if OpenMP
960// Concurrent writes at different locations are safe
961#pragma omp parallel for schedule(dynamic)
962 for (int i = 0; i < height; i++) {
963 auto depth_pixel_index = i * width;
964
965 for (int j = 0; j < width; j++, depth_pixel_index++) {
966 if (std::isnan(p_depth_frame[depth_pixel_index])) {
967 pointcloud[static_cast<size_t>(depth_pixel_index)].resize(4, false);
968 pointcloud[static_cast<size_t>(depth_pixel_index)][0] = m_invalidDepthValue;
969 pointcloud[static_cast<size_t>(depth_pixel_index)][1] = m_invalidDepthValue;
970 pointcloud[static_cast<size_t>(depth_pixel_index)][2] = m_invalidDepthValue;
971 pointcloud[static_cast<size_t>(depth_pixel_index)][3] = 1.0;
972 continue;
973 }
974
975 // Get the depth value of the current pixel
976 auto pixels_distance = p_depth_frame[depth_pixel_index];
977
978 ST::Vector3f point_3D = last_df.unprojectPoint(i, j);
979
980 if (pixels_distance > m_maxZ)
981 point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;
982
983 pointcloud[static_cast<size_t>(depth_pixel_index)].resize(4, false);
984 pointcloud[static_cast<size_t>(depth_pixel_index)][0] = point_3D.x * 0.001;
985 pointcloud[static_cast<size_t>(depth_pixel_index)][1] = point_3D.y * 0.001;
986 pointcloud[static_cast<size_t>(depth_pixel_index)][2] = point_3D.z * 0.001;
987 pointcloud[static_cast<size_t>(depth_pixel_index)][3] = 1.0;
988 }
989 }
990}
991
992#if defined(VISP_HAVE_PCL) && defined(VISP_HAVE_PCL_COMMON)
993void vpOccipitalStructure::getPointcloud(pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
994{
995 ST::DepthFrame last_df = m_delegate.m_depthFrame;
996 const int width = last_df.width();
997 const int height = last_df.height();
998 pointcloud->width = static_cast<uint32_t>(width);
999 pointcloud->height = static_cast<uint32_t>(height);
1000 pointcloud->resize(static_cast<size_t>(width * height));
1001
1002#if MANUAL_POINTCLOUD // faster to compute manually when tested
1003 const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
1004
1005 // Multi-threading if OpenMP
1006 // Concurrent writes at different locations are safe
1007#pragma omp parallel for schedule(dynamic)
1008 for (int i = 0; i < height; i++) {
1009 auto depth_pixel_index = i * width;
1010
1011 for (int j = 0; j < width; j++, depth_pixel_index++) {
1012 if (p_depth_frame[depth_pixel_index] == 0) {
1013 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = m_invalidDepthValue;
1014 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = m_invalidDepthValue;
1015 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = m_invalidDepthValue;
1016 continue;
1017 }
1018
1019 // Get the depth value of the current pixel
1020 auto pixels_distance = p_depth_frame[depth_pixel_index];
1021
1022 // Get 3D coordinates in depth frame. (in mm)
1023 ST::Vector3f point_3D = last_df.unprojectPoint(i, j);
1024
1025 if (pixels_distance > m_maxZ)
1026 point_3D.x = point_3D.y = point_3D.z = m_invalidDepthValue;
1027
1028 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = point_3D.x * 0.001;
1029 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = point_3D.y * 0.001;
1030 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = point_3D.z * 0.001;
1031 }
1032 }
1033// #else
1034// m_points = m_pointcloud.calculate(depth_frame);
1035// auto vertices = m_points.get_vertices();
1036
1037// for (size_t i = 0; i < m_points.size(); i++) {
1038// if (vertices[i].z <= 0.0f || vertices[i].z > m_maxZ) {
1039// pointcloud->points[i].x = m_invalidDepthValue;
1040// pointcloud->points[i].y = m_invalidDepthValue;
1041// pointcloud->points[i].z = m_invalidDepthValue;
1042// } else {
1043// pointcloud->points[i].x = vertices[i].x;
1044// pointcloud->points[i].y = vertices[i].y;
1045// pointcloud->points[i].z = vertices[i].z;
1046// }
1047// }
1048#endif
1049}
1050
1051void vpOccipitalStructure::getColoredPointcloud(pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
1052{
1053 ST::DepthFrame last_df = m_delegate.m_depthFrame;
1054 ST::ColorFrame last_cf = m_delegate.m_visibleFrame;
1055
1056 const int depth_width = last_df.width();
1057 const int depth_height = last_df.height();
1058 pointcloud->width = static_cast<uint32_t>(depth_width);
1059 pointcloud->height = static_cast<uint32_t>(depth_height);
1060 pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));
1061
1062 const int color_width = last_cf.width();
1063 const int color_height = last_cf.height();
1064
1065 const float *p_depth_frame = reinterpret_cast<const float *>(last_df.depthInMillimeters());
1066 const ST::Matrix4 depth2ColorExtrinsics = last_df.visibleCameraPoseInDepthCoordinateFrame(); // c_M_d
1067
1068 const bool swap_rb = false;
1069 unsigned int nb_color_pixel = 3; // RGB image.
1070 const uint8_t *p_color_frame = static_cast<const uint8_t *>(last_cf.rgbData());
1071
1072 const bool registered_streams = last_df.isRegisteredTo(last_cf);
1073
1074 // Multi-threading if OpenMP
1075 // Concurrent writes at different locations are safe
1076#pragma omp parallel for schedule(dynamic)
1077 for (int i = 0; i < depth_height; i++) {
1078 auto depth_pixel_index = i * depth_width;
1079
1080 for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
1081 if (std::isnan(p_depth_frame[depth_pixel_index])) {
1082 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = m_invalidDepthValue;
1083 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = m_invalidDepthValue;
1084 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = m_invalidDepthValue;
1085
1086 // For out of bounds color data, default to black.
1087#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1088 unsigned int r = 0, g = 0, b = 0;
1089 uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1090
1091 pointcloud->points[static_cast<size_t>(depth_pixel_index)].rgb = *reinterpret_cast<float *>(&rgb);
1092#else
1093 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r = (uint8_t)0;
1094 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g = (uint8_t)0;
1095 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b = (uint8_t)0;
1096#endif
1097 continue;
1098 }
1099
1100 // Get the depth value of the current pixel
1101 auto pixels_distance = p_depth_frame[depth_pixel_index];
1102
1103 ST::Vector3f depth_point_3D = last_df.unprojectPoint(i, j);
1104
1105 if (pixels_distance > m_maxZ) {
1106 depth_point_3D.x = depth_point_3D.y = depth_point_3D.z = m_invalidDepthValue;
1107 }
1108
1109 pointcloud->points[static_cast<size_t>(depth_pixel_index)].x = depth_point_3D.x * 0.001;
1110 pointcloud->points[static_cast<size_t>(depth_pixel_index)].y = depth_point_3D.y * 0.001;
1111 pointcloud->points[static_cast<size_t>(depth_pixel_index)].z = depth_point_3D.z * 0.001;
1112
1113 if (!registered_streams) {
1114
1115 ST::Vector3f color_point_3D = depth2ColorExtrinsics * depth_point_3D;
1116
1117 ST::Vector2f color_pixel;
1118 color_pixel.x = -1;
1119 color_pixel.y = -1;
1120 if (color_point_3D.z != 0) {
1121 double x, y, pixel_x = 0., pixel_y = 0.;
1122 x = static_cast<double>(color_point_3D.y / color_point_3D.z);
1123 y = static_cast<double>(color_point_3D.x / color_point_3D.z);
1125 color_pixel.x = pixel_x;
1126 color_pixel.y = pixel_y;
1127 }
1128
1129 if (color_pixel.y < 0 || color_pixel.y >= color_height || color_pixel.x < 0 || color_pixel.x >= color_width) {
 // For out of bounds color data, default to black (matches the out-of-depth-bounds case above).
1131#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1132 unsigned int r = 0, g = 0, b = 0;
1133 uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));
1134
1135 pointcloud->points[static_cast<size_t>(depth_pixel_index)].rgb = *reinterpret_cast<float *>(&rgb);
1136#else
1137 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r = (uint8_t)0;
1138 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g = (uint8_t)0;
1139 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b = (uint8_t)0;
1140#endif
1141 }
1142 else {
1143 unsigned int i_ = static_cast<unsigned int>(color_pixel.x);
1144 unsigned int j_ = static_cast<unsigned int>(color_pixel.y);
1145
1146#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1147 uint32_t rgb = 0;
1148 if (swap_rb) {
1149 rgb =
1150 (static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel]) |
1151 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1]) << 8 |
1152 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2])
1153 << 16);
1154 }
1155 else {
1156 rgb =
1157 (static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel]) << 16 |
1158 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1]) << 8 |
1159 static_cast<uint32_t>(p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2]));
1160 }
1161
1162 pointcloud->points[static_cast<size_t>(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1163#else
1164 if (swap_rb) {
1165 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1166 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel];
1167 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1168 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1];
1169 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1170 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2];
1171 }
1172 else {
1173 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1174 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel];
1175 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1176 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 1];
1177 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1178 p_color_frame[(i_ * static_cast<unsigned int>(color_width) + j_) * nb_color_pixel + 2];
1179 }
1180#endif
1181 }
1182 }
1183 else {
1184#if (VISP_HAVE_PCL_VERSION < 0x010100) // 1.1.0
1185 uint32_t rgb = 0;
1186 if (swap_rb) {
1187 rgb = (static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel]) |
1188 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1]) << 8 |
1189 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2]) << 16);
1190 }
1191 else {
1192 rgb = (static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel]) << 16 |
1193 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1]) << 8 |
1194 static_cast<uint32_t>(p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2]));
1195 }
1196
1197 pointcloud->points[static_cast<size_t>(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
1198#else
1199 if (swap_rb) {
1200 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1201 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel];
1202 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1203 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1];
1204 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1205 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2];
1206 }
1207 else {
1208 pointcloud->points[static_cast<size_t>(depth_pixel_index)].r =
1209 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel];
1210 pointcloud->points[static_cast<size_t>(depth_pixel_index)].g =
1211 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 1];
1212 pointcloud->points[static_cast<size_t>(depth_pixel_index)].b =
1213 p_color_frame[(i * static_cast<unsigned int>(color_width) + j) * nb_color_pixel + 2];
1214 }
1215#endif
1216 }
1217 }
1218 }
1219}
1220
1221#endif
1222END_VISP_NAMESPACE
1223#endif
Generic class defining intrinsic camera parameters.
@ perspectiveProjWithDistortion
Perspective projection with distortion model.
@ perspectiveProjWithoutDistortion
Perspective projection without distortion model.
Implementation of column vector and the associated operations.
void resize(unsigned int i, bool flagNullify=true)
Implementation of an homogeneous matrix and operations on such kind of matrices.
static void GreyToRGBa(unsigned char *grey, unsigned char *rgba, unsigned int width, unsigned int height)
static void RGBToRGBa(unsigned char *rgb, unsigned char *rgba, unsigned int size)
Definition of the vpImage class member functions.
Definition vpImage.h:131
Type * bitmap
points toward the bitmap
Definition vpImage.h:135
static void convertPoint(const vpCameraParameters &cam, const double &x, const double &y, double &u, double &v)
vpPoint unprojectPoint(int row, int col)
void getIMUData(vpColVector *imu_vel, vpColVector *imu_acc, double *ts=nullptr)
unsigned int getHeight(vpOccipitalStructureStream stream_type)
vpCameraParameters getCameraParameters(const vpOccipitalStructureStream stream_type, vpCameraParameters::vpCameraParametersProjType type=vpCameraParameters::perspectiveProjWithoutDistortion)
void acquire(vpImage< unsigned char > &gray, bool undistorted=false, double *ts=nullptr)
vpCameraParameters m_visible_camera_parameters
vpCameraParameters m_depth_camera_parameters
void saveDepthImageAsPointCloudMesh(std::string &filename)
float getDepth(int x, int y)
ST::Intrinsics getIntrinsics(const vpOccipitalStructureStream stream_type) const
void getColoredPointcloud(pcl::PointCloud< pcl::PointXYZRGB >::Ptr &pointcloud)
ST::CaptureSession m_captureSession
void getPointcloud(std::vector< vpColVector > &pointcloud)
unsigned int getWidth(vpOccipitalStructureStream stream_type)
ST::CaptureSessionSettings m_captureSessionSettings
vpHomogeneousMatrix getTransform(const vpOccipitalStructureStream from, const vpOccipitalStructureStream to)
void getIMUAcceleration(vpColVector *imu_acc, double *ts)
void getIMUVelocity(vpColVector *imu_vel, double *ts)
bool open(const ST::CaptureSessionSettings &settings)
Class that defines a 3D point in the object frame and allows forward projection of a 3D point in the ...
Definition vpPoint.h:79