vpRealSense2.cpp
/****************************************************************************
 *
 * ViSP, open source Visual Servoing Platform software.
 * Copyright (C) 2005 - 2023 by Inria. All rights reserved.
 *
 * This software is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 * See the file LICENSE.txt at the root directory of this source
 * distribution for additional information about the GNU GPL.
 *
 * For using ViSP with software that can not be combined with the GNU
 * GPL, please contact Inria about acquiring a ViSP Professional
 * Edition License.
 *
 * See https://visp.inria.fr for more information.
 *
 * This software was developed at:
 * Inria Rennes - Bretagne Atlantique
 * Campus Universitaire de Beaulieu
 * 35042 Rennes Cedex
 * France
 *
 * If you have questions regarding the use of this file, please contact
 * Inria at visp@inria.fr
 *
 * This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
 * WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
 *
 * Description:
 * librealsense2 interface.
 *
*****************************************************************************/

#include <visp3/core/vpConfig.h>

#if defined(VISP_HAVE_REALSENSE2) && (VISP_CXX_STANDARD >= VISP_CXX_STANDARD_11)
#include <cmath>
#include <cstring>
#include <iomanip>
#include <limits>
#include <map>
#include <set>
#include <sstream>
#include <visp3/core/vpImageConvert.h>
#include <visp3/sensor/vpRealSense2.h>

#define MANUAL_POINTCLOUD 1

namespace
{
bool operator==(const rs2_extrinsics &lhs, const rs2_extrinsics &rhs)
{
  for (int i = 0; i < 3; i++) {
    for (int j = 0; j < 3; j++) {
      if (std::fabs(lhs.rotation[i * 3 + j] - rhs.rotation[i * 3 + j]) > std::numeric_limits<float>::epsilon()) {
        return false;
      }
    }

    if (std::fabs(lhs.translation[i] - rhs.translation[i]) > std::numeric_limits<float>::epsilon()) {
      return false;
    }
  }

  return true;
}
} // namespace

vpRealSense2::vpRealSense2()
  : m_depthScale(0.0f), m_invalidDepthValue(0.0f), m_max_Z(8.0f), m_pipe(), m_pipelineProfile(), m_pointcloud(),
    m_points(), m_pos(), m_quat(), m_rot(), m_product_line(), m_init(false)
{
}

vpRealSense2::~vpRealSense2() { close(); }

void vpRealSense2::acquire(vpImage<unsigned char> &grey, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  auto color_frame = data.get_color_frame();
  getGreyFrame(color_frame, grey);
  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

void vpRealSense2::acquire(vpImage<vpRGBa> &color, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  auto color_frame = data.get_color_frame();
  getColorFrame(color_frame, color);
  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}
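
/*
 * Usage sketch (illustrative; the device and default stream settings are
 * assumptions, not taken from this file): grab color and grey-level images.
 *
 * \code
 * vpRealSense2 rs;
 * rs.open();
 * vpImage<vpRGBa> I_color;
 * vpImage<unsigned char> I_grey;
 * double ts;
 * rs.acquire(I_color, &ts); // color frame + its timestamp
 * rs.acquire(I_grey);       // grey-level conversion of the color frame
 * \endcode
 */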

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared,
                           rs2::align *const align_to, double *ts)
{
  acquire(data_image, data_depth, data_pointCloud, data_infrared, NULL, align_to, ts);
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data); // "proccess" is the spelling used by older librealsense releases
#endif
  }

  if (data_image != NULL) {
    auto color_frame = data.get_color_frame();
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}
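
/*
 * Usage sketch (illustrative; resolutions, formats and buffer sizes are
 * assumptions): acquire raw color and depth data aligned to the color stream.
 *
 * \code
 * vpRealSense2 rs;
 * rs2::config cfg;
 * cfg.enable_stream(RS2_STREAM_COLOR, 640, 480, RS2_FORMAT_RGBA8, 30);
 * cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
 * rs.open(cfg);
 * std::vector<unsigned char> color(640 * 480 * 4); // RGBA8: 4 bytes/pixel
 * std::vector<unsigned char> depth(640 * 480 * 2); // Z16: 2 bytes/pixel
 * rs2::align align_to(RS2_STREAM_COLOR);
 * rs.acquire(color.data(), depth.data(), NULL, NULL, &align_to, NULL);
 * \endcode
 */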

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (left != NULL) {
    auto left_fisheye_frame = data.get_fisheye_frame(1);
    unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
    left->resize(height, width);
    getNativeFrameData(left_fisheye_frame, (*left).bitmap);
  }

  if (right != NULL) {
    auto right_fisheye_frame = data.get_fisheye_frame(2);
    unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
    right->resize(height, width);
    getNativeFrameData(right_fisheye_frame, (*right).bitmap);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
                           vpColVector *odo_vel, vpColVector *odo_acc, unsigned int *confidence, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (left != NULL) {
    auto left_fisheye_frame = data.get_fisheye_frame(1);
    unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
    left->resize(height, width);
    getNativeFrameData(left_fisheye_frame, (*left).bitmap);
  }

  if (right != NULL) {
    auto right_fisheye_frame = data.get_fisheye_frame(2);
    unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
    right->resize(height, width);
    getNativeFrameData(right_fisheye_frame, (*right).bitmap);
  }

  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }

  if (cMw != NULL) {
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    cMw->buildFrom(m_pos, m_quat);
  }

  if (odo_vel != NULL) {
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != NULL) {
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  if (confidence != NULL) {
    *confidence = pose_data.tracker_confidence;
  }
}

void vpRealSense2::acquire(vpImage<unsigned char> *left, vpImage<unsigned char> *right, vpHomogeneousMatrix *cMw,
                           vpColVector *odo_vel, vpColVector *odo_acc, vpColVector *imu_vel, vpColVector *imu_acc,
                           unsigned int *confidence, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (left != NULL) {
    auto left_fisheye_frame = data.get_fisheye_frame(1);
    unsigned int width = static_cast<unsigned int>(left_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(left_fisheye_frame.get_height());
    left->resize(height, width);
    getNativeFrameData(left_fisheye_frame, (*left).bitmap);
  }

  if (right != NULL) {
    auto right_fisheye_frame = data.get_fisheye_frame(2);
    unsigned int width = static_cast<unsigned int>(right_fisheye_frame.get_width());
    unsigned int height = static_cast<unsigned int>(right_fisheye_frame.get_height());
    right->resize(height, width);
    getNativeFrameData(right_fisheye_frame, (*right).bitmap);
  }

  auto pose_frame = data.first_or_default(RS2_STREAM_POSE);
  auto pose_data = pose_frame.as<rs2::pose_frame>().get_pose_data();

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }

  if (cMw != NULL) {
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    cMw->buildFrom(m_pos, m_quat);
  }

  if (odo_vel != NULL) {
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != NULL) {
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  auto accel_frame = data.first_or_default(RS2_STREAM_ACCEL);
  auto accel_data = accel_frame.as<rs2::motion_frame>().get_motion_data();

  if (imu_acc != NULL) {
    imu_acc->resize(3, false);
    (*imu_acc)[0] = static_cast<double>(accel_data.x);
    (*imu_acc)[1] = static_cast<double>(accel_data.y);
    (*imu_acc)[2] = static_cast<double>(accel_data.z);
  }

  auto gyro_frame = data.first_or_default(RS2_STREAM_GYRO);
  auto gyro_data = gyro_frame.as<rs2::motion_frame>().get_motion_data();

  if (imu_vel != NULL) {
    imu_vel->resize(3, false);
    (*imu_vel)[0] = static_cast<double>(gyro_data.x);
    (*imu_vel)[1] = static_cast<double>(gyro_data.y);
    (*imu_vel)[2] = static_cast<double>(gyro_data.z);
  }

  if (confidence != NULL) {
    *confidence = pose_data.tracker_confidence;
  }
}
#endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

#ifdef VISP_HAVE_PCL
void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared,
                           rs2::align *const align_to, double *ts)
{
  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  if (data_image != NULL) {
    auto color_frame = data.get_color_frame();
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    if (pointcloud != NULL) {
      getPointcloud(depth_frame, pointcloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared,
                           rs2::align *const align_to, double *ts)
{
  acquire(data_image, data_depth, data_pointCloud, pointcloud, data_infrared, NULL, align_to, ts);
}

void vpRealSense2::acquire(unsigned char *const data_image, unsigned char *const data_depth,
                           std::vector<vpColVector> *const data_pointCloud,
                           pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud, unsigned char *const data_infrared1,
                           unsigned char *const data_infrared2, rs2::align *const align_to, double *ts)
{
  auto data = m_pipe.wait_for_frames();
  if (align_to != NULL) {
    // Infrared stream is not aligned
    // see https://github.com/IntelRealSense/librealsense/issues/1556#issuecomment-384919994
#if (RS2_API_VERSION > ((2 * 10000) + (9 * 100) + 0))
    data = align_to->process(data);
#else
    data = align_to->proccess(data);
#endif
  }

  auto color_frame = data.get_color_frame();
  if (data_image != NULL) {
    getNativeFrameData(color_frame, data_image);
  }

  if (data_depth != NULL || data_pointCloud != NULL || pointcloud != NULL) {
    auto depth_frame = data.get_depth_frame();
    if (data_depth != NULL) {
      getNativeFrameData(depth_frame, data_depth);
    }

    if (data_pointCloud != NULL) {
      getPointcloud(depth_frame, *data_pointCloud);
    }

    if (pointcloud != NULL) {
      getPointcloud(depth_frame, color_frame, pointcloud);
    }
  }

  if (data_infrared1 != NULL) {
    auto infrared_frame = data.first(RS2_STREAM_INFRARED);
    getNativeFrameData(infrared_frame, data_infrared1);
  }

  if (data_infrared2 != NULL) {
    auto infrared_frame = data.get_infrared_frame(2);
    getNativeFrameData(infrared_frame, data_infrared2);
  }

  if (ts != NULL) {
    *ts = data.get_timestamp();
  }
}
#endif
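
/*
 * Usage sketch (illustrative, requires ViSP built with PCL; the default
 * pipeline settings are assumptions): fill a colored PCL point cloud from
 * the depth and color streams.
 *
 * \code
 * vpRealSense2 rs;
 * rs.open();
 * pcl::PointCloud<pcl::PointXYZRGB>::Ptr pc(new pcl::PointCloud<pcl::PointXYZRGB>);
 * rs.acquire(NULL, NULL, NULL, pc, NULL, NULL, NULL);
 * \endcode
 */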

void vpRealSense2::close()
{
  if (m_init) {
    m_pipe.stop();
    m_init = false;
  }
}

vpCameraParameters vpRealSense2::getCameraParameters(const rs2_stream &stream,
                                                     vpCameraParameters::vpCameraParametersProjType type,
                                                     int index) const
{
  auto rs_stream = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
  auto intrinsics = rs_stream.get_intrinsics();

  vpCameraParameters cam;
  double u0 = intrinsics.ppx;
  double v0 = intrinsics.ppy;
  double px = intrinsics.fx;
  double py = intrinsics.fy;

  switch (type) {
  case vpCameraParameters::perspectiveProjWithDistortion: {
    double kdu = intrinsics.coeffs[0];
    cam.initPersProjWithDistortion(px, py, u0, v0, -kdu, kdu);
  } break;

  case vpCameraParameters::ProjWithKannalaBrandtDistortion: {
    std::vector<double> tmp_coefs;
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[0]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[1]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[2]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[3]));
    tmp_coefs.push_back(static_cast<double>(intrinsics.coeffs[4]));

    cam.initProjWithKannalaBrandtDistortion(px, py, u0, v0, tmp_coefs);
  } break;

  case vpCameraParameters::perspectiveProjWithoutDistortion:
  default:
    cam.initPersProjWithoutDistortion(px, py, u0, v0);
    break;
  }

  return cam;
}
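
/*
 * Usage sketch (illustrative): retrieve ViSP camera parameters for the color
 * stream; the default projection model is perspectiveProjWithDistortion.
 *
 * \code
 * vpRealSense2 rs;
 * rs.open();
 * vpCameraParameters cam = rs.getCameraParameters(RS2_STREAM_COLOR);
 * \endcode
 */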

rs2_intrinsics vpRealSense2::getIntrinsics(const rs2_stream &stream, int index) const
{
  auto vsp = m_pipelineProfile.get_stream(stream, index).as<rs2::video_stream_profile>();
  return vsp.get_intrinsics();
}

void vpRealSense2::getColorFrame(const rs2::frame &frame, vpImage<vpRGBa> &color)
{
  auto vf = frame.as<rs2::video_frame>();
  unsigned int width = (unsigned int)vf.get_width();
  unsigned int height = (unsigned int)vf.get_height();
  color.resize(height, width);

  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
    vpImageConvert::RGBToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              reinterpret_cast<unsigned char *>(color.bitmap), width, height);
  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
    memcpy(reinterpret_cast<unsigned char *>(color.bitmap),
           const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
           width * height * sizeof(vpRGBa));
  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
    vpImageConvert::BGRToRGBa(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              reinterpret_cast<unsigned char *>(color.bitmap), width, height);
  } else {
    throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
  }
}

float vpRealSense2::getDepthScale()
{
  if (!m_init) { // If pipe is not yet created, create it. Otherwise, we already know the depth scale.
    rs2::pipeline *pipe = new rs2::pipeline;
    rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
    *pipelineProfile = pipe->start();

    rs2::device dev = pipelineProfile->get_device();

    // Go over the device's sensors
    for (rs2::sensor &sensor : dev.query_sensors()) {
      // Check if the sensor is a depth sensor
      if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
        m_depthScale = dpt.get_depth_scale();
      }
    }

    pipe->stop();
    delete pipe;
    delete pipelineProfile;
  }

  return m_depthScale;
}
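
/*
 * Usage sketch (illustrative): the depth scale converts raw Z16 values to
 * meters (typically 0.001 on D400 devices), i.e. depth_in_meters = scale * z16.
 *
 * \code
 * vpRealSense2 rs;
 * rs.open();
 * float scale = rs.getDepthScale();
 * \endcode
 */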

void vpRealSense2::getGreyFrame(const rs2::frame &frame, vpImage<unsigned char> &grey)
{
  auto vf = frame.as<rs2::video_frame>();
  unsigned int width = (unsigned int)vf.get_width();
  unsigned int height = (unsigned int)vf.get_height();
  grey.resize(height, width);

  if (frame.get_profile().format() == RS2_FORMAT_RGB8) {
    vpImageConvert::RGBToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              grey.bitmap, width, height);
  } else if (frame.get_profile().format() == RS2_FORMAT_RGBA8) {
    vpImageConvert::RGBaToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                               grey.bitmap, width * height);
  } else if (frame.get_profile().format() == RS2_FORMAT_BGR8) {
    vpImageConvert::BGRToGrey(const_cast<unsigned char *>(static_cast<const unsigned char *>(frame.get_data())),
                              grey.bitmap, width, height);
  } else {
    throw vpException(vpException::fatalError, "RealSense Camera - color stream not supported!");
  }
}

void vpRealSense2::getNativeFrameData(const rs2::frame &frame, unsigned char *const data)
{
  auto vf = frame.as<rs2::video_frame>();
  int size = vf.get_width() * vf.get_height();

  switch (frame.get_profile().format()) {
  case RS2_FORMAT_RGB8:
  case RS2_FORMAT_BGR8:
    memcpy(data, frame.get_data(), size * 3);
    break;

  case RS2_FORMAT_RGBA8:
  case RS2_FORMAT_BGRA8:
    memcpy(data, frame.get_data(), size * 4);
    break;

  case RS2_FORMAT_Y16:
  case RS2_FORMAT_Z16:
    memcpy(data, frame.get_data(), size * 2);
    break;

  case RS2_FORMAT_Y8:
    memcpy(data, frame.get_data(), size);
    break;

  default:
    break;
  }
}

void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, std::vector<vpColVector> &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud.resize((size_t)(width * height));

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

// Multi-threading if OpenMP
// Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud[(size_t)depth_pixel_index].resize(4, false);
        pointcloud[(size_t)depth_pixel_index][0] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][1] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][2] = m_invalidDepthValue;
        pointcloud[(size_t)depth_pixel_index][3] = 1.0;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float points[3];
      const float pixel[] = {(float)j, (float)i};
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud[(size_t)depth_pixel_index].resize(4, false);
      pointcloud[(size_t)depth_pixel_index][0] = points[0];
      pointcloud[(size_t)depth_pixel_index][1] = points[1];
      pointcloud[(size_t)depth_pixel_index][2] = points[2];
      pointcloud[(size_t)depth_pixel_index][3] = 1.0;
    }
  }
}
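
/*
 * Note: for an undistorted pinhole stream, rs2_deproject_pixel_to_point()
 * reduces to the back-projection
 *   X = Z * (u - ppx) / fx,  Y = Z * (v - ppy) / fy,  Z = m_depthScale * z16
 * where (u, v) is the pixel, (ppx, ppy) the principal point and (fx, fy) the
 * focal lengths in pixels; distortion models add a correction on top of this.
 */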

#ifdef VISP_HAVE_PCL
void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, pcl::PointCloud<pcl::PointXYZ>::Ptr &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    std::stringstream ss;
    ss << "Error, depth scale <= 0: " << m_depthScale;
    throw vpException(vpException::fatalError, ss.str());
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int width = vf.get_width();
  const int height = vf.get_height();
  pointcloud->width = (uint32_t)width;
  pointcloud->height = (uint32_t)height;
  pointcloud->resize((size_t)(width * height));

#if MANUAL_POINTCLOUD // faster to compute manually when tested
  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < height; i++) {
    auto depth_pixel_index = i * width;

    for (int j = 0; j < width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)(depth_pixel_index)].x = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].y = m_invalidDepthValue;
        pointcloud->points[(size_t)(depth_pixel_index)].z = m_invalidDepthValue;
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float points[3];
      const float pixel[] = {(float)j, (float)i};
      rs2_deproject_pixel_to_point(points, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z)
        points[0] = points[1] = points[2] = m_invalidDepthValue;

      pointcloud->points[(size_t)(depth_pixel_index)].x = points[0];
      pointcloud->points[(size_t)(depth_pixel_index)].y = points[1];
      pointcloud->points[(size_t)(depth_pixel_index)].z = points[2];
    }
  }
#else
  m_points = m_pointcloud.calculate(depth_frame);
  auto vertices = m_points.get_vertices();

  for (size_t i = 0; i < m_points.size(); i++) {
    if (vertices[i].z <= 0.0f || vertices[i].z > m_max_Z) {
      pointcloud->points[i].x = m_invalidDepthValue;
      pointcloud->points[i].y = m_invalidDepthValue;
      pointcloud->points[i].z = m_invalidDepthValue;
    } else {
      pointcloud->points[i].x = vertices[i].x;
      pointcloud->points[i].y = vertices[i].y;
      pointcloud->points[i].z = vertices[i].z;
    }
  }
#endif
}

void vpRealSense2::getPointcloud(const rs2::depth_frame &depth_frame, const rs2::frame &color_frame,
                                 pcl::PointCloud<pcl::PointXYZRGB>::Ptr &pointcloud)
{
  if (m_depthScale <= std::numeric_limits<float>::epsilon()) {
    throw vpException(vpException::fatalError, "Error, depth scale <= 0: %f", m_depthScale);
  }

  auto vf = depth_frame.as<rs2::video_frame>();
  const int depth_width = vf.get_width();
  const int depth_height = vf.get_height();
  pointcloud->width = static_cast<uint32_t>(depth_width);
  pointcloud->height = static_cast<uint32_t>(depth_height);
  pointcloud->resize(static_cast<uint32_t>(depth_width * depth_height));

  vf = color_frame.as<rs2::video_frame>();
  const int color_width = vf.get_width();
  const int color_height = vf.get_height();

  const uint16_t *p_depth_frame = reinterpret_cast<const uint16_t *>(depth_frame.get_data());
  const rs2_extrinsics depth2ColorExtrinsics =
      depth_frame.get_profile().as<rs2::video_stream_profile>().get_extrinsics_to(
          color_frame.get_profile().as<rs2::video_stream_profile>());
  const rs2_intrinsics depth_intrinsics = depth_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();
  const rs2_intrinsics color_intrinsics = color_frame.get_profile().as<rs2::video_stream_profile>().get_intrinsics();

  auto color_format = color_frame.as<rs2::video_frame>().get_profile().format();
  const bool swap_rb = color_format == RS2_FORMAT_BGR8 || color_format == RS2_FORMAT_BGRA8;
  unsigned int nb_color_pixel = (color_format == RS2_FORMAT_RGB8 || color_format == RS2_FORMAT_BGR8) ? 3 : 4;
  const unsigned char *p_color_frame = static_cast<const unsigned char *>(color_frame.get_data());
  rs2_extrinsics identity;
  memset(identity.rotation, 0, sizeof(identity.rotation));
  memset(identity.translation, 0, sizeof(identity.translation));
  for (int i = 0; i < 3; i++) {
    identity.rotation[i * 3 + i] = 1;
  }
  const bool registered_streams =
      (depth2ColorExtrinsics == identity) && (color_width == depth_width) && (color_height == depth_height);

  // Multi-threading if OpenMP
  // Concurrent writes at different locations are safe
#pragma omp parallel for schedule(dynamic)
  for (int i = 0; i < depth_height; i++) {
    auto depth_pixel_index = i * depth_width;

    for (int j = 0; j < depth_width; j++, depth_pixel_index++) {
      if (p_depth_frame[depth_pixel_index] == 0) {
        pointcloud->points[(size_t)depth_pixel_index].x = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].y = m_invalidDepthValue;
        pointcloud->points[(size_t)depth_pixel_index].z = m_invalidDepthValue;

        // For out of bounds color data, default to a shade of blue in order to
        // visually distinguish holes. This color value is same as the librealsense
        // out of bounds color value.
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        unsigned int r = 96, g = 157, b = 198;
        uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));

        pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
        pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
        pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
        pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
#endif
        continue;
      }

      // Get the depth value of the current pixel
      auto pixels_distance = m_depthScale * p_depth_frame[depth_pixel_index];

      float depth_point[3];
      const float pixel[] = {(float)j, (float)i};
      rs2_deproject_pixel_to_point(depth_point, &depth_intrinsics, pixel, pixels_distance);

      if (pixels_distance > m_max_Z) {
        depth_point[0] = depth_point[1] = depth_point[2] = m_invalidDepthValue;
      }

      pointcloud->points[(size_t)depth_pixel_index].x = depth_point[0];
      pointcloud->points[(size_t)depth_pixel_index].y = depth_point[1];
      pointcloud->points[(size_t)depth_pixel_index].z = depth_point[2];

      if (!registered_streams) {
        float color_point[3];
        rs2_transform_point_to_point(color_point, &depth2ColorExtrinsics, depth_point);
        float color_pixel[2];
        rs2_project_point_to_pixel(color_pixel, &color_intrinsics, color_point);

        if (color_pixel[1] < 0 || color_pixel[1] >= color_height || color_pixel[0] < 0 ||
            color_pixel[0] >= color_width) {
          // For out of bounds color data, default to a shade of blue in order to
          // visually distinguish holes. This color value is same as the librealsense
          // out of bounds color value.
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          unsigned int r = 96, g = 157, b = 198;
          uint32_t rgb = (static_cast<uint32_t>(r) << 16 | static_cast<uint32_t>(g) << 8 | static_cast<uint32_t>(b));

          pointcloud->points[(size_t)depth_pixel_index].rgb = *reinterpret_cast<float *>(&rgb);
#else
          pointcloud->points[(size_t)depth_pixel_index].r = (uint8_t)96;
          pointcloud->points[(size_t)depth_pixel_index].g = (uint8_t)157;
          pointcloud->points[(size_t)depth_pixel_index].b = (uint8_t)198;
#endif
        } else {
          unsigned int i_ = (unsigned int)color_pixel[1];
          unsigned int j_ = (unsigned int)color_pixel[0];

#if PCL_VERSION_COMPARE(<, 1, 1, 0)
          uint32_t rgb = 0;
          if (swap_rb) {
            rgb =
                (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]) << 16);
          } else {
            rgb =
                (static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel]) << 16 |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2]));
          }

          pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
          if (swap_rb) {
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          } else {
            pointcloud->points[(size_t)depth_pixel_index].r =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel];
            pointcloud->points[(size_t)depth_pixel_index].g =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 1];
            pointcloud->points[(size_t)depth_pixel_index].b =
                p_color_frame[(i_ * (unsigned int)color_width + j_) * nb_color_pixel + 2];
          }
#endif
        }
      } else {
#if PCL_VERSION_COMPARE(<, 1, 1, 0)
        uint32_t rgb = 0;
        if (swap_rb) {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]) << 16);
        } else {
          rgb = (static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel]) << 16 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1]) << 8 |
                 static_cast<uint32_t>(p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2]));
        }

        pointcloud->points[(size_t)(i * depth_width + j)].rgb = *reinterpret_cast<float *>(&rgb);
#else
        if (swap_rb) {
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        } else {
          pointcloud->points[(size_t)depth_pixel_index].r =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel];
          pointcloud->points[(size_t)depth_pixel_index].g =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 1];
          pointcloud->points[(size_t)depth_pixel_index].b =
              p_color_frame[(i * (unsigned int)color_width + j) * nb_color_pixel + 2];
        }
#endif
      }
    }
  }
}

#endif

vpHomogeneousMatrix vpRealSense2::getTransformation(const rs2_stream &from, const rs2_stream &to, int from_index) const
{
  int to_index = -1;

  if (from_index != -1) // If we have to specify indices for streams. (Ex.: T265 device having 2 fisheyes)
  {
    if (from_index == 1) // From left => To right.
      to_index = 2;
    else if (from_index == 2) // From right => To left.
      to_index = 1;
  }

  auto from_stream = m_pipelineProfile.get_stream(from, from_index);
  auto to_stream = m_pipelineProfile.get_stream(to, to_index);

  rs2_extrinsics extrinsics = from_stream.get_extrinsics_to(to_stream);

  vpTranslationVector t;
  vpRotationMatrix R;
  for (unsigned int i = 0; i < 3; i++) {
    t[i] = extrinsics.translation[i];
    for (unsigned int j = 0; j < 3; j++)
      R[i][j] = extrinsics.rotation[j * 3 + i]; // rotation is column-major order
  }

  vpHomogeneousMatrix to_M_from(t, R);
  return to_M_from;
}
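
/*
 * Usage sketch (illustrative): get the extrinsic transformation between two
 * streams, e.g. from the depth frame to the color frame.
 *
 * \code
 * vpRealSense2 rs;
 * rs.open();
 * vpHomogeneousMatrix color_M_depth = rs.getTransformation(RS2_STREAM_DEPTH, RS2_STREAM_COLOR);
 * \endcode
 */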

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
unsigned int vpRealSense2::getOdometryData(vpHomogeneousMatrix *cMw, vpColVector *odo_vel, vpColVector *odo_acc,
                                           double *ts)
{
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_POSE);
  auto pose_data = f.as<rs2::pose_frame>().get_pose_data();

  if (ts != NULL)
    *ts = frame.get_timestamp();

  if (cMw != NULL) {
    m_pos[0] = static_cast<double>(pose_data.translation.x);
    m_pos[1] = static_cast<double>(pose_data.translation.y);
    m_pos[2] = static_cast<double>(pose_data.translation.z);

    m_quat[0] = static_cast<double>(pose_data.rotation.x);
    m_quat[1] = static_cast<double>(pose_data.rotation.y);
    m_quat[2] = static_cast<double>(pose_data.rotation.z);
    m_quat[3] = static_cast<double>(pose_data.rotation.w);

    cMw->buildFrom(m_pos, m_quat);
  }

  if (odo_vel != NULL) {
    odo_vel->resize(6, false);
    (*odo_vel)[0] = static_cast<double>(pose_data.velocity.x);
    (*odo_vel)[1] = static_cast<double>(pose_data.velocity.y);
    (*odo_vel)[2] = static_cast<double>(pose_data.velocity.z);
    (*odo_vel)[3] = static_cast<double>(pose_data.angular_velocity.x);
    (*odo_vel)[4] = static_cast<double>(pose_data.angular_velocity.y);
    (*odo_vel)[5] = static_cast<double>(pose_data.angular_velocity.z);
  }

  if (odo_acc != NULL) {
    odo_acc->resize(6, false);
    (*odo_acc)[0] = static_cast<double>(pose_data.acceleration.x);
    (*odo_acc)[1] = static_cast<double>(pose_data.acceleration.y);
    (*odo_acc)[2] = static_cast<double>(pose_data.acceleration.z);
    (*odo_acc)[3] = static_cast<double>(pose_data.angular_acceleration.x);
    (*odo_acc)[4] = static_cast<double>(pose_data.angular_acceleration.y);
    (*odo_acc)[5] = static_cast<double>(pose_data.angular_acceleration.z);
  }

  return pose_data.tracker_confidence;
}
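
/*
 * Usage sketch (illustrative, for a tracking device such as the T265; the
 * stream configuration is an assumption): poll the camera pose expressed in
 * the odometry frame.
 *
 * \code
 * vpRealSense2 rs;
 * rs2::config cfg;
 * cfg.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);
 * rs.open(cfg);
 * vpHomogeneousMatrix cMw;
 * vpColVector odo_vel, odo_acc;
 * unsigned int confidence = rs.getOdometryData(&cMw, &odo_vel, &odo_acc);
 * \endcode
 */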

void vpRealSense2::getIMUAcceleration(vpColVector *imu_acc, double *ts)
{
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_ACCEL);
  auto imu_acc_data = f.as<rs2::motion_frame>().get_motion_data();

  if (ts != NULL)
    *ts = f.get_timestamp();

  if (imu_acc != NULL) {
    imu_acc->resize(3, false);
    (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
    (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
    (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
  }
}

void vpRealSense2::getIMUVelocity(vpColVector *imu_vel, double *ts)
{
  auto frame = m_pipe.wait_for_frames();
  auto f = frame.first_or_default(RS2_STREAM_GYRO);
  auto imu_vel_data = f.as<rs2::motion_frame>().get_motion_data();

  if (ts != NULL)
    *ts = f.get_timestamp();

  if (imu_vel != NULL) {
    imu_vel->resize(3, false);
    (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
    (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
    (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
  }
}

void vpRealSense2::getIMUData(vpColVector *imu_acc, vpColVector *imu_vel, double *ts)
{
  auto data = m_pipe.wait_for_frames();

  if (ts != NULL)
    *ts = data.get_timestamp();

  if (imu_acc != NULL) {
    auto acc_data = data.first_or_default(RS2_STREAM_ACCEL);
    auto imu_acc_data = acc_data.as<rs2::motion_frame>().get_motion_data();

    imu_acc->resize(3, false);
    (*imu_acc)[0] = static_cast<double>(imu_acc_data.x);
    (*imu_acc)[1] = static_cast<double>(imu_acc_data.y);
    (*imu_acc)[2] = static_cast<double>(imu_acc_data.z);
  }

  if (imu_vel != NULL) {
    auto vel_data = data.first_or_default(RS2_STREAM_GYRO);
    auto imu_vel_data = vel_data.as<rs2::motion_frame>().get_motion_data();

    imu_vel->resize(3, false);
    (*imu_vel)[0] = static_cast<double>(imu_vel_data.x);
    (*imu_vel)[1] = static_cast<double>(imu_vel_data.y);
    (*imu_vel)[2] = static_cast<double>(imu_vel_data.z);
  }
}
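
/*
 * Usage sketch (illustrative; the IMU stream configuration is an assumption):
 * read accelerometer and gyroscope samples from a single frameset.
 *
 * \code
 * vpRealSense2 rs;
 * rs2::config cfg;
 * cfg.enable_stream(RS2_STREAM_ACCEL);
 * cfg.enable_stream(RS2_STREAM_GYRO);
 * rs.open(cfg);
 * vpColVector imu_acc, imu_vel;
 * double ts;
 * rs.getIMUData(&imu_acc, &imu_vel, &ts);
 * \endcode
 */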
#endif // #if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))

bool vpRealSense2::open(const rs2::config &cfg)
{
  if (m_init) {
    close();
  }

  m_pipelineProfile = m_pipe.start(cfg);

  rs2::device dev = m_pipelineProfile.get_device();

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  // Query device product line D400/SR300/L500/T200
  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
#endif

  // Go over the device's sensors
  for (rs2::sensor &sensor : dev.query_sensors()) {
    // Check if the sensor is a depth sensor
    if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
      m_depthScale = dpt.get_depth_scale();
    }
  }

  m_init = true;
  return m_init;
}

bool vpRealSense2::open(const rs2::config &cfg, std::function<void(rs2::frame)> &callback)
{
  if (m_init) {
    close();
  }

  m_pipelineProfile = m_pipe.start(cfg, callback);

  rs2::device dev = m_pipelineProfile.get_device();

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  // Query device product line D400/SR300/L500/T200
  m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
#endif

  // Go over the device's sensors
  for (rs2::sensor &sensor : dev.query_sensors()) {
    // Check if the sensor is a depth sensor
    if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>()) {
      m_depthScale = dpt.get_depth_scale();
    }
  }

  m_init = true;
  return m_init;
}
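
/*
 * Usage sketch (illustrative): start streaming with a user callback instead
 * of polling; the lambda receives every frame or frameset produced.
 *
 * \code
 * vpRealSense2 rs;
 * std::function<void(rs2::frame)> callback = [](rs2::frame frame) {
 *   if (rs2::frameset fs = frame.as<rs2::frameset>()) {
 *     // fs contains synchronized frames (e.g. color + depth)
 *   }
 * };
 * rs.open(rs2::config(), callback);
 * \endcode
 */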

std::string vpRealSense2::getProductLine()
{
#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
  if (!m_init) { // If pipe is not already created, create it. Otherwise, we have already determined the product line
    rs2::pipeline *pipe = new rs2::pipeline;
    rs2::pipeline_profile *pipelineProfile = new rs2::pipeline_profile;
    *pipelineProfile = pipe->start();

    rs2::device dev = pipelineProfile->get_device();

#if (RS2_API_VERSION > ((2 * 10000) + (31 * 100) + 0))
    // Query device product line D400/SR300/L500/T200
    m_product_line = dev.get_info(RS2_CAMERA_INFO_PRODUCT_LINE);
#endif

    pipe->stop();
    delete pipe;
    delete pipelineProfile;
  }

  return m_product_line;
#else
  return (std::string("unknown"));
#endif
}

namespace
{
// Helper functions to print information about the RealSense device
void print(const rs2_extrinsics &extrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << "Rotation Matrix:\n";

  for (auto i = 0; i < 3; ++i) {
    for (auto j = 0; j < 3; ++j) {
      ss << std::left << std::setw(15) << std::setprecision(5) << extrinsics.rotation[j * 3 + i];
    }
    ss << std::endl;
  }

  ss << "\nTranslation Vector: ";
  for (size_t i = 0; i < sizeof(extrinsics.translation) / sizeof(extrinsics.translation[0]); ++i)
    ss << std::setprecision(15) << extrinsics.translation[i] << " ";

  os << ss.str() << "\n\n";
}

void print(const rs2_intrinsics &intrinsics, std::ostream &os)
{
  std::stringstream ss;
  ss << std::left << std::setw(14) << "Width: "
     << "\t" << intrinsics.width << "\n"
     << std::left << std::setw(14) << "Height: "
     << "\t" << intrinsics.height << "\n"
     << std::left << std::setw(14) << "PPX: "
     << "\t" << std::setprecision(15) << intrinsics.ppx << "\n"
     << std::left << std::setw(14) << "PPY: "
     << "\t" << std::setprecision(15) << intrinsics.ppy << "\n"
     << std::left << std::setw(14) << "Fx: "
     << "\t" << std::setprecision(15) << intrinsics.fx << "\n"
     << std::left << std::setw(14) << "Fy: "
     << "\t" << std::setprecision(15) << intrinsics.fy << "\n"
     << std::left << std::setw(14) << "Distortion: "
     << "\t" << rs2_distortion_to_string(intrinsics.model) << "\n"
     << std::left << std::setw(14) << "Coeffs: ";

  for (size_t i = 0; i < sizeof(intrinsics.coeffs) / sizeof(intrinsics.coeffs[0]); ++i)
    ss << "\t" << std::setprecision(15) << intrinsics.coeffs[i] << " ";

  os << ss.str() << "\n\n";
}

void safe_get_intrinsics(const rs2::video_stream_profile &profile, rs2_intrinsics &intrinsics)
{
  try {
    intrinsics = profile.get_intrinsics();
  } catch (...) {
  }
}

bool operator==(const rs2_intrinsics &lhs, const rs2_intrinsics &rhs)
{
  return lhs.width == rhs.width && lhs.height == rhs.height && lhs.ppx == rhs.ppx && lhs.ppy == rhs.ppy &&
         lhs.fx == rhs.fx && lhs.fy == rhs.fy && lhs.model == rhs.model &&
         !std::memcmp(lhs.coeffs, rhs.coeffs, sizeof(rhs.coeffs));
}

std::string get_str_formats(const std::set<rs2_format> &formats)
{
  std::stringstream ss;
  for (auto format = formats.begin(); format != formats.end(); ++format) {
    ss << *format << ((format != formats.end()) && (next(format) == formats.end()) ? "" : "/");
  }
  return ss.str();
}

struct stream_and_resolution {
  rs2_stream stream;
  int stream_index;
  int width;
  int height;
  std::string stream_name;

  bool operator<(const stream_and_resolution &obj) const
  {
    return (std::make_tuple(stream, stream_index, width, height) <
            std::make_tuple(obj.stream, obj.stream_index, obj.width, obj.height));
  }
};

struct stream_and_index {
  rs2_stream stream;
  int stream_index;

  bool operator<(const stream_and_index &obj) const
  {
    return (std::make_tuple(stream, stream_index) < std::make_tuple(obj.stream, obj.stream_index));
  }
};
} // anonymous namespace

std::ostream &operator<<(std::ostream &os, const vpRealSense2 &rs)
{
  rs2::device dev = rs.m_pipelineProfile.get_device();
  os << std::left << std::setw(30) << dev.get_info(RS2_CAMERA_INFO_NAME) << std::setw(20)
     << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::setw(20) << dev.get_info(RS2_CAMERA_INFO_FIRMWARE_VERSION)
     << std::endl;

  // Show which options are supported by this device
  os << " Device info: \n";
  for (auto j = 0; j < RS2_CAMERA_INFO_COUNT; ++j) {
    auto param = static_cast<rs2_camera_info>(j);
    if (dev.supports(param))
      os << "    " << std::left << std::setw(30) << rs2_camera_info_to_string(rs2_camera_info(param)) << ": \t"
         << dev.get_info(param) << "\n";
  }

  os << "\n";

  for (auto &&sensor : dev.query_sensors()) {
    os << "Options for " << sensor.get_info(RS2_CAMERA_INFO_NAME) << std::endl;

    os << std::setw(55) << " Supported options:" << std::setw(10) << "min" << std::setw(10) << " max" << std::setw(6)
       << " step" << std::setw(10) << " default" << std::endl;
    for (auto j = 0; j < RS2_OPTION_COUNT; ++j) {
      auto opt = static_cast<rs2_option>(j);
      if (sensor.supports(opt)) {
        auto range = sensor.get_option_range(opt);
        os << "    " << std::left << std::setw(50) << opt << " : " << std::setw(5) << range.min << "... "
           << std::setw(12) << range.max << std::setw(6) << range.step << std::setw(10) << range.def << "\n";
      }
    }

    os << "\n";
  }

  for (auto &&sensor : dev.query_sensors()) {
    os << "Stream Profiles supported by " << sensor.get_info(RS2_CAMERA_INFO_NAME) << "\n";

    os << std::setw(55) << " Supported modes:" << std::setw(10) << "stream" << std::setw(10) << " resolution"
       << std::setw(6) << " fps" << std::setw(10) << " format"
       << "\n";
    // Show which streams are supported by this device
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        os << "    " << profile.stream_name() << "\t  " << video.width() << "x" << video.height() << "\t@ "
           << profile.fps() << "Hz\t" << profile.format() << "\n";
      } else {
        os << "    " << profile.stream_name() << "\t@ " << profile.fps() << "Hz\t" << profile.format() << "\n";
      }
    }

    os << "\n";
  }

  std::map<stream_and_index, rs2::stream_profile> streams;
  std::map<stream_and_resolution, std::vector<std::pair<std::set<rs2_format>, rs2_intrinsics> > > intrinsics_map;
  for (auto &&sensor : dev.query_sensors()) {
    // Intrinsics
    for (auto &&profile : sensor.get_stream_profiles()) {
      if (auto video = profile.as<rs2::video_stream_profile>()) {
        if (streams.find(stream_and_index{profile.stream_type(), profile.stream_index()}) == streams.end()) {
          streams[stream_and_index{profile.stream_type(), profile.stream_index()}] = profile;
        }

        rs2_intrinsics intrinsics{};
        stream_and_resolution stream_res{profile.stream_type(), profile.stream_index(), video.width(), video.height(),
                                         profile.stream_name()};
        safe_get_intrinsics(video, intrinsics);
        auto it = std::find_if(
            (intrinsics_map[stream_res]).begin(), (intrinsics_map[stream_res]).end(),
            [&](const std::pair<std::set<rs2_format>, rs2_intrinsics> &kvp) { return intrinsics == kvp.second; });
        if (it == (intrinsics_map[stream_res]).end()) {
          (intrinsics_map[stream_res]).push_back({{profile.format()}, intrinsics});
        } else {
          it->first.insert(profile.format()); // If the intrinsics are equal,
                                              // add the profile format to the
                                              // format set
        }
      }
    }
  }

  os << "Provided Intrinsic:\n";
  for (auto &kvp : intrinsics_map) {
    auto stream_res = kvp.first;
    for (auto &intrinsics : kvp.second) {
      auto formats = get_str_formats(intrinsics.first);
      os << "Intrinsic of \"" << stream_res.stream_name << "\"\t  " << stream_res.width << "x" << stream_res.height
         << "\t  " << formats << "\n";
      if (intrinsics.second == rs2_intrinsics{}) {
        os << "Intrinsic NOT available!\n\n";
      } else {
        print(intrinsics.second, os);
      }
    }
  }

  // Print Extrinsics
  os << "\nProvided Extrinsic:\n";
  for (auto kvp1 = streams.begin(); kvp1 != streams.end(); ++kvp1) {
    for (auto kvp2 = streams.begin(); kvp2 != streams.end(); ++kvp2) {
      os << "Extrinsic from \"" << kvp1->second.stream_name() << "\"\t  "
         << "To"
         << "\t  \"" << kvp2->second.stream_name() << "\"\n";
      auto extrinsics = kvp1->second.get_extrinsics_to(kvp2->second);
      print(extrinsics, os);
    }
  }

  return os;
}
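
/*
 * Usage sketch (illustrative): dump device information, supported options,
 * stream profiles, intrinsics and extrinsics to the console.
 *
 * \code
 * vpRealSense2 rs;
 * rs.open();
 * std::cout << rs << std::endl;
 * \endcode
 */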

#elif !defined(VISP_BUILD_SHARED_LIBS)
// Work around to avoid warning: libvisp_sensor.a(vpRealSense2.cpp.o) has no
// symbols
void dummy_vpRealSense2(){};
#endif