ROS1Visualizer::ROS1Visualizer(std::shared_ptr<ros::NodeHandle> nh, std::shared_ptr<VioManager> app, std::shared_ptr<Simulator> sim)
    : _nh(nh), _app(app), _sim(sim), thread_update_running(false) {
  mTfBr = std::make_shared<tf::TransformBroadcaster>();
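  // Advertise the estimator outputs: IMU pose/odometry/path, the MSCKF/SLAM/ARUCO (and simulated)
  // feature clouds, groundtruth pose/path, and the loop-closure pose and feature topics.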
  pub_poseimu = nh->advertise<geometry_msgs::PoseWithCovarianceStamped>("poseimu", 2);
  pub_odomimu = nh->advertise<nav_msgs::Odometry>("odomimu", 2);
  pub_pathimu = nh->advertise<nav_msgs::Path>("pathimu", 2);
  pub_points_msckf = nh->advertise<sensor_msgs::PointCloud2>("points_msckf", 2);
  pub_points_slam = nh->advertise<sensor_msgs::PointCloud2>("points_slam", 2);
  pub_points_aruco = nh->advertise<sensor_msgs::PointCloud2>("points_aruco", 2);
  pub_points_sim = nh->advertise<sensor_msgs::PointCloud2>("points_sim", 2);
  pub_posegt = nh->advertise<geometry_msgs::PoseStamped>("posegt", 2);
  pub_pathgt = nh->advertise<nav_msgs::Path>("pathgt", 2);
  pub_loop_pose = nh->advertise<nav_msgs::Odometry>("loop_pose", 2);
  pub_loop_point = nh->advertise<sensor_msgs::PointCloud>("loop_feats", 2);
  if (nh->hasParam("path_gt") && _sim == nullptr) {
    std::string path_to_gt;
    nh->param<std::string>("path_gt", path_to_gt, "");
    if (!path_to_gt.empty()) {
      DatasetReader::load_gt_file(path_to_gt, gt_states);
      PRINT_DEBUG("gt file path is: %s\n", path_to_gt.c_str());
    }
  }
  std::string filepath_est, filepath_std, filepath_gt;
  nh->param<std::string>("filepath_est", filepath_est, "state_estimate.txt");
  nh->param<std::string>("filepath_std", filepath_std, "state_deviation.txt");
  nh->param<std::string>("filepath_gt", filepath_gt, "state_groundtruth.txt");
  if (boost::filesystem::exists(filepath_est))
    boost::filesystem::remove(filepath_est);
  if (boost::filesystem::exists(filepath_std))
    boost::filesystem::remove(filepath_std);
  boost::filesystem::create_directories(boost::filesystem::path(filepath_est.c_str()).parent_path());
  boost::filesystem::create_directories(boost::filesystem::path(filepath_std.c_str()).parent_path());
  of_state_est << "# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc"
               << std::endl;
  of_state_std << "# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc"
               << std::endl;
  if (_sim != nullptr) {
    if (boost::filesystem::exists(filepath_gt))
      boost::filesystem::remove(filepath_gt);
    boost::filesystem::create_directories(boost::filesystem::path(filepath_gt.c_str()).parent_path());
    of_state_gt << "# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc"
                << std::endl;
  }
  if (_app->get_params().use_multi_threading_pubs) {
    std::thread thread([&] {
  assert(parser != nullptr);
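  // Subscriber setup: the IMU and camera topics are read from ROS parameters and can be
  // overridden by the external configuration file through the parser before subscribing.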
  std::string topic_imu;
  _nh->param<std::string>("topic_imu", topic_imu, "/imu0");
  parser->parse_external("relative_config_imu", "imu0", "rostopic", topic_imu);
  PRINT_INFO("subscribing to IMU: %s\n", topic_imu.c_str());
  if (_app->get_params().state_options.num_cameras == 2) {
    std::string cam_topic0, cam_topic1;
    _nh->param<std::string>("topic_camera" + std::to_string(0), cam_topic0, "/cam" + std::to_string(0) + "/image_raw");
    _nh->param<std::string>("topic_camera" + std::to_string(1), cam_topic1, "/cam" + std::to_string(1) + "/image_raw");
    parser->parse_external("relative_config_imucam", "cam" + std::to_string(0), "rostopic", cam_topic0);
    parser->parse_external("relative_config_imucam", "cam" + std::to_string(1), "rostopic", cam_topic1);
    auto image_sub0 = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(*_nh, cam_topic0, 1);
    auto image_sub1 = std::make_shared<message_filters::Subscriber<sensor_msgs::Image>>(*_nh, cam_topic1, 1);
    auto sync = std::make_shared<message_filters::Synchronizer<sync_pol>>(sync_pol(10), *image_sub0, *image_sub1);
    PRINT_INFO("subscribing to cam (stereo): %s\n", cam_topic0.c_str());
    PRINT_INFO("subscribing to cam (stereo): %s\n", cam_topic1.c_str());
  for (int i = 0; i < _app->get_params().state_options.num_cameras; i++) {
    std::string cam_topic;
    _nh->param<std::string>("topic_camera" + std::to_string(i), cam_topic, "/cam" + std::to_string(i) + "/image_raw");
    parser->parse_external("relative_config_imucam", "cam" + std::to_string(i), "rostopic", cam_topic);
    PRINT_INFO("subscribing to cam (mono): %s\n", cam_topic.c_str());
  if (!_app->get_params().use_multi_threading_pubs)
    // ...
  if (!_app->initialized())
    // ...
  rT1 = boost::posix_time::microsec_clock::local_time();
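  // Low-latency odometry: instead of waiting for the next full filter update, the current state
  // is propagated forward to the requested IMU timestamp with the fast propagator, yielding a
  // 13-element vector (q, p, v, w) and its 12x12 covariance.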
  if (!_app->initialized())
    // ...
  std::shared_ptr<State> state = _app->get_state();
  Eigen::Matrix<double, 13, 1> state_plus = Eigen::Matrix<double, 13, 1>::Zero();
  Eigen::Matrix<double, 12, 12> cov_plus = Eigen::Matrix<double, 12, 12>::Zero();
  if (!_app->get_propagator()->fast_state_propagate(state, timestamp, state_plus, cov_plus))
    // ...
  nav_msgs::Odometry odomIinM;
  odomIinM.header.stamp = ros::Time(timestamp);
  odomIinM.header.frame_id = "global";
  odomIinM.pose.pose.orientation.x = state_plus(0);
  odomIinM.pose.pose.orientation.y = state_plus(1);
  odomIinM.pose.pose.orientation.z = state_plus(2);
  odomIinM.pose.pose.orientation.w = state_plus(3);
  odomIinM.pose.pose.position.x = state_plus(4);
  odomIinM.pose.pose.position.y = state_plus(5);
  odomIinM.pose.pose.position.z = state_plus(6);
  odomIinM.child_frame_id = "imu";
  odomIinM.twist.twist.linear.x = state_plus(7);
  odomIinM.twist.twist.linear.y = state_plus(8);
  odomIinM.twist.twist.linear.z = state_plus(9);
  odomIinM.twist.twist.angular.x = state_plus(10);
  odomIinM.twist.twist.angular.y = state_plus(11);
  odomIinM.twist.twist.angular.z = state_plus(12);
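  // Judging from the state_plus layout, the propagated covariance appears to be ordered
  // (orientation, position, linear vel, angular vel); Phi swaps the first two 3x3 blocks so that
  // position precedes orientation, matching the ROS pose covariance convention, before the
  // reordered blocks are copied into the pose and twist covariance arrays.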
  Eigen::Matrix<double, 12, 12> Phi = Eigen::Matrix<double, 12, 12>::Zero();
  Phi.block(0, 3, 3, 3).setIdentity();
  Phi.block(3, 0, 3, 3).setIdentity();
  Phi.block(6, 6, 6, 6).setIdentity();
  cov_plus = Phi * cov_plus * Phi.transpose();
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      odomIinM.pose.covariance[6 * r + c] = cov_plus(r, c);
    }
  }
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      odomIinM.twist.covariance[6 * r + c] = cov_plus(r + 6, c + 6);
    }
  }
  auto odom_pose = std::make_shared<ov_type::PoseJPL>();
  odom_pose->set_value(state_plus.block(0, 0, 7, 1));
  tf::StampedTransform trans = ROSVisualizerHelper::get_stamped_transform_from_pose(odom_pose, false);
  mTfBr->sendTransform(trans);
  for (const auto &calib : state->_calib_IMUtoCAM) {
    tf::StampedTransform trans_calib = ROSVisualizerHelper::get_stamped_transform_from_pose(calib.second, true);
    mTfBr->sendTransform(trans_calib);
  }
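  // When online calibration is enabled, the converged values are printed: camera-IMU time offset,
  // per-camera intrinsics, and the camera-to-IMU transform recovered from each extrinsic state.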
  if (_app->get_state()->_options.do_calib_camera_timeoffset) {
    // ...
  }
  if (_app->get_state()->_options.do_calib_camera_intrinsics) {
    for (int i = 0; i < _app->get_state()->_options.num_cameras; i++) {
      std::shared_ptr<Vec> calib = _app->get_state()->_cam_intrinsics.at(i);
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f\n" RESET, calib->value()(0), calib->value()(1), calib->value()(2), calib->value()(3));
      PRINT_INFO(REDPURPLE "%.5f,%.5f,%.5f,%.5f\n\n" RESET, calib->value()(4), calib->value()(5), calib->value()(6), calib->value()(7));
    }
  }
  if (_app->get_state()->_options.do_calib_camera_pose) {
    for (int i = 0; i < _app->get_state()->_options.num_cameras; i++) {
      std::shared_ptr<PoseJPL> calib = _app->get_state()->_calib_IMUtoCAM.at(i);
      Eigen::Matrix4d T_CtoI = Eigen::Matrix4d::Identity();
      T_CtoI.block(0, 0, 3, 3) = quat_2_Rot(calib->quat()).transpose();
      T_CtoI.block(0, 3, 3, 1) = -T_CtoI.block(0, 0, 3, 3) * calib->pos();
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f,\n" RESET, T_CtoI(0, 0), T_CtoI(0, 1), T_CtoI(0, 2), T_CtoI(0, 3));
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f,\n" RESET, T_CtoI(1, 0), T_CtoI(1, 1), T_CtoI(1, 2), T_CtoI(1, 3));
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f,\n" RESET, T_CtoI(2, 0), T_CtoI(2, 1), T_CtoI(2, 2), T_CtoI(2, 3));
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f\n\n" RESET, T_CtoI(3, 0), T_CtoI(3, 1), T_CtoI(3, 2), T_CtoI(3, 3));
    }
  }
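  // IMU intrinsic calibration: Dw/Da are the estimated gyroscope/accelerometer intrinsic matrices
  // for the configured IMU model, Tw/Ta are their inverses (computed with a column-pivoted QR
  // solve rather than an explicit inverse), and R_IMUtoACC / R_IMUtoGYRO are the IMU-to-sensor
  // rotations obtained by transposing the estimated ACC/GYRO-to-IMU states.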
  if (_app->get_state()->_options.do_calib_imu_intrinsics) {
    Eigen::Matrix3d Dw = State::Dm(_app->get_state()->_options.imu_model, _app->get_state()->_calib_imu_dw->value());
    Eigen::Matrix3d Da = State::Dm(_app->get_state()->_options.imu_model, _app->get_state()->_calib_imu_da->value());
    Eigen::Matrix3d Tw = Dw.colPivHouseholderQr().solve(Eigen::Matrix3d::Identity());
    Eigen::Matrix3d Ta = Da.colPivHouseholderQr().solve(Eigen::Matrix3d::Identity());
    Eigen::Matrix3d R_IMUtoACC = _app->get_state()->_calib_imu_ACCtoIMU->Rot().transpose();
    Eigen::Matrix3d R_IMUtoGYRO = _app->get_state()->_calib_imu_GYROtoIMU->Rot().transpose();
    // ...
  }
  if (_app->get_state()->_options.do_calib_imu_g_sensitivity) {
    Eigen::Matrix3d Tg = State::Tg(_app->get_state()->_calib_imu_tg->value());
    // ...
  }
  // ...
  if (_sim != nullptr) {
    // ...
  }
  rT2 = boost::posix_time::microsec_clock::local_time();
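  // IMU callback: the ROS message is converted into the estimator's IMU measurement struct
  // (timestamp, angular velocity wm, linear acceleration am) and handed to the VioManager.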
  message.timestamp = msg->header.stamp.toSec();
  message.wm << msg->angular_velocity.x, msg->angular_velocity.y, msg->angular_velocity.z;
  message.am << msg->linear_acceleration.x, msg->linear_acceleration.y, msg->linear_acceleration.z;
  _app->feed_measurement_imu(message);
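  // Camera updates run on a separate thread: an update is triggered once the queue holds a
  // measurement from every unique camera (a synchronized stereo pair counts as one), the latest
  // IMU time is expressed in the camera clock using the estimated camera-IMU offset and compared
  // against the oldest queued image, and the timing print reports the total update time and how
  // far processing lags behind.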
  std::thread thread([&] {
    // ...
    std::map<int, bool> unique_cam_ids;
    // ...
    unique_cam_ids[cam_msg.sensor_ids.at(0)] = true;
    // ...
    auto params = _app->get_params();
    size_t num_unique_cameras = (params.state_options.num_cameras == 2) ? 1 : params.state_options.num_cameras;
    if (unique_cam_ids.size() == num_unique_cameras) {
      // ...
      double timestamp_imu_inC = message.timestamp - _app->get_state()->_calib_dt_CAMtoIMU->value()(0);
      // ...
      auto rT0_1 = boost::posix_time::microsec_clock::local_time();
      double update_dt = 100.0 * (timestamp_imu_inC - camera_queue.at(0).timestamp);
      // ...
      auto rT0_2 = boost::posix_time::microsec_clock::local_time();
      double time_total = (rT0_2 - rT0_1).total_microseconds() * 1e-6;
      PRINT_INFO(BLUE "[TIME]: %.4f seconds total (%.1f hz, %.2f ms behind)\n" RESET, time_total, 1.0 / time_total, update_dt);
  // ...
  if (!_app->get_params().use_multi_threading_subs) {
    // ...
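  // Monocular image callback: the track_frequency parameter gives the minimum spacing used to
  // rate-limit incoming frames, the image is converted with cv_bridge, and a per-camera mask
  // (user supplied, or an all-zeros mask) is attached to the measurement.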
  double timestamp = msg0->header.stamp.toSec();
  double time_delta = 1.0 / _app->get_params().track_frequency;
  // ...
  message.timestamp = cv_ptr->header.stamp.toSec();
  // ...
  message.images.push_back(cv_ptr->image.clone());
  // ...
  if (_app->get_params().use_mask) {
    message.masks.push_back(_app->get_params().masks.at(cam_id0));
  } else {
    message.masks.push_back(cv::Mat::zeros(cv_ptr->image.rows, cv_ptr->image.cols, CV_8UC1));
  }
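  // Stereo image callback: the same rate limiting and cv_bridge conversion is done for both
  // images (conversion errors are reported), and a mask is pushed for each camera.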
  double timestamp = msg0->header.stamp.toSec();
  double time_delta = 1.0 / _app->get_params().track_frequency;
  // ...
  PRINT_ERROR("cv_bridge exception: %s\n", e.what());
  // ...
  PRINT_ERROR("cv_bridge exception: %s\n", e.what());
  // ...
  message.timestamp = cv_ptr0->header.stamp.toSec();
  // ...
  message.images.push_back(cv_ptr0->image.clone());
  message.images.push_back(cv_ptr1->image.clone());
  // ...
  if (_app->get_params().use_mask) {
    message.masks.push_back(_app->get_params().masks.at(cam_id0));
    message.masks.push_back(_app->get_params().masks.at(cam_id1));
  } else {
    message.masks.push_back(cv::Mat::zeros(cv_ptr0->image.rows, cv_ptr0->image.cols, CV_8UC1));
    message.masks.push_back(cv::Mat::zeros(cv_ptr1->image.rows, cv_ptr1->image.cols, CV_8UC1));
  }
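  // Publish the filter estimate: the pose is stamped with the current state time shifted by the
  // estimated camera-IMU offset into the IMU clock, and the marginal covariance of the IMU
  // position/orientation (computed from the statevars list on an elided line) is copied into the
  // PoseWithCovarianceStamped message.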
  std::shared_ptr<State> state = _app->get_state();
  // ...
  double t_ItoC = state->_calib_dt_CAMtoIMU->value()(0);
  double timestamp_inI = state->_timestamp + t_ItoC;
  // ...
  geometry_msgs::PoseWithCovarianceStamped poseIinM;
  poseIinM.header.stamp = ros::Time(timestamp_inI);
  poseIinM.header.frame_id = "global";
  poseIinM.pose.pose.orientation.x = state->_imu->quat()(0);
  poseIinM.pose.pose.orientation.y = state->_imu->quat()(1);
  poseIinM.pose.pose.orientation.z = state->_imu->quat()(2);
  poseIinM.pose.pose.orientation.w = state->_imu->quat()(3);
  poseIinM.pose.pose.position.x = state->_imu->pos()(0);
  poseIinM.pose.pose.position.y = state->_imu->pos()(1);
  poseIinM.pose.pose.position.z = state->_imu->pos()(2);
  // ...
  std::vector<std::shared_ptr<Type>> statevars;
  statevars.push_back(state->_imu->pose()->p());
  statevars.push_back(state->_imu->pose()->q());
  // ...
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      poseIinM.pose.covariance[6 * r + c] = covariance_posori(r, c);
    }
  }
  // ...
  geometry_msgs::PoseStamped posetemp;
  posetemp.header = poseIinM.header;
  posetemp.pose = poseIinM.pose.pose;
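  // The full pose history is republished as a nav_msgs::Path; the stride of
  // floor(N / 16384) + 1 subsamples the poses so that at most roughly 16384 are sent,
  // which keeps the message size bounded (very long paths can be a problem for rviz).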
  nav_msgs::Path arrIMU;
  // ...
  arrIMU.header.frame_id = "global";
  for (size_t i = 0; i < poses_imu.size(); i += std::floor((double)poses_imu.size() / 16384.0) + 1) {
  if (_app->get_state() == nullptr)
    // ...
  cv::Mat img_history = _app->get_historical_viz_image();
  if (img_history.empty())
    // ...
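  // The estimator's 3D features (MSCKF, SLAM, ARUCO, and the simulated map when available) are
  // converted into PointCloud2 messages through the visualizer helper.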
  std::vector<Eigen::Vector3d> feats_msckf = _app->get_good_features_MSCKF();
  sensor_msgs::PointCloud2 cloud = ROSVisualizerHelper::get_ros_pointcloud(feats_msckf);
  // ...
  std::vector<Eigen::Vector3d> feats_slam = _app->get_features_SLAM();
  sensor_msgs::PointCloud2 cloud_SLAM = ROSVisualizerHelper::get_ros_pointcloud(feats_slam);
  // ...
  std::vector<Eigen::Vector3d> feats_aruco = _app->get_features_ARUCO();
  sensor_msgs::PointCloud2 cloud_ARUCO = ROSVisualizerHelper::get_ros_pointcloud(feats_aruco);
  // ...
  std::vector<Eigen::Vector3d> feats_sim = _sim->get_map_vec();
  sensor_msgs::PointCloud2 cloud_SIM = ROSVisualizerHelper::get_ros_pointcloud(feats_sim);
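  // Groundtruth publishing: the true state at the current IMU time is looked up either from the
  // loaded groundtruth file or, in simulation, from the simulator using its true time offset.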
  Eigen::Matrix<double, 17, 1> state_gt;
  // ...
  double t_ItoC = _app->get_state()->_calib_dt_CAMtoIMU->value()(0);
  double timestamp_inI = _app->get_state()->_timestamp + t_ItoC;
  // ...
  if (_sim == nullptr && (gt_states.empty() || !DatasetReader::get_gt_state(timestamp_inI, state_gt, gt_states))) {
    // ...
  }
  if (_sim != nullptr) {
    timestamp_inI = _app->get_state()->_timestamp + _sim->get_true_parameters().calib_camimu_dt;
    if (!_sim->get_state(timestamp_inI, state_gt))
      // ...
  }
  // ...
  Eigen::Matrix<double, 16, 1> state_ekf = _app->get_state()->_imu->value();
  // ...
  geometry_msgs::PoseStamped poseIinM;
  poseIinM.header.stamp = ros::Time(timestamp_inI);
  poseIinM.header.frame_id = "global";
  poseIinM.pose.orientation.x = state_gt(1, 0);
  poseIinM.pose.orientation.y = state_gt(2, 0);
  poseIinM.pose.orientation.z = state_gt(3, 0);
  poseIinM.pose.orientation.w = state_gt(4, 0);
  poseIinM.pose.position.x = state_gt(5, 0);
  poseIinM.pose.position.y = state_gt(6, 0);
  poseIinM.pose.position.z = state_gt(7, 0);
  nav_msgs::Path arrIMU;
  // ...
  arrIMU.header.frame_id = "global";
  for (size_t i = 0; i < poses_gt.size(); i += std::floor((double)poses_gt.size() / 16384.0) + 1) {
    arrIMU.poses.push_back(poses_gt.at(i));
  }
  // ...
  tf::Quaternion quat(state_gt(1, 0), state_gt(2, 0), state_gt(3, 0), state_gt(4, 0));
  // ...
  tf::Vector3 orig(state_gt(5, 0), state_gt(6, 0), state_gt(7, 0));
  // ...
  mTfBr->sendTransform(trans);
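  // Error and consistency metrics against groundtruth: the position error is the Euclidean
  // distance between the estimated and true positions, the orientation error is the small-angle
  // magnitude of the quaternion difference (2*||vec|| converted to degrees), and the NEES values
  // whiten those errors with the marginal covariance of the current orientation and position.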
  double dx = state_ekf(4, 0) - state_gt(5, 0);
  double dy = state_ekf(5, 0) - state_gt(6, 0);
  double dz = state_ekf(6, 0) - state_gt(7, 0);
  double err_pos = std::sqrt(dx * dx + dy * dy + dz * dz);
  // ...
  Eigen::Matrix<double, 4, 1> quat_gt, quat_st, quat_diff;
  quat_gt << state_gt(1, 0), state_gt(2, 0), state_gt(3, 0), state_gt(4, 0);
  quat_st << state_ekf(0, 0), state_ekf(1, 0), state_ekf(2, 0), state_ekf(3, 0);
  // ...
  double err_ori = (180 / M_PI) * 2 * quat_diff.block(0, 0, 3, 1).norm();
  // ...
  std::vector<std::shared_ptr<Type>> statevars;
  statevars.push_back(_app->get_state()->_imu->q());
  statevars.push_back(_app->get_state()->_imu->p());
  // ...
  Eigen::Vector3d quat_diff_vec = quat_diff.block(0, 0, 3, 1);
  Eigen::Vector3d cov_vec = covariance.block(0, 0, 3, 3).inverse() * 2 * quat_diff.block(0, 0, 3, 1);
  double ori_nees = 2 * quat_diff_vec.dot(cov_vec);
  Eigen::Vector3d errpos = state_ekf.block(4, 0, 3, 1) - state_gt.block(5, 0, 3, 1);
  double pos_nees = errpos.transpose() * covariance.block(3, 3, 3, 3).inverse() * errpos;
  // ...
  if (!std::isnan(ori_nees) && !std::isnan(pos_nees)) {
    // ...
  PRINT_INFO(REDPURPLE "error to gt => %.3f, %.3f (deg,m) | rmse => %.3f, %.3f (deg,m) | called %d times\n" RESET, err_ori, err_pos,
             // ...
  double active_tracks_time1 = -1;
  double active_tracks_time2 = -1;
  std::unordered_map<size_t, Eigen::Vector3d> active_tracks_posinG;
  std::unordered_map<size_t, Eigen::Vector3d> active_tracks_uvd;
  cv::Mat active_cam0_image;
  _app->get_active_tracks(active_tracks_time1, active_tracks_posinG, active_tracks_uvd);
  _app->get_active_image(active_tracks_time2, active_cam0_image);
  if (active_tracks_time1 == -1)
    // ...
  if (_app->get_state()->_clones_IMU.find(active_tracks_time1) == _app->get_state()->_clones_IMU.end())
    // ...
  Eigen::Vector4d quat = _app->get_state()->_clones_IMU.at(active_tracks_time1)->quat();
  Eigen::Vector3d pos = _app->get_state()->_clones_IMU.at(active_tracks_time1)->pos();
  if (active_tracks_time1 != active_tracks_time2)
    // ...
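  // Loop-closure support messages: the IMU clone pose at the active tracking time, the current
  // IMU-to-camera extrinsic (published as an odometry message in the "imu" frame), and the cam0
  // intrinsics as a CameraInfo whose distortion model depends on whether cam0 is fisheye.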
  nav_msgs::Odometry odometry_pose;
  odometry_pose.header = header;
  odometry_pose.header.frame_id = "global";
  odometry_pose.pose.pose.position.x = pos(0);
  odometry_pose.pose.pose.position.y = pos(1);
  odometry_pose.pose.pose.position.z = pos(2);
  odometry_pose.pose.pose.orientation.x = quat(0);
  odometry_pose.pose.pose.orientation.y = quat(1);
  odometry_pose.pose.pose.orientation.z = quat(2);
  odometry_pose.pose.pose.orientation.w = quat(3);
  // ...
  Eigen::Vector4d q_ItoC = _app->get_state()->_calib_IMUtoCAM.at(0)->quat();
  Eigen::Vector3d p_CinI = -_app->get_state()->_calib_IMUtoCAM.at(0)->Rot().transpose() * _app->get_state()->_calib_IMUtoCAM.at(0)->pos();
  nav_msgs::Odometry odometry_calib;
  odometry_calib.header = header;
  odometry_calib.header.frame_id = "imu";
  odometry_calib.pose.pose.position.x = p_CinI(0);
  odometry_calib.pose.pose.position.y = p_CinI(1);
  odometry_calib.pose.pose.position.z = p_CinI(2);
  odometry_calib.pose.pose.orientation.x = q_ItoC(0);
  odometry_calib.pose.pose.orientation.y = q_ItoC(1);
  odometry_calib.pose.pose.orientation.z = q_ItoC(2);
  odometry_calib.pose.pose.orientation.w = q_ItoC(3);
  // ...
  bool is_fisheye = (std::dynamic_pointer_cast<ov_core::CamEqui>(_app->get_params().camera_intrinsics.at(0)) != nullptr);
  sensor_msgs::CameraInfo cameraparams;
  cameraparams.header = header;
  cameraparams.header.frame_id = "cam0";
  cameraparams.distortion_model = is_fisheye ? "equidistant" : "plumb_bob";
  Eigen::VectorXd cparams = _app->get_state()->_cam_intrinsics.at(0)->value();
  cameraparams.D = {cparams(4), cparams(5), cparams(6), cparams(7)};
  cameraparams.K = {cparams(0), 0, cparams(2), 0, cparams(1), cparams(3), 0, 0, 1};
  sensor_msgs::PointCloud point_cloud;
  point_cloud.header = header;
  point_cloud.header.frame_id = "global";
  for (const auto &feattimes : active_tracks_posinG) {
    // ...
    size_t featid = feattimes.first;
    Eigen::Vector3d uvd = Eigen::Vector3d::Zero();
    if (active_tracks_uvd.find(featid) != active_tracks_uvd.end()) {
      uvd = active_tracks_uvd.at(featid);
    }
    Eigen::Vector3d pFinG = active_tracks_posinG.at(featid);
    // ...
    geometry_msgs::Point32 p;
    // ...
    point_cloud.points.push_back(p);
    // ...
    sensor_msgs::ChannelFloat32 p_2d;
    p_2d.values.push_back(0);
    p_2d.values.push_back(0);
    p_2d.values.push_back(uvd(0));
    p_2d.values.push_back(uvd(1));
    p_2d.values.push_back(featid);
    point_cloud.channels.push_back(p_2d);
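  // Depth image for loop closure: feature depths are rasterised into a 16-bit depth map (depth
  // stored in millimetres via the 1000x scaling), while the visualisation image gets a filled
  // square of half-width dw per feature (dw is defined on a line elided from this excerpt),
  // coloured by inverse depth.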
  std::pair<int, int> wh_pair = {active_cam0_image.cols, active_cam0_image.rows};
  cv::Mat depthmap = cv::Mat::zeros(wh_pair.second, wh_pair.first, CV_16UC1);
  cv::Mat depthmap_viz = active_cam0_image;
  // ...
  for (const auto &feattimes : active_tracks_uvd) {
    // ...
    size_t featid = feattimes.first;
    Eigen::Vector3d uvd = active_tracks_uvd.at(featid);
    // ...
    if (uvd(0) < dw || uvd(0) > wh_pair.first - dw || uvd(1) < dw || uvd(1) > wh_pair.second - dw) {
      // ...
    }
    depthmap.at<uint16_t>((int)uvd(1), (int)uvd(0)) = (uint16_t)(1000 * uvd(2));
    // ...
    float id = 1.0f / (float)uvd(2);
    float r = (0.0f - id) * 255 / 1.0f;
    float g = (1.0f - id) * 255 / 1.0f;
    float b = (2.0f - id) * 255 / 1.0f;
    // ...
    uchar rc = r < 0 ? 0 : (r > 255 ? 255 : r);
    uchar gc = g < 0 ? 0 : (g > 255 ? 255 : g);
    uchar bc = b < 0 ? 0 : (b > 255 ? 255 : b);
    cv::Scalar color(255 - rc, 255 - gc, 255 - bc);
    // ...
    cv::Point p0(uvd(0) - dw, uvd(1) - dw);
    cv::Point p1(uvd(0) + dw, uvd(1) + dw);
    cv::rectangle(depthmap_viz, p0, p1, color, -1);
  }
  // ...
  header.frame_id = "cam0";