ROS2Visualizer::ROS2Visualizer(std::shared_ptr<rclcpp::Node> node, std::shared_ptr<VioManager> app, std::shared_ptr<Simulator> sim)
    : _node(node), _app(app), _sim(sim), thread_update_running(false) {

  mTfBr = std::make_shared<tf2_ros::TransformBroadcaster>(node);
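  // Create publishers: estimated IMU pose/odometry/path, feature point clouds (MSCKF, SLAM,
  // ARUCO, simulated map), groundtruth pose/path, and loop-closure information topics.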
  pub_poseimu = node->create_publisher<geometry_msgs::msg::PoseWithCovarianceStamped>("poseimu", 2);
  pub_odomimu = node->create_publisher<nav_msgs::msg::Odometry>("odomimu", 2);
  pub_pathimu = node->create_publisher<nav_msgs::msg::Path>("pathimu", 2);
  pub_points_msckf = node->create_publisher<sensor_msgs::msg::PointCloud2>("points_msckf", 2);
  pub_points_slam = node->create_publisher<sensor_msgs::msg::PointCloud2>("points_slam", 2);
  pub_points_aruco = node->create_publisher<sensor_msgs::msg::PointCloud2>("points_aruco", 2);
  pub_points_sim = node->create_publisher<sensor_msgs::msg::PointCloud2>("points_sim", 2);
  pub_posegt = node->create_publisher<geometry_msgs::msg::PoseStamped>("posegt", 2);
  pub_pathgt = node->create_publisher<nav_msgs::msg::Path>("pathgt", 2);
  pub_loop_pose = node->create_publisher<nav_msgs::msg::Odometry>("loop_pose", 2);
  pub_loop_point = node->create_publisher<sensor_msgs::msg::PointCloud>("loop_feats", 2);
  pub_loop_extrinsic = node->create_publisher<nav_msgs::msg::Odometry>("loop_extrinsic", 2);
  pub_loop_intrinsics = node->create_publisher<sensor_msgs::msg::CameraInfo>("loop_intrinsics", 2);
  if (node->has_parameter("publish_global_to_imu_tf")) {
  if (node->has_parameter("publish_calibration_tf")) {
  std::string path_to_gt;
  bool has_gt = node->get_parameter("path_gt", path_to_gt);
  if (has_gt && _sim == nullptr && !path_to_gt.empty()) {
    DatasetReader::load_gt_file(path_to_gt, gt_states);
    PRINT_DEBUG("gt file path is: %s\n", path_to_gt.c_str());
  if (node->has_parameter("save_total_state")) {

    std::string filepath_est = "state_estimate.txt";
    std::string filepath_std = "state_deviation.txt";
    std::string filepath_gt = "state_groundtruth.txt";
    if (node->has_parameter("filepath_est")) {
      node->get_parameter<std::string>("filepath_est", filepath_est);
    }
    if (node->has_parameter("filepath_std")) {
      node->get_parameter<std::string>("filepath_std", filepath_std);
    }
    if (node->has_parameter("filepath_gt")) {
      node->get_parameter<std::string>("filepath_gt", filepath_gt);
    }

    if (boost::filesystem::exists(filepath_est))
      boost::filesystem::remove(filepath_est);
    if (boost::filesystem::exists(filepath_std))
      boost::filesystem::remove(filepath_std);
    boost::filesystem::create_directories(boost::filesystem::path(filepath_est.c_str()).parent_path());
    boost::filesystem::create_directories(boost::filesystem::path(filepath_std.c_str()).parent_path());

    of_state_est << "# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc"
    of_state_std << "# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc"
    if (_sim != nullptr) {
      if (boost::filesystem::exists(filepath_gt))
        boost::filesystem::remove(filepath_gt);
      boost::filesystem::create_directories(boost::filesystem::path(filepath_gt.c_str()).parent_path());
      of_state_gt << "# timestamp(s) q p v bg ba cam_imu_dt num_cam cam0_k cam0_d cam0_rot cam0_trans ... imu_model dw da tg wtoI atoI etc"
  if (_app->get_params().use_multi_threading_pubs) {
    std::thread thread([&] {
      rclcpp::Rate loop_rate(20);
      while (rclcpp::ok()) {
  assert(parser != nullptr);
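  // IMU subscription: the topic comes from the ROS parameter server and is also recorded into the config parser.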
  std::string topic_imu;
  _node->declare_parameter<std::string>("topic_imu", "/imu0");
  _node->get_parameter("topic_imu", topic_imu);
  parser->parse_external("relative_config_imu", "imu0", "rostopic", topic_imu);
  sub_imu = _node->create_subscription<sensor_msgs::msg::Imu>(topic_imu, rclcpp::SensorDataQoS(),
  PRINT_INFO("subscribing to IMU: %s\n", topic_imu.c_str());
  if (_app->get_params().state_options.num_cameras == 2) {

    std::string cam_topic0, cam_topic1;
    _node->declare_parameter<std::string>("topic_camera" + std::to_string(0), "/cam" + std::to_string(0) + "/image_raw");
    _node->get_parameter("topic_camera" + std::to_string(0), cam_topic0);
    _node->declare_parameter<std::string>("topic_camera" + std::to_string(1), "/cam" + std::to_string(1) + "/image_raw");
    _node->get_parameter("topic_camera" + std::to_string(1), cam_topic1);
    parser->parse_external("relative_config_imucam", "cam" + std::to_string(0), "rostopic", cam_topic0);
    parser->parse_external("relative_config_imucam", "cam" + std::to_string(1), "rostopic", cam_topic1);

    auto image_sub0 = std::make_shared<message_filters::Subscriber<sensor_msgs::msg::Image>>(_node, cam_topic0);
    auto image_sub1 = std::make_shared<message_filters::Subscriber<sensor_msgs::msg::Image>>(_node, cam_topic1);
    auto sync = std::make_shared<message_filters::Synchronizer<sync_pol>>(sync_pol(10), *image_sub0, *image_sub1);

    PRINT_INFO("subscribing to cam (stereo): %s\n", cam_topic0.c_str());
    PRINT_INFO("subscribing to cam (stereo): %s\n", cam_topic1.c_str());
    for (int i = 0; i < _app->get_params().state_options.num_cameras; i++) {

      std::string cam_topic;
      _node->declare_parameter<std::string>("topic_camera" + std::to_string(i), "/cam" + std::to_string(i) + "/image_raw");
      _node->get_parameter("topic_camera" + std::to_string(i), cam_topic);
      parser->parse_external("relative_config_imucam", "cam" + std::to_string(i), "rostopic", cam_topic);

      auto sub = _node->create_subscription<sensor_msgs::msg::Image>(
          cam_topic, 10, [this, i](const sensor_msgs::msg::Image::SharedPtr msg0) { callback_monocular(msg0, i); });

      PRINT_INFO("subscribing to cam (mono): %s\n", cam_topic.c_str());
  if (!_app->get_params().use_multi_threading_pubs)

  if (!_app->initialized())

  rT1 = boost::posix_time::microsec_clock::local_time();

  if (!_app->initialized() || (timestamp - _app->initialized_time()) < 1)

  std::shared_ptr<State> state = _app->get_state();
  Eigen::Matrix<double, 13, 1> state_plus = Eigen::Matrix<double, 13, 1>::Zero();
  Eigen::Matrix<double, 12, 12> cov_plus = Eigen::Matrix<double, 12, 12>::Zero();
  if (!_app->get_propagator()->fast_state_propagate(state, timestamp, state_plus, cov_plus))
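  // Publish the propagated IMU odometry; state_plus is ordered [q(4), p(3), v(3), w(3)] in the global frame.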
  nav_msgs::msg::Odometry odomIinM;
  odomIinM.header.stamp = ROSVisualizerHelper::get_time_from_seconds(timestamp);
  odomIinM.header.frame_id = "global";

  odomIinM.pose.pose.orientation.x = state_plus(0);
  odomIinM.pose.pose.orientation.y = state_plus(1);
  odomIinM.pose.pose.orientation.z = state_plus(2);
  odomIinM.pose.pose.orientation.w = state_plus(3);
  odomIinM.pose.pose.position.x = state_plus(4);
  odomIinM.pose.pose.position.y = state_plus(5);
  odomIinM.pose.pose.position.z = state_plus(6);

  odomIinM.child_frame_id = "imu";
  odomIinM.twist.twist.linear.x = state_plus(7);
  odomIinM.twist.twist.linear.y = state_plus(8);
  odomIinM.twist.twist.linear.z = state_plus(9);
  odomIinM.twist.twist.angular.x = state_plus(10);
  odomIinM.twist.twist.angular.y = state_plus(11);
  odomIinM.twist.twist.angular.z = state_plus(12);
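  // Permute the 12x12 covariance with Phi (swap the first two 3x3 blocks) so the pose covariance
  // is filled position-first, then orientation, as the ROS odometry convention expects.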
  Eigen::Matrix<double, 12, 12> Phi = Eigen::Matrix<double, 12, 12>::Zero();
  Phi.block(0, 3, 3, 3).setIdentity();
  Phi.block(3, 0, 3, 3).setIdentity();
  Phi.block(6, 6, 6, 6).setIdentity();
  cov_plus = Phi * cov_plus * Phi.transpose();
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      odomIinM.pose.covariance[6 * r + c] = cov_plus(r, c);
  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      odomIinM.twist.covariance[6 * r + c] = cov_plus(r + 6, c + 6);
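  // Broadcast the global-to-IMU transform on TF using the propagated pose.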
  auto odom_pose = std::make_shared<ov_type::PoseJPL>();
  odom_pose->set_value(state_plus.block(0, 0, 7, 1));
  geometry_msgs::msg::TransformStamped trans = ROSVisualizerHelper::get_stamped_transform_from_pose(_node, odom_pose, false);
  trans.header.stamp = _node->now();
  trans.header.frame_id = "global";
  trans.child_frame_id = "imu";
  mTfBr->sendTransform(trans);
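  // Also broadcast the IMU-to-camera calibration transform for each camera in the state.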
  for (const auto &calib : state->_calib_IMUtoCAM) {
    geometry_msgs::msg::TransformStamped trans_calib = ROSVisualizerHelper::get_stamped_transform_from_pose(_node, calib.second, true);
    trans_calib.header.stamp = _node->now();
    trans_calib.header.frame_id = "imu";
    trans_calib.child_frame_id = "cam" + std::to_string(calib.first);
    mTfBr->sendTransform(trans_calib);
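  // Print the final calibration results if online calibration was enabled (camera-IMU time offset, camera intrinsics).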
  if (_app->get_state()->_options.do_calib_camera_timeoffset) {

  if (_app->get_state()->_options.do_calib_camera_intrinsics) {
    for (int i = 0; i < _app->get_state()->_options.num_cameras; i++) {
      std::shared_ptr<Vec> calib = _app->get_state()->_cam_intrinsics.at(i);
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f\n" RESET, calib->value()(0), calib->value()(1), calib->value()(2), calib->value()(3));
      PRINT_INFO(REDPURPLE "%.5f,%.5f,%.5f,%.5f\n\n" RESET, calib->value()(4), calib->value()(5), calib->value()(6), calib->value()(7));
  if (_app->get_state()->_options.do_calib_camera_pose) {
    for (int i = 0; i < _app->get_state()->_options.num_cameras; i++) {
      std::shared_ptr<PoseJPL> calib = _app->get_state()->_calib_IMUtoCAM.at(i);
      Eigen::Matrix4d T_CtoI = Eigen::Matrix4d::Identity();
      T_CtoI.block(0, 0, 3, 3) = quat_2_Rot(calib->quat()).transpose();
      T_CtoI.block(0, 3, 3, 1) = -T_CtoI.block(0, 0, 3, 3) * calib->pos();
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f,\n" RESET, T_CtoI(0, 0), T_CtoI(0, 1), T_CtoI(0, 2), T_CtoI(0, 3));
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f,\n" RESET, T_CtoI(1, 0), T_CtoI(1, 1), T_CtoI(1, 2), T_CtoI(1, 3));
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f,\n" RESET, T_CtoI(2, 0), T_CtoI(2, 1), T_CtoI(2, 2), T_CtoI(2, 3));
      PRINT_INFO(REDPURPLE "%.3f,%.3f,%.3f,%.3f\n\n" RESET, T_CtoI(3, 0), T_CtoI(3, 1), T_CtoI(3, 2), T_CtoI(3, 3));
  if (_app->get_state()->_options.do_calib_imu_intrinsics) {
    Eigen::Matrix3d Dw = State::Dm(_app->get_state()->_options.imu_model, _app->get_state()->_calib_imu_dw->value());
    Eigen::Matrix3d Da = State::Dm(_app->get_state()->_options.imu_model, _app->get_state()->_calib_imu_da->value());
    Eigen::Matrix3d Tw = Dw.colPivHouseholderQr().solve(Eigen::Matrix3d::Identity());
    Eigen::Matrix3d Ta = Da.colPivHouseholderQr().solve(Eigen::Matrix3d::Identity());
    Eigen::Matrix3d R_IMUtoACC = _app->get_state()->_calib_imu_ACCtoIMU->Rot().transpose();
    Eigen::Matrix3d R_IMUtoGYRO = _app->get_state()->_calib_imu_GYROtoIMU->Rot().transpose();
  if (_app->get_state()->_options.do_calib_imu_g_sensitivity) {
    Eigen::Matrix3d Tg = State::Tg(_app->get_state()->_calib_imu_tg->value());

  if (_sim != nullptr) {

  rT2 = boost::posix_time::microsec_clock::local_time();
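  // IMU callback: copy the timestamp, angular velocity, and linear acceleration into the measurement
  // struct and feed it to the estimator.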
  message.timestamp = msg->header.stamp.sec + msg->header.stamp.nanosec * 1e-9;
  message.wm << msg->angular_velocity.x, msg->angular_velocity.y, msg->angular_velocity.z;
  message.am << msg->linear_acceleration.x, msg->linear_acceleration.y, msg->linear_acceleration.z;

  _app->feed_measurement_imu(message);
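  // With multi-threaded subscribers, camera measurements are drained from the queue by a worker thread
  // once images from every unique camera stream are available; the update timing is reported below.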
  std::thread thread([&] {

    std::map<int, bool> unique_cam_ids;
    unique_cam_ids[cam_msg.sensor_ids.at(0)] = true;

    auto params = _app->get_params();
    size_t num_unique_cameras = (params.state_options.num_cameras == 2) ? 1 : params.state_options.num_cameras;
    if (unique_cam_ids.size() == num_unique_cameras) {

      double timestamp_imu_inC = message.timestamp - _app->get_state()->_calib_dt_CAMtoIMU->value()(0);

      auto rT0_1 = boost::posix_time::microsec_clock::local_time();
      double update_dt = 100.0 * (timestamp_imu_inC - camera_queue.at(0).timestamp);

      auto rT0_2 = boost::posix_time::microsec_clock::local_time();
      double time_total = (rT0_2 - rT0_1).total_microseconds() * 1e-6;
      PRINT_INFO(BLUE "[TIME]: %.4f seconds total (%.1f hz, %.2f ms behind)\n" RESET, time_total, 1.0 / time_total, update_dt);

  if (!_app->get_params().use_multi_threading_subs) {
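  // Monocular image callback: timestamp the image, clone it from the cv_bridge pointer, and attach a tracking mask.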
  double timestamp = msg0->header.stamp.sec + msg0->header.stamp.nanosec * 1e-9;
  double time_delta = 1.0 / _app->get_params().track_frequency;

  message.timestamp = cv_ptr->header.stamp.sec + cv_ptr->header.stamp.nanosec * 1e-9;
  message.images.push_back(cv_ptr->image.clone());

  if (_app->get_params().use_mask) {
    message.masks.push_back(_app->get_params().masks.at(cam_id0));
  } else {
    message.masks.push_back(cv::Mat::zeros(cv_ptr->image.rows, cv_ptr->image.cols, CV_8UC1));
  }
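  // Stereo image callback: both synchronized images arrive together along with their camera ids.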
    int cam_id0, int cam_id1) {

  double timestamp = msg0->header.stamp.sec + msg0->header.stamp.nanosec * 1e-9;
  double time_delta = 1.0 / _app->get_params().track_frequency;

  message.timestamp = cv_ptr0->header.stamp.sec + cv_ptr0->header.stamp.nanosec * 1e-9;

  message.images.push_back(cv_ptr0->image.clone());
  message.images.push_back(cv_ptr1->image.clone());

  if (_app->get_params().use_mask) {
    message.masks.push_back(_app->get_params().masks.at(cam_id0));
    message.masks.push_back(_app->get_params().masks.at(cam_id1));
  } else {
    message.masks.push_back(cv::Mat::zeros(cv_ptr0->image.rows, cv_ptr0->image.cols, CV_8UC1));
    message.masks.push_back(cv::Mat::zeros(cv_ptr1->image.rows, cv_ptr1->image.cols, CV_8UC1));
  }
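  // Publish the current state estimate: the IMU pose is stamped at the state time shifted into the
  // IMU clock by the estimated camera-IMU time offset.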
  std::shared_ptr<State> state = _app->get_state();

  double t_ItoC = state->_calib_dt_CAMtoIMU->value()(0);
  double timestamp_inI = state->_timestamp + t_ItoC;

  geometry_msgs::msg::PoseWithCovarianceStamped poseIinM;
  poseIinM.header.stamp = ROSVisualizerHelper::get_time_from_seconds(timestamp_inI);
  poseIinM.header.frame_id = "global";
  poseIinM.pose.pose.orientation.x = state->_imu->quat()(0);
  poseIinM.pose.pose.orientation.y = state->_imu->quat()(1);
  poseIinM.pose.pose.orientation.z = state->_imu->quat()(2);
  poseIinM.pose.pose.orientation.w = state->_imu->quat()(3);
  poseIinM.pose.pose.position.x = state->_imu->pos()(0);
  poseIinM.pose.pose.position.y = state->_imu->pos()(1);
  poseIinM.pose.pose.position.z = state->_imu->pos()(2);

  std::vector<std::shared_ptr<Type>> statevars;
  statevars.push_back(state->_imu->pose()->p());
  statevars.push_back(state->_imu->pose()->q());

  for (int r = 0; r < 6; r++) {
    for (int c = 0; c < 6; c++) {
      poseIinM.pose.covariance[6 * r + c] = covariance_posori(r, c);

  geometry_msgs::msg::PoseStamped posetemp;
  posetemp.header = poseIinM.header;
  posetemp.pose = poseIinM.pose.pose;
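  // Append to the IMU path; the loop stride downsamples so at most ~16384 poses are published.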
  nav_msgs::msg::Path arrIMU;
  arrIMU.header.stamp = _node->now();
  arrIMU.header.frame_id = "global";
  for (size_t i = 0; i < poses_imu.size(); i += std::floor((double)poses_imu.size() / 16384.0) + 1) {
  if (_app->get_state() == nullptr)

  cv::Mat img_history = _app->get_historical_viz_image();
  if (img_history.empty())

  std_msgs::msg::Header header;
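  // Publish the estimated feature clouds: MSCKF features, SLAM landmarks, ARUCO landmarks,
  // and (when simulating) the true map points.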
  std::vector<Eigen::Vector3d> feats_msckf = _app->get_good_features_MSCKF();
  sensor_msgs::msg::PointCloud2 cloud = ROSVisualizerHelper::get_ros_pointcloud(_node, feats_msckf);

  std::vector<Eigen::Vector3d> feats_slam = _app->get_features_SLAM();
  sensor_msgs::msg::PointCloud2 cloud_SLAM = ROSVisualizerHelper::get_ros_pointcloud(_node, feats_slam);

  std::vector<Eigen::Vector3d> feats_aruco = _app->get_features_ARUCO();
  sensor_msgs::msg::PointCloud2 cloud_ARUCO = ROSVisualizerHelper::get_ros_pointcloud(_node, feats_aruco);

  std::vector<Eigen::Vector3d> feats_sim = _sim->get_map_vec();
  sensor_msgs::msg::PointCloud2 cloud_SIM = ROSVisualizerHelper::get_ros_pointcloud(_node, feats_sim);
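  // Publish groundtruth if available: either looked up from the loaded file or queried from the simulator.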
  Eigen::Matrix<double, 17, 1> state_gt;

  double t_ItoC = _app->get_state()->_calib_dt_CAMtoIMU->value()(0);
  double timestamp_inI = _app->get_state()->_timestamp + t_ItoC;

  if (_sim == nullptr && (gt_states.empty() || !DatasetReader::get_gt_state(timestamp_inI, state_gt, gt_states))) {

  if (_sim != nullptr) {
    timestamp_inI = _app->get_state()->_timestamp + _sim->get_true_parameters().calib_camimu_dt;
    if (!_sim->get_state(timestamp_inI, state_gt))
  Eigen::Matrix<double, 16, 1> state_ekf = _app->get_state()->_imu->value();

  geometry_msgs::msg::PoseStamped poseIinM;
  poseIinM.header.stamp = ROSVisualizerHelper::get_time_from_seconds(timestamp_inI);
  poseIinM.header.frame_id = "global";
  poseIinM.pose.orientation.x = state_gt(1, 0);
  poseIinM.pose.orientation.y = state_gt(2, 0);
  poseIinM.pose.orientation.z = state_gt(3, 0);
  poseIinM.pose.orientation.w = state_gt(4, 0);
  poseIinM.pose.position.x = state_gt(5, 0);
  poseIinM.pose.position.y = state_gt(6, 0);
  poseIinM.pose.position.z = state_gt(7, 0);
  nav_msgs::msg::Path arrIMU;
  arrIMU.header.stamp = _node->now();
  arrIMU.header.frame_id = "global";
  for (size_t i = 0; i < poses_gt.size(); i += std::floor((double)poses_gt.size() / 16384.0) + 1) {
    arrIMU.poses.push_back(poses_gt.at(i));
  geometry_msgs::msg::TransformStamped trans;
  trans.header.stamp = _node->now();
  trans.header.frame_id = "global";
  trans.child_frame_id = "truth";
  trans.transform.rotation.x = state_gt(1, 0);
  trans.transform.rotation.y = state_gt(2, 0);
  trans.transform.rotation.z = state_gt(3, 0);
  trans.transform.rotation.w = state_gt(4, 0);
  trans.transform.translation.x = state_gt(5, 0);
  trans.transform.translation.y = state_gt(6, 0);
  trans.transform.translation.z = state_gt(7, 0);
  mTfBr->sendTransform(trans);
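  // Compute the instantaneous position error and orientation error (twice the vector part of the
  // quaternion difference, in degrees), followed by NEES values against the current state covariance.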
  double dx = state_ekf(4, 0) - state_gt(5, 0);
  double dy = state_ekf(5, 0) - state_gt(6, 0);
  double dz = state_ekf(6, 0) - state_gt(7, 0);
  double err_pos = std::sqrt(dx * dx + dy * dy + dz * dz);

  Eigen::Matrix<double, 4, 1> quat_gt, quat_st, quat_diff;
  quat_gt << state_gt(1, 0), state_gt(2, 0), state_gt(3, 0), state_gt(4, 0);
  quat_st << state_ekf(0, 0), state_ekf(1, 0), state_ekf(2, 0), state_ekf(3, 0);
  double err_ori = (180 / M_PI) * 2 * quat_diff.block(0, 0, 3, 1).norm();
  std::vector<std::shared_ptr<Type>> statevars;
  statevars.push_back(_app->get_state()->_imu->q());
  statevars.push_back(_app->get_state()->_imu->p());

  Eigen::Vector3d quat_diff_vec = quat_diff.block(0, 0, 3, 1);
  Eigen::Vector3d cov_vec = covariance.block(0, 0, 3, 3).inverse() * 2 * quat_diff.block(0, 0, 3, 1);
  double ori_nees = 2 * quat_diff_vec.dot(cov_vec);
  Eigen::Vector3d errpos = state_ekf.block(4, 0, 3, 1) - state_gt.block(5, 0, 3, 1);
  double pos_nees = errpos.transpose() * covariance.block(3, 3, 3, 3).inverse() * errpos;
  if (!std::isnan(ori_nees) && !std::isnan(pos_nees)) {

  PRINT_INFO(REDPURPLE "error to gt => %.3f, %.3f (deg,m) | rmse => %.3f, %.3f (deg,m) | called %d times\n" RESET, err_ori, err_pos,
  double active_tracks_time1 = -1;
  double active_tracks_time2 = -1;
  std::unordered_map<size_t, Eigen::Vector3d> active_tracks_posinG;
  std::unordered_map<size_t, Eigen::Vector3d> active_tracks_uvd;
  cv::Mat active_cam0_image;
  _app->get_active_tracks(active_tracks_time1, active_tracks_posinG, active_tracks_uvd);
  _app->get_active_image(active_tracks_time2, active_cam0_image);
  if (active_tracks_time1 == -1)
  if (_app->get_state()->_clones_IMU.find(active_tracks_time1) == _app->get_state()->_clones_IMU.end())
  if (active_tracks_time1 != active_tracks_time2)

  std_msgs::msg::Header header;
  header.stamp = ROSVisualizerHelper::get_time_from_seconds(active_tracks_time1);
  nav_msgs::msg::Odometry odometry_pose;
  odometry_pose.header = header;
  odometry_pose.header.frame_id = "global";
  odometry_pose.pose.pose.position.x = _app->get_state()->_clones_IMU.at(active_tracks_time1)->pos()(0);
  odometry_pose.pose.pose.position.y = _app->get_state()->_clones_IMU.at(active_tracks_time1)->pos()(1);
  odometry_pose.pose.pose.position.z = _app->get_state()->_clones_IMU.at(active_tracks_time1)->pos()(2);
  odometry_pose.pose.pose.orientation.x = _app->get_state()->_clones_IMU.at(active_tracks_time1)->quat()(0);
  odometry_pose.pose.pose.orientation.y = _app->get_state()->_clones_IMU.at(active_tracks_time1)->quat()(1);
  odometry_pose.pose.pose.orientation.z = _app->get_state()->_clones_IMU.at(active_tracks_time1)->quat()(2);
  odometry_pose.pose.pose.orientation.w = _app->get_state()->_clones_IMU.at(active_tracks_time1)->quat()(3);
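  // Publish the cam0 extrinsics (camera pose expressed in the IMU frame, inverted from the stored
  // IMU-to-camera calibration) and the cam0 intrinsics as a CameraInfo message.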
  Eigen::Vector4d q_ItoC = _app->get_state()->_calib_IMUtoCAM.at(0)->quat();
  Eigen::Vector3d p_CinI = -_app->get_state()->_calib_IMUtoCAM.at(0)->Rot().transpose() * _app->get_state()->_calib_IMUtoCAM.at(0)->pos();
  nav_msgs::msg::Odometry odometry_calib;
  odometry_calib.header = header;
  odometry_calib.header.frame_id = "imu";
  odometry_calib.pose.pose.position.x = p_CinI(0);
  odometry_calib.pose.pose.position.y = p_CinI(1);
  odometry_calib.pose.pose.position.z = p_CinI(2);
  odometry_calib.pose.pose.orientation.x = q_ItoC(0);
  odometry_calib.pose.pose.orientation.y = q_ItoC(1);
  odometry_calib.pose.pose.orientation.z = q_ItoC(2);
  odometry_calib.pose.pose.orientation.w = q_ItoC(3);
  bool is_fisheye = (std::dynamic_pointer_cast<ov_core::CamEqui>(_app->get_params().camera_intrinsics.at(0)) != nullptr);
  sensor_msgs::msg::CameraInfo cameraparams;
  cameraparams.header = header;
  cameraparams.header.frame_id = "cam0";
  cameraparams.distortion_model = is_fisheye ? "equidistant" : "plumb_bob";
  Eigen::VectorXd cparams = _app->get_state()->_cam_intrinsics.at(0)->value();
  cameraparams.d = {cparams(4), cparams(5), cparams(6), cparams(7)};
  cameraparams.k = {cparams(0), 0, cparams(2), 0, cparams(1), cparams(3), 0, 0, 1};
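  // Publish the active 3D features in the global frame; each point carries a channel with its
  // (u, v) measurement and feature id so 2D-3D correspondences can be recovered.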
  sensor_msgs::msg::PointCloud point_cloud;
  point_cloud.header = header;
  point_cloud.header.frame_id = "global";
  for (const auto &feattimes : active_tracks_posinG) {

    size_t featid = feattimes.first;
    Eigen::Vector3d uvd = Eigen::Vector3d::Zero();
    if (active_tracks_uvd.find(featid) != active_tracks_uvd.end()) {
      uvd = active_tracks_uvd.at(featid);
    }
    Eigen::Vector3d pFinG = active_tracks_posinG.at(featid);

    geometry_msgs::msg::Point32 p;
    point_cloud.points.push_back(p);

    sensor_msgs::msg::ChannelFloat32 p_2d;
    p_2d.values.push_back(0);
    p_2d.values.push_back(0);
    p_2d.values.push_back(uvd(0));
    p_2d.values.push_back(uvd(1));
    p_2d.values.push_back(featid);
    point_cloud.channels.push_back(p_2d);
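  // Build a 16-bit depth image (depth stored in millimeters) from the tracked features, plus a
  // visualization image where each feature is drawn as a filled rectangle colored by inverse depth.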
  std::pair<int, int> wh_pair = {active_cam0_image.cols, active_cam0_image.rows};
  cv::Mat depthmap = cv::Mat::zeros(wh_pair.second, wh_pair.first, CV_16UC1);
  cv::Mat depthmap_viz = active_cam0_image;
  for (const auto &feattimes : active_tracks_uvd) {

    size_t featid = feattimes.first;
    Eigen::Vector3d uvd = active_tracks_uvd.at(featid);

    if (uvd(0) < dw || uvd(0) > wh_pair.first - dw || uvd(1) < dw || uvd(1) > wh_pair.second - dw) {

    depthmap.at<uint16_t>((int)uvd(1), (int)uvd(0)) = (uint16_t)(1000 * uvd(2));

    float id = 1.0f / (float)uvd(2);
    float r = (0.0f - id) * 255 / 1.0f;
    float g = (1.0f - id) * 255 / 1.0f;
    float b = (2.0f - id) * 255 / 1.0f;
    uchar rc = r < 0 ? 0 : (r > 255 ? 255 : r);
    uchar gc = g < 0 ? 0 : (g > 255 ? 255 : g);
    uchar bc = b < 0 ? 0 : (b > 255 ? 255 : b);
    cv::Scalar color(255 - rc, 255 - gc, 255 - bc);

    cv::Point p0(uvd(0) - dw, uvd(1) - dw);
    cv::Point p1(uvd(0) + dw, uvd(1) + dw);
    cv::rectangle(depthmap_viz, p0, p1, color, -1);
  sensor_msgs::msg::Image::SharedPtr exl_msg1 =