#define BOOST_PARAMETER_MAX_ARITY 7

#include "jsk_pcl_ros/kinfu.h"

#include <boost/thread/thread.hpp>
#include <boost/filesystem.hpp>

#include <pcl/features/normal_3d.h>
#include <pcl/io/obj_io.h>

#include <jsk_recognition_msgs/TrackingStatus.h>
#include <jsk_recognition_msgs/SaveMesh.h>

namespace enc = sensor_msgs::image_encodings;

// Helper: merge an XYZ cloud and an RGB cloud of equal size into one XYZRGB cloud.
void concatenateFields(pcl::PointCloud<pcl::PointXYZ>& cloud_xyz,
                       pcl::PointCloud<pcl::RGB>& cloud_rgb,
                       pcl::PointCloud<pcl::PointXYZRGB>& cloud)
{
  if (cloud_xyz.points.size() != cloud_rgb.points.size())
  {
    std::cout << "Clouds being concatenated must have same size." << std::endl;
    return;
  }
  cloud.points.resize(cloud_xyz.points.size());
  for (size_t i = 0; i < cloud_xyz.points.size(); i++)
  {
    cloud.points[i].x = cloud_xyz.points[i].x;
    cloud.points[i].y = cloud_xyz.points[i].y;
    cloud.points[i].z = cloud_xyz.points[i].z;
    cloud.points[i].r = cloud_rgb.points[i].r;
    cloud.points[i].g = cloud_rgb.points[i].g;
    cloud.points[i].b = cloud_rgb.points[i].b;
  }
  cloud.width = cloud_xyz.width;
  cloud.height = cloud_xyz.height;
}

namespace jsk_pcl_ros
{
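// onInit() wires up the nodelet: it reads the ROS parameters (device, auto_reset,
// integrate_color, slam, fixed_frame_id, n_textures, volume_size), sets up
// dynamic_reconfigure, and advertises the publishers and services used below.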
void Kinfu::onInit()
{
  ConnectionBasedNodelet::onInit();
  always_subscribe_ = true;

  pnh_->param("device", device_, 0);
  pnh_->param("auto_reset", auto_reset_, true);
  pnh_->param("integrate_color", integrate_color_, false);
  pnh_->param("slam", slam_, false);
  pnh_->param<std::string>("fixed_frame_id", fixed_frame_id_, "odom_init");
  pnh_->param("n_textures", n_textures_, -1);
  pnh_->param("volume_size", volume_size_, pcl::device::kinfuLS::VOLUME_SIZE);

  srv_ = boost::make_shared<dynamic_reconfigure::Server<Config> >(*pnh_);
  dynamic_reconfigure::Server<Config>::CallbackType f =
    boost::bind(&Kinfu::configCallback, this, _1, _2);
  srv_->setCallback(f);

  pub_camera_pose_ = advertise<geometry_msgs::PoseStamped>(*pnh_, "output", 1);
  pub_cloud_ = advertise<sensor_msgs::PointCloud2>(*pnh_, "output/cloud", 1);
  pub_depth_ = advertise<sensor_msgs::Image>(*pnh_, "output/depth", 1);
  pub_rendered_image_ = advertise<sensor_msgs::Image>(*pnh_, "output/rendered_image", 1);
  pub_status_ = advertise<jsk_recognition_msgs::TrackingStatus>(*pnh_, "output/status", 1);

  srv_reset_ = pnh_->advertiseService("reset", &Kinfu::resetCallback, this);
  srv_save_mesh_ = pnh_->advertiseService("save_mesh", &Kinfu::saveMeshCallback, this);
  srv_save_mesh_with_context_ = pnh_->advertiseService(
    "save_mesh_with_context", &Kinfu::saveMeshWithContextCallback, this);
}
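// Dynamic reconfigure callback: only the directory used by the save_mesh services
// can currently be changed at runtime.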
void Kinfu::configCallback(Config& config, uint32_t level)
{
  save_dir_ = config.save_dir;
}
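// Create the KinfuTracker on the configured CUDA device. The initial camera pose is
// centered in x/y and placed slightly behind the volume's front face, looking along
// +z into the TSDF volume.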
void Kinfu::initKinfu(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg)
{
  pcl::gpu::setDevice(device_);
  pcl::gpu::printShortCudaDeviceInfo(device_);

  float shift_distance = pcl::device::kinfuLS::DISTANCE_THRESHOLD;
  Eigen::Vector3f volume_size_vector = Eigen::Vector3f::Constant(volume_size_);
  if (shift_distance > 2.5 * volume_size_)
  {
    NODELET_WARN("WARNING Shifting distance (%.2f) is very large compared to the volume size (%.2f).",
                 shift_distance, volume_size_);
  }

  kinfu_.reset(new pcl::gpu::kinfuLS::KinfuTracker(
    volume_size_vector, shift_distance, caminfo_msg->height, caminfo_msg->width));

  Eigen::Matrix3f R = Eigen::Matrix3f::Identity();
  Eigen::Vector3f t = volume_size_vector * 0.5f - Eigen::Vector3f(0, 0, volume_size_vector(2) / 2 * 1.2f);

  Eigen::Affine3f pose = Eigen::Translation3f(t) * Eigen::AngleAxisf(R);

  kinfu_->setInitialCameraPose(pose);
  kinfu_->volume().setTsdfTruncDist(0.030f);
  kinfu_->setIcpCorespFilteringParams(0.1f, sin(pcl::deg2rad(20.f)));
  kinfu_->setCameraMovementThreshold(0.001f);

  if (integrate_color_)
  {
    const int max_color_integration_weight = 2;
    kinfu_->initColorIntegration(max_color_integration_weight);
  }
}
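// Hook the synchronized input topics to update(): camera_info + depth, plus the
// color image when integrate_color is enabled.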
void Kinfu::subscribe()
{
  // queue_size bounds the message_filters synchronizer queues for the inputs below.
  int queue_size;
  pnh_->param("queue_size", queue_size, 10);

  sub_camera_info_.subscribe(*pnh_, "input/camera_info", 1);
  sub_depth_.subscribe(*pnh_, "input/depth", 1);
  if (integrate_color_)
  {
    sub_color_.subscribe(*pnh_, "input/color", 1);
    sync_with_color_->connectInput(sub_camera_info_, sub_depth_, sub_color_);
    sync_with_color_->registerCallback(boost::bind(&Kinfu::update, this, _1, _2, _3));
  }
  else
  {
    sync_->connectInput(sub_camera_info_, sub_depth_);
    sync_->registerCallback(boost::bind(&Kinfu::update, this, _1, _2));
  }
}
void Kinfu::update(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg,
                   const sensor_msgs::Image::ConstPtr& depth_msg)
{
  update(caminfo_msg, depth_msg, sensor_msgs::ImageConstPtr());
}
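// Main per-frame callback: validate image sizes, lazily initialize the tracker,
// convert the depth image to 16-bit millimeters, run one KinFu iteration (with
// color when enabled), publish tracking status, camera pose and TF, and generate
// the optional debug outputs (raycast depth, rendered view, reconstructed cloud)
// for connected subscribers.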
void Kinfu::update(const sensor_msgs::CameraInfo::ConstPtr& caminfo_msg,
                   const sensor_msgs::Image::ConstPtr& depth_msg,
                   const sensor_msgs::Image::ConstPtr& color_msg)
{
  if ((depth_msg->height != caminfo_msg->height) || (depth_msg->width != caminfo_msg->width))
  {
    ROS_ERROR("Image size of input depth and camera info must be same. Depth: (%d, %d), Camera Info: (%d, %d)",
              depth_msg->height, depth_msg->width, caminfo_msg->height, caminfo_msg->width);
    return;
  }
  if (integrate_color_ &&
      ((color_msg->height != caminfo_msg->height) || (color_msg->width != caminfo_msg->width)))
  {
    ROS_ERROR("Image size of input color image and camera info must be same. Color: (%d, %d), Camera Info: (%d, %d)",
              color_msg->height, color_msg->width, caminfo_msg->height, caminfo_msg->width);
    return;
  }

  if (!kinfu_)
  {
    initKinfu(caminfo_msg);
  }

  kinfu_->setDepthIntrinsics(caminfo_msg->K[0], caminfo_msg->K[4],
                             caminfo_msg->K[2], caminfo_msg->K[5]);
  // Convert the incoming depth image to the 16UC1 millimeter format KinFu expects.
  cv::Mat depth;
  if (depth_msg->encoding == enc::TYPE_32FC1)
  {
    cv::Mat depth_32fc1 = cv_bridge::toCvCopy(depth_msg, enc::TYPE_32FC1)->image;
    depth_32fc1 *= 1000.;
    depth_32fc1.convertTo(depth, CV_16UC1);
  }
  else if (depth_msg->encoding == enc::TYPE_16UC1)
  {
    depth = cv_bridge::toCvCopy(depth_msg, enc::TYPE_16UC1)->image;
  }
  else
  {
    NODELET_FATAL("Unsupported depth image encoding: %s", depth_msg->encoding.c_str());
    return;
  }

  pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_device;
  depth_device.upload(&(depth.data[0]), depth.cols * 2, depth.rows, depth.cols);
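  // Run one KinFu iteration. With color integration enabled the RGB frame is
  // uploaded to the GPU alongside the depth map so the color volume is updated too.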
  if (integrate_color_)
  {
    if (color_msg->encoding == enc::BGR8)
    {
      // Convert BGR8 to RGB8 before uploading; KinFu expects RGB pixel order.
      tmp_image_ptr_ = cv_bridge::toCvCopy(color_msg, enc::RGB8);
      colors_device_.upload(&(tmp_image_ptr_->toImageMsg()->data[0]),
                            color_msg->step, color_msg->height, color_msg->width);
    }
    else
    {
      colors_device_.upload(&(color_msg->data[0]), color_msg->step, color_msg->height, color_msg->width);
    }
    (*kinfu_)(depth_device, colors_device_);
  }
  else
  {
    (*kinfu_)(depth_device);
  }
  ++frame_idx_;
  jsk_recognition_msgs::TrackingStatus status;
  status.header = caminfo_msg->header;
  if (kinfu_->icpIsLost())
  {
    if (auto_reset_)
    {
      // Drop the tracker so that the next frame re-initializes kinect fusion.
      kinfu_.reset();
      textures_.clear();
      cameras_.clear();
      frame_idx_ = 0;
    }
    status.is_lost = true;
    pub_status_.publish(status);
    return;
  }
  status.is_lost = false;
  pub_status_.publish(status);
  if (integrate_color_ && (frame_idx_ % pcl::device::kinfuLS::SNAPSHOT_RATE == 1))
  {
    // Keep a snapshot of the color image and the camera pose for texture mapping.
    cv::Mat texture = cv_bridge::toCvCopy(color_msg)->image;
    if (color_msg->encoding == enc::RGB8)
    {
      cv::cvtColor(texture, texture, cv::COLOR_RGB2BGR);
    }
    textures_.push_back(texture);

    pcl::TextureMapping<pcl::PointXYZ>::Camera camera;
    camera.pose = kinfu_->getCameraPose();
    camera.focal_length = caminfo_msg->K[0];
    camera.height = caminfo_msg->height;
    camera.width = caminfo_msg->width;
    cameras_.push_back(camera);
  }
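  // Publish the current camera pose and broadcast camera -> kinfu_origin over TF.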
  Eigen::Affine3f camera_pose = kinfu_->getCameraPose();

  if (pub_camera_pose_.getNumSubscribers() > 0)
  {
    geometry_msgs::PoseStamped camera_pose_msg;
    tf::poseEigenToMsg(camera_pose.cast<double>(), camera_pose_msg.pose);
    camera_pose_msg.header.stamp = caminfo_msg->header.stamp;
    camera_pose_msg.header.frame_id = "kinfu_origin";
    pub_camera_pose_.publish(camera_pose_msg);
  }

  Eigen::Affine3f camera_to_kinfu_origin = camera_pose.inverse();
  tf::Transform tf_camera_to_kinfu_origin;
  tf::transformEigenToTF(camera_to_kinfu_origin.cast<double>(), tf_camera_to_kinfu_origin);
  tf_camera_to_kinfu_origin.setRotation(tf_camera_to_kinfu_origin.getRotation().normalized());
  tf_broadcaster_.sendTransform(
    tf::StampedTransform(tf_camera_to_kinfu_origin, caminfo_msg->header.stamp,
                         caminfo_msg->header.frame_id, "kinfu_origin"));
  if (slam_)
  {
    // Estimate the drift of the fixed frame against the kinfu estimate.
    tf::StampedTransform tf_odom_to_camera;
    tf_listener_->lookupTransform(
      fixed_frame_id_, caminfo_msg->header.frame_id, ros::Time(0), tf_odom_to_camera);
    Eigen::Affine3f odom_to_camera;
    Eigen::Affine3d odom_to_camera_d;
    tf::transformTFToEigen(tf_odom_to_camera, odom_to_camera_d);
    odom_to_camera = odom_to_camera_d.cast<float>();

    if (frame_idx_ == 1)  // assumption: the reference alignment is captured on the first tracked frame
    {
      odom_init_to_kinfu_origin_ = odom_to_camera * camera_to_kinfu_origin;
    }

    Eigen::Affine3f map_to_odom;
    // map_to_odom * (odom_to_camera * camera_to_kinfu_origin) == odom_init_to_kinfu_origin_
    map_to_odom = odom_init_to_kinfu_origin_ * (odom_to_camera * camera_to_kinfu_origin).inverse();
    tf::Transform tf_map_to_odom;
    tf::transformEigenToTF(map_to_odom.cast<double>(), tf_map_to_odom);
    tf_map_to_odom.setRotation(tf_map_to_odom.getRotation().normalized());
    // assumption: the correction is broadcast as "map" -> fixed frame
    tf_broadcaster_.sendTransform(
      tf::StampedTransform(tf_map_to_odom, caminfo_msg->header.stamp, "map", fixed_frame_id_));
  }
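  // The outputs below are comparatively expensive, so each one is generated only
  // when its topic has at least one subscriber.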
  if (pub_depth_.getNumSubscribers() > 0)
  {
    // The published depth is raycast from the TSDF volume at the current camera
    // pose, i.e. a denoised model view rather than the raw sensor depth.
    pcl::gpu::kinfuLS::KinfuTracker::DepthMap depth_gpu;
    Eigen::Affine3f camera_pose = kinfu_->getCameraPose();
    raycaster_ = pcl::gpu::kinfuLS::RayCaster::Ptr(
      new pcl::gpu::kinfuLS::RayCaster(
        kinfu_->rows(), kinfu_->cols(),
        caminfo_msg->K[0], caminfo_msg->K[4],
        caminfo_msg->K[2], caminfo_msg->K[5]));
    raycaster_->run(kinfu_->volume(), camera_pose, kinfu_->getCyclicalBufferStructure());
    raycaster_->generateDepthImage(depth_gpu);

    int cols;
    std::vector<unsigned short> data;
    depth_gpu.download(data, cols);

    sensor_msgs::Image output_depth_msg;
    sensor_msgs::fillImage(output_depth_msg,
                           enc::TYPE_16UC1,
                           depth_gpu.rows(),
                           depth_gpu.cols(),
                           depth_gpu.cols() * 2,
                           reinterpret_cast<unsigned short*>(&data[0]));
    output_depth_msg.header = caminfo_msg->header;
    pub_depth_.publish(output_depth_msg);
  }
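  // Rendered view of the reconstruction; when color integration is active the live
  // color frame is blended into the raycast image before publishing.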
  if (pub_rendered_image_.getNumSubscribers() > 0)
  {
    pcl::gpu::kinfuLS::KinfuTracker::View view_device;
    std::vector<pcl::gpu::kinfuLS::PixelRGB> view_host;
    kinfu_->getImage(view_device);

    if (integrate_color_)
    {
      pcl::gpu::kinfuLS::paint3DView(colors_device_, view_device);
    }

    int cols;
    view_device.download(view_host, cols);

    sensor_msgs::Image rendered_image_msg;
    sensor_msgs::fillImage(rendered_image_msg,
                           enc::RGB8,
                           view_device.rows(),
                           view_device.cols(),
                           view_device.cols() * 3,
                           reinterpret_cast<unsigned char*>(&view_host[0]));
    rendered_image_msg.header = caminfo_msg->header;
    pub_rendered_image_.publish(rendered_image_msg);
  }
  if (pub_cloud_.getNumSubscribers() > 0)
  {
    pcl::gpu::DeviceArray<pcl::PointXYZ> cloud_buffer_device;
    pcl::gpu::DeviceArray<pcl::PointXYZ> extracted = kinfu_->volume().fetchCloud(cloud_buffer_device);

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_xyz(new pcl::PointCloud<pcl::PointXYZ>());
    extracted.download(cloud_xyz->points);
    cloud_xyz->width = static_cast<int>(cloud_xyz->points.size());
    cloud_xyz->height = 1;

    sensor_msgs::PointCloud2 output_cloud_msg;
    if (integrate_color_)
    {
      pcl::gpu::DeviceArray<pcl::RGB> point_colors_device;
      kinfu_->colorVolume().fetchColors(extracted, point_colors_device);

      pcl::PointCloud<pcl::RGB>::Ptr cloud_rgb(new pcl::PointCloud<pcl::RGB>());
      point_colors_device.download(cloud_rgb->points);
      cloud_rgb->width = static_cast<int>(cloud_rgb->points.size());
      cloud_rgb->height = 1;

      pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZRGB>());
      cloud->points.resize(cloud_xyz->width);
      concatenateFields(*cloud_xyz, *cloud_rgb, *cloud);
      pcl::toROSMsg(*cloud, output_cloud_msg);
    }
    else
    {
      pcl::toROSMsg(*cloud_xyz, output_cloud_msg);
    }
    output_cloud_msg.header.frame_id = "kinfu_origin";
    pub_cloud_.publish(output_cloud_msg);
  }
}
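// "reset" service: discard the accumulated tracker state; the next depth frame then
// re-initializes kinect fusion from scratch.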
bool Kinfu::resetCallback(std_srvs::Empty::Request& req, std_srvs::Empty::Response& res)
{
  // Drop the tracker and the accumulated snapshots; update() re-creates the tracker.
  kinfu_.reset();
  textures_.clear();
  cameras_.clear();
  frame_idx_ = 0;
  return true;
}
bool Kinfu::saveMeshWithContextCallback(
  jsk_recognition_msgs::SaveMesh::Request& req, jsk_recognition_msgs::SaveMesh::Response& res)
{
  pcl::PolygonMesh polygon_mesh = createPolygonMesh(req.box, req.ground_frame_id);

  boost::filesystem::path dir(integrate_color_ ? save_dir_ + "/textures" : save_dir_);
  if (boost::filesystem::create_directories(dir))
  {
    NODELET_INFO("Created save_dir: %s", save_dir_.c_str());
  }

  std::string out_file = save_dir_ + "/mesh.obj";
  if (integrate_color_ && n_textures_ != 0)
  {
    pcl::TextureMesh texture_mesh;
    if (n_textures_ > 0)
    {
      // Use only the most recent n_textures snapshots.
      std::vector<cv::Mat> textures(textures_.end() - n_textures_, textures_.end());
      pcl::texture_mapping::CameraVector cameras(cameras_.end() - n_textures_, cameras_.end());
      texture_mesh = convertToTextureMesh(polygon_mesh, textures, cameras);
    }
    else
    {
      texture_mesh = convertToTextureMesh(polygon_mesh, textures_, cameras_);
    }
    pcl::io::saveOBJFile(out_file, texture_mesh, 5);
  }
  else
  {
    pcl::io::saveOBJFile(out_file, polygon_mesh);
  }
  return true;
}
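// "save_mesh" service: same as above, but meshes the entire TSDF volume without
// cropping to a bounding box or closing the bottom toward a ground frame.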
bool Kinfu::saveMeshCallback(std_srvs::Empty::Request& req, std_srvs::Empty::Response& res)
{
  pcl::PolygonMesh polygon_mesh = createPolygonMesh();

  boost::filesystem::path dir(integrate_color_ ? save_dir_ + "/textures" : save_dir_);
  if (boost::filesystem::create_directories(dir))
  {
    NODELET_INFO("Created save_dir: %s", save_dir_.c_str());
  }

  std::string out_file = save_dir_ + "/mesh.obj";
  if (integrate_color_ && n_textures_ != 0)
  {
    pcl::TextureMesh texture_mesh;
    if (n_textures_ > 0)
    {
      std::vector<cv::Mat> textures(textures_.end() - n_textures_, textures_.end());
      pcl::texture_mapping::CameraVector cameras(cameras_.end() - n_textures_, cameras_.end());
      texture_mesh = convertToTextureMesh(polygon_mesh, textures, cameras);
    }
    else
    {
      texture_mesh = convertToTextureMesh(polygon_mesh, textures_, cameras_);
    }
    pcl::io::saveOBJFile(out_file, texture_mesh, 5);
  }
  else
  {
    pcl::io::saveOBJFile(out_file, polygon_mesh);
  }
  return true;
}
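// Build a pcl::TextureMesh from the polygon mesh: faces are UV-mapped against the
// stored camera snapshots, with one OBJ material per camera plus a final dark
// material for faces that no camera observed.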
pcl::TextureMesh Kinfu::convertToTextureMesh(const pcl::PolygonMesh& triangles,
                                             const std::vector<cv::Mat> textures,
                                             pcl::texture_mapping::CameraVector cameras)
{
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
  pcl::fromPCLPointCloud2(triangles.cloud, *cloud);

  // Create the texture mesh object that will contain the UV-mapped mesh.
  pcl::TextureMesh mesh;
  mesh.cloud = triangles.cloud;
  std::vector<pcl::Vertices> polygon_1;

  // Push the faces of the input mesh into the texture mesh.
  polygon_1.resize(triangles.polygons.size());
  for (size_t i = 0; i < triangles.polygons.size(); ++i)
  {
    polygon_1[i] = triangles.polygons[i];
  }
  mesh.tex_polygons.push_back(polygon_1);
  NODELET_INFO("Input mesh contains %lu faces, %lu vertices and %lu textures.",
               mesh.tex_polygons[0].size(), cloud->points.size(), cameras.size());

  // One material per camera, plus one extra material for occluded faces.
  mesh.tex_materials.resize(cameras.size() + 1);
  for (int i = 0; i <= cameras.size(); ++i)
  {
    pcl::TexMaterial mesh_material;
    mesh_material.tex_Ka.r = 0.2f;
    mesh_material.tex_Ka.g = 0.2f;
    mesh_material.tex_Ka.b = 0.2f;

    mesh_material.tex_Kd.r = 0.8f;
    mesh_material.tex_Kd.g = 0.8f;
    mesh_material.tex_Kd.b = 0.8f;

    mesh_material.tex_Ks.r = 1.0f;
    mesh_material.tex_Ks.g = 1.0f;
    mesh_material.tex_Ks.b = 1.0f;

    mesh_material.tex_d = 1.0f;
    mesh_material.tex_Ns = 75.0f;
    mesh_material.tex_illum = 2;

    std::stringstream tex_name;
    tex_name << "material_" << i;
    tex_name >> mesh_material.tex_name;

    if (i < cameras.size())
    {
      std::stringstream ss;
      ss << "textures/" << i << ".jpg";
      std::string texture_file = ss.str();
      cv::imwrite(save_dir_ + "/" + texture_file, textures[i]);
      cameras[i].texture_file = texture_file;
      mesh_material.tex_file = texture_file;
    }
    else
    {
      // The last material is a black image used for faces visible from no camera.
      std::string texture_file = "textures/occluded.jpg";
      cv::imwrite(save_dir_ + "/" + texture_file,
                  cv::Mat::zeros(textures[0].rows, textures[0].cols, CV_8UC1));
      mesh_material.tex_file = texture_file;
    }

    mesh.tex_materials[i] = mesh_material;
  }

  // Sort faces by camera and project UV coordinates.
  pcl::TextureMapping<pcl::PointXYZ> tm;
  tm.textureMeshwithMultipleCameras(mesh, cameras);

  // Estimate per-vertex normals and store XYZ + normals back into the mesh cloud.
  pcl::NormalEstimation<pcl::PointXYZ, pcl::Normal> n;
  pcl::PointCloud<pcl::Normal>::Ptr normals(new pcl::PointCloud<pcl::Normal>);
  pcl::search::KdTree<pcl::PointXYZ>::Ptr tree(new pcl::search::KdTree<pcl::PointXYZ>);
  tree->setInputCloud(cloud);
  n.setInputCloud(cloud);
  n.setSearchMethod(tree);
  n.setKSearch(20);
  n.compute(*normals);

  pcl::PointCloud<pcl::PointNormal>::Ptr cloud_with_normals(new pcl::PointCloud<pcl::PointNormal>);
  pcl::concatenateFields(*cloud, *normals, *cloud_with_normals);
  pcl::toPCLPointCloud2(*cloud_with_normals, mesh.cloud);

  return mesh;
}
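// Mesh the full TSDF volume with marching cubes and pack the resulting triangle
// soup (three consecutive points per face) into a pcl::PolygonMesh.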
pcl::PolygonMesh Kinfu::createPolygonMesh()
{
  if (!marching_cubes_)
  {
    marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());
  }
  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =
    marching_cubes_->run(kinfu_->volume(), triangles_buffer_device);

  if (triangles_device.empty())
  {
    return pcl::PolygonMesh();
  }

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);
  cloud->width = static_cast<int>(triangles_device.size());
  cloud->height = 1;
  cloud->header.frame_id = "kinfu_origin";
  triangles_device.download(cloud->points);

  pcl::PolygonMesh mesh;
  pcl::toPCLPointCloud2(*cloud, mesh.cloud);
  mesh.polygons.resize(triangles_device.size() / 3);
  for (size_t i = 0; i < mesh.polygons.size(); ++i)
  {
    pcl::Vertices v;
    // The 0, 2, 1 order flips the winding of each marching-cubes triangle.
    v.vertices.push_back(i * 3 + 0);
    v.vertices.push_back(i * 3 + 2);
    v.vertices.push_back(i * 3 + 1);
    mesh.polygons[i] = v;
  }
  return mesh;
}
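// Variant used by "save_mesh_with_context": the marching-cubes mesh is cropped to
// the requested bounding box (transformed into kinfu_origin), and when a ground
// frame is given the unobserved underside is closed by projecting the cropped
// vertices down to the lowest point in that frame.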
pcl::PolygonMesh Kinfu::createPolygonMesh(const jsk_recognition_msgs::BoundingBox& box_msg,
                                          const std::string& ground_frame_id)
{
  if (!marching_cubes_)
  {
    marching_cubes_ = pcl::gpu::kinfuLS::MarchingCubes::Ptr(new pcl::gpu::kinfuLS::MarchingCubes());
  }
  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_buffer_device;
  pcl::gpu::DeviceArray<pcl::PointXYZ> triangles_device =
    marching_cubes_->run(kinfu_->volume(), triangles_buffer_device);

  if (triangles_device.empty())
  {
    return pcl::PolygonMesh();
  }

  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>());
  cloud->width = static_cast<int>(triangles_device.size());
  cloud->height = 1;
  cloud->header.frame_id = "kinfu_origin";
  triangles_device.download(cloud->points);

  // Transform the bounding box into the kinfu_origin frame.
  Eigen::Affine3f kinfu_origin_to_box = Eigen::Affine3f::Identity();
  if (box_msg.header.frame_id != "kinfu_origin")
  {
    tf::StampedTransform tf_kinfu_origin_to_box;
    tf_listener_->lookupTransform("kinfu_origin", box_msg.header.frame_id,
                                  ros::Time(0), tf_kinfu_origin_to_box);
    Eigen::Affine3d kinfu_origin_to_box_d;
    tf::transformTFToEigen(tf_kinfu_origin_to_box, kinfu_origin_to_box_d);
    kinfu_origin_to_box = kinfu_origin_to_box_d.cast<float>();
  }

  jsk_recognition_msgs::BoundingBox transformed_box_msg;
  transformed_box_msg.dimensions = box_msg.dimensions;
  Eigen::Affine3f box_pose;
  Eigen::Affine3d box_pose_d;
  tf::poseMsgToEigen(box_msg.pose, box_pose_d);
  box_pose = box_pose_d.cast<float>();
  box_pose = kinfu_origin_to_box * box_pose;
  tf::poseEigenToMsg(box_pose.cast<double>(), transformed_box_msg.pose);
  transformed_box_msg.header.frame_id = "kinfu_origin";

  // Crop the mesh vertices with the transformed box.
  std::vector<int> indices;
  jsk_recognition_utils::cropPointCloud<pcl::PointXYZ>(cloud, transformed_box_msg, &indices);
  pcl::PolygonMesh mesh;
  pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_in_box(new pcl::PointCloud<pcl::PointXYZ>());
  cloud_in_box->width = static_cast<int>(indices.size());
  cloud_in_box->height = 1;
  cloud_in_box->points.resize(indices.size());
  for (size_t i = 0; i < indices.size(); i++)
  {
    cloud_in_box->points[i] = cloud->points[indices[i]];
    // Keep a face only when all three of its consecutive vertices survived the crop.
    if ((i + 2) < indices.size() &&
        indices[i] % 3 == 0 && (indices[i] + 1) == indices[i + 1] && (indices[i] + 2) == indices[i + 2])
    {
      pcl::Vertices v;
      v.vertices.push_back(i + 0);
      v.vertices.push_back(i + 2);
      v.vertices.push_back(i + 1);
      mesh.polygons.push_back(v);
    }
  }
  if (!ground_frame_id.empty())
  {
    // Close the unobserved bottom: express the cropped cloud in the ground frame,
    // project every vertex down to the lowest z, and triangulate those points.
    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_ground_frame(new pcl::PointCloud<pcl::PointXYZ>());
    tf::StampedTransform tf_transform;
    tf_listener_->lookupTransform(ground_frame_id, "kinfu_origin", ros::Time(0), tf_transform);
    Eigen::Affine3f transform;
    Eigen::Affine3d transform_d;
    tf::transformTFToEigen(tf_transform, transform_d);
    transform = transform_d.cast<float>();
    pcl::transformPointCloud(*cloud_in_box, *cloud_ground_frame, transform);

    pcl::PointXYZ min_pt, max_pt;
    pcl::getMinMax3D(*cloud_ground_frame, min_pt, max_pt);

    pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_occluded(new pcl::PointCloud<pcl::PointXYZ>());
    for (size_t i = 0; i < cloud_ground_frame->points.size(); i++)
    {
      pcl::PointXYZ pt_visible = cloud_ground_frame->points[i];
      pcl::PointXYZ pt_occluded(pt_visible.x, pt_visible.y, min_pt.z);
      cloud_occluded->points.push_back(pt_occluded);
      if (i % 3 == 2)  // assumption: one projected triangle per visible triangle
      {
        pcl::Vertices v;
        v.vertices.push_back(cloud_in_box->width + i - 2);
        v.vertices.push_back(cloud_in_box->width + i - 1);
        v.vertices.push_back(cloud_in_box->width + i - 0);
        mesh.polygons.push_back(v);
      }
    }
    cloud_occluded->width = cloud_occluded->points.size();
    cloud_occluded->height = 1;
    pcl::transformPointCloud(*cloud_occluded, *cloud_occluded, transform.inverse());
    *cloud_in_box = *cloud_in_box + *cloud_occluded;
  }

  pcl::toPCLPointCloud2(*cloud_in_box, mesh.cloud);
  return mesh;
}
}  // namespace jsk_pcl_ros

#include <pluginlib/class_list_macros.h>
PLUGINLIB_EXPORT_CLASS(jsk_pcl_ros::Kinfu, nodelet::Nodelet);